-rw-r--r--  .gitignore  1
-rw-r--r--  AUTHORS  3
-rw-r--r--  CHANGES  315
-rw-r--r--  DEVELOPERS  95
-rw-r--r--  INSTALL.Unix  231
-rw-r--r--  INSTALL.Windows  148
-rw-r--r--  Makefile.am  61
-rw-r--r--  NEWS  131
-rw-r--r--  NOTICE  10
-rw-r--r--  README  500
-rw-r--r--  THANKS  38
-rw-r--r--  acinclude.m4.in  6
-rw-r--r--  bin/couchdb.bat.tpl.in  6
-rw-r--r--  bin/couchdb.tpl.in  21
-rwxr-xr-x  bootstrap  6
-rw-r--r--  configure.ac  12
-rw-r--r--  etc/Makefile.am  21
-rw-r--r--  etc/couchdb/Makefile.am  3
-rw-r--r--  etc/couchdb/default.ini.tpl.in  56
-rw-r--r--  etc/couchdb/local.ini  40
-rw-r--r--  etc/init/couchdb.tpl.in  4
-rw-r--r--  etc/windows/Makefile.am  13
-rw-r--r--  license.skip  64
-rw-r--r--  share/Makefile.am  42
-rw-r--r--  share/server/filter.js  3
-rw-r--r--  share/server/json2.js  482
-rw-r--r--  share/server/loop.js  5
-rw-r--r--  share/server/render.js  25
-rw-r--r--  share/server/state.js  7
-rw-r--r--  share/server/util.js  150
-rw-r--r--  share/server/views.js  13
-rw-r--r--  share/www/_sidebar.html  4
-rw-r--r--  share/www/config.html  45
-rw-r--r--  share/www/couch_tests.html  17
-rw-r--r--  share/www/custom_test.html  4
-rw-r--r--  share/www/database.html  37
-rw-r--r--  share/www/dialog/_admin_party.html  2
-rw-r--r--  share/www/dialog/_compact_cleanup.html (renamed from share/www/dialog/_compact_view.html)  29
-rw-r--r--  share/www/dialog/_create_admin.html  2
-rw-r--r--  share/www/dialog/_create_config.html (renamed from share/www/dialog/_compact_database.html)  24
-rw-r--r--  share/www/dialog/_database_security.html  50
-rw-r--r--  share/www/dialog/_login.html  4
-rw-r--r--  share/www/dialog/_share_test_reports.html  42
-rw-r--r--  share/www/dialog/_signup.html  2
-rw-r--r--  share/www/dialog/_view_cleanup.html  28
-rw-r--r--  share/www/document.html  5
-rw-r--r--  share/www/image/key.png  bin 0 -> 859 bytes
-rw-r--r--  share/www/image/spinner.gif  bin 1849 -> 3008 bytes
-rw-r--r--  share/www/index.html  4
-rw-r--r--  share/www/replicator.html  47
-rw-r--r--  share/www/script/base64.js  124
-rw-r--r--  share/www/script/couch.js  124
-rw-r--r--  share/www/script/couch_test_runner.js  121
-rw-r--r--  share/www/script/couch_tests.js  70
-rw-r--r--  share/www/script/futon.browse.js  199
-rw-r--r--  share/www/script/futon.format.js  24
-rw-r--r--  share/www/script/futon.js  137
-rw-r--r--  share/www/script/jquery.couch.js  292
-rw-r--r--  share/www/script/jquery.dialog.js  2
-rw-r--r--  share/www/script/jquery.form.js  986
-rw-r--r--  share/www/script/jquery.js  6402
-rw-r--r--  share/www/script/json2.js  20
-rw-r--r--  share/www/script/jspec/jspec.css  149
-rw-r--r--  share/www/script/jspec/jspec.jquery.js  72
-rw-r--r--  share/www/script/jspec/jspec.js  1756
-rw-r--r--  share/www/script/jspec/jspec.xhr.js  195
-rw-r--r--  share/www/script/oauth.js  2
-rw-r--r--  share/www/script/test/attachment_names.js  4
-rw-r--r--  share/www/script/test/attachment_paths.js  14
-rw-r--r--  share/www/script/test/attachment_ranges.js  134
-rw-r--r--  share/www/script/test/attachment_views.js  4
-rw-r--r--  share/www/script/test/attachments.js  50
-rw-r--r--  share/www/script/test/attachments_multipart.js  46
-rw-r--r--  share/www/script/test/auth_cache.js  280
-rw-r--r--  share/www/script/test/basics.js  33
-rw-r--r--  share/www/script/test/batch_save.js  5
-rw-r--r--  share/www/script/test/bulk_docs.js  10
-rw-r--r--  share/www/script/test/changes.js  357
-rw-r--r--  share/www/script/test/compact.js  10
-rw-r--r--  share/www/script/test/config.js  106
-rw-r--r--  share/www/script/test/conflicts.js  2
-rw-r--r--  share/www/script/test/content_negotiation.js  9
-rw-r--r--  share/www/script/test/cookie_auth.js  99
-rw-r--r--  share/www/script/test/copy_doc.js  3
-rw-r--r--  share/www/script/test/delayed_commits.js  29
-rw-r--r--  share/www/script/test/design_docs.js  398
-rw-r--r--  share/www/script/test/erlang_views.js  20
-rw-r--r--  share/www/script/test/etags_views.js  104
-rw-r--r--  share/www/script/test/http.js  6
-rw-r--r--  share/www/script/test/jsonp.js  69
-rw-r--r--  share/www/script/test/list_views.js  45
-rw-r--r--  share/www/script/test/method_override.js  40
-rw-r--r--  share/www/script/test/oauth.js  42
-rw-r--r--  share/www/script/test/proxyauth.js  130
-rw-r--r--  share/www/script/test/purge.js  14
-rw-r--r--  share/www/script/test/reader_acl.js  198
-rw-r--r--  share/www/script/test/recreate_doc.js  2
-rw-r--r--  share/www/script/test/reduce.js  9
-rw-r--r--  share/www/script/test/reduce_builtin.js  66
-rw-r--r--  share/www/script/test/replication.js  467
-rw-r--r--  share/www/script/test/replicator_db.js  818
-rw-r--r--  share/www/script/test/rewrite.js  443
-rw-r--r--  share/www/script/test/security_validation.js  97
-rw-r--r--  share/www/script/test/show_documents.js  103
-rw-r--r--  share/www/script/test/stats.js  31
-rw-r--r--  share/www/script/test/update_documents.js  22
-rw-r--r--  share/www/script/test/users_db.js  89
-rw-r--r--  share/www/script/test/uuids.js  18
-rw-r--r--  share/www/script/test/view_collation.js  12
-rw-r--r--  share/www/script/test/view_collation_raw.js  12
-rw-r--r--  share/www/script/test/view_compaction.js  104
-rw-r--r--  share/www/script/test/view_errors.js  54
-rw-r--r--  share/www/script/test/view_multi_key_all_docs.js  37
-rw-r--r--  share/www/script/test/view_multi_key_design.js  84
-rw-r--r--  share/www/script/test/view_pagination.js  123
-rw-r--r--  share/www/script/test/view_sandboxing.js  92
-rw-r--r--  share/www/script/test/view_update_seq.js  106
-rw-r--r--  share/www/script/test/view_xml.js  4
-rw-r--r--  share/www/session.html  96
-rw-r--r--  share/www/spec/couch_js_class_methods_spec.js  401
-rw-r--r--  share/www/spec/couch_js_instance_methods_1_spec.js  311
-rw-r--r--  share/www/spec/couch_js_instance_methods_2_spec.js  246
-rw-r--r--  share/www/spec/couch_js_instance_methods_3_spec.js  215
-rw-r--r--  share/www/spec/custom_helpers.js  51
-rw-r--r--  share/www/spec/jquery_couch_js_class_methods_spec.js  523
-rw-r--r--  share/www/spec/jquery_couch_js_instance_methods_1_spec.js  202
-rw-r--r--  share/www/spec/jquery_couch_js_instance_methods_2_spec.js  433
-rw-r--r--  share/www/spec/jquery_couch_js_instance_methods_3_spec.js  540
-rw-r--r--  share/www/spec/run.html  46
-rw-r--r--  share/www/status.html  4
-rw-r--r--  share/www/style/layout.css  52
-rw-r--r--  src/couchdb/Makefile.am  22
-rw-r--r--  src/couchdb/couch.app.tpl.in  2
-rw-r--r--  src/couchdb/couch_app.erl  5
-rw-r--r--  src/couchdb/couch_auth_cache.erl  419
-rw-r--r--  src/couchdb/couch_btree.erl  50
-rw-r--r--  src/couchdb/couch_changes.erl  334
-rw-r--r--  src/couchdb/couch_config.erl  46
-rw-r--r--  src/couchdb/couch_db.erl  370
-rw-r--r--  src/couchdb/couch_db.hrl  67
-rw-r--r--  src/couchdb/couch_db_updater.erl  251
-rw-r--r--  src/couchdb/couch_doc.erl  304
-rw-r--r--  src/couchdb/couch_external_server.erl  4
-rw-r--r--  src/couchdb/couch_file.erl  228
-rw-r--r--  src/couchdb/couch_httpd.erl  310
-rw-r--r--  src/couchdb/couch_httpd_auth.erl  302
-rw-r--r--  src/couchdb/couch_httpd_db.erl  777
-rw-r--r--  src/couchdb/couch_httpd_external.erl  26
-rw-r--r--  src/couchdb/couch_httpd_misc_handlers.erl  111
-rw-r--r--  src/couchdb/couch_httpd_oauth.erl  24
-rw-r--r--  src/couchdb/couch_httpd_proxy.erl  425
-rw-r--r--  src/couchdb/couch_httpd_rewrite.erl  461
-rw-r--r--  src/couchdb/couch_httpd_show.erl  164
-rw-r--r--  src/couchdb/couch_httpd_stats_handlers.erl  7
-rw-r--r--  src/couchdb/couch_httpd_vhost.erl  402
-rw-r--r--  src/couchdb/couch_httpd_view.erl  357
-rw-r--r--  src/couchdb/couch_js_functions.hrl  148
-rw-r--r--  src/couchdb/couch_key_tree.erl  129
-rw-r--r--  src/couchdb/couch_log.erl  54
-rw-r--r--  src/couchdb/couch_native_process.erl  53
-rw-r--r--  src/couchdb/couch_os_daemons.erl  363
-rw-r--r--  src/couchdb/couch_os_process.erl  5
-rw-r--r--  src/couchdb/couch_query_servers.erl  363
-rw-r--r--  src/couchdb/couch_ref_counter.erl  50
-rw-r--r--  src/couchdb/couch_rep.erl  626
-rw-r--r--  src/couchdb/couch_rep_att.erl  35
-rw-r--r--  src/couchdb/couch_rep_changes_feed.erl  279
-rw-r--r--  src/couchdb/couch_rep_db_listener.erl  311
-rw-r--r--  src/couchdb/couch_rep_httpc.erl  117
-rw-r--r--  src/couchdb/couch_rep_missing_revs.erl  40
-rw-r--r--  src/couchdb/couch_rep_reader.erl  49
-rw-r--r--  src/couchdb/couch_rep_writer.erl  126
-rw-r--r--  src/couchdb/couch_server.erl  287
-rw-r--r--  src/couchdb/couch_server_sup.erl  16
-rw-r--r--  src/couchdb/couch_stats_collector.erl  8
-rw-r--r--  src/couchdb/couch_stream.erl  205
-rw-r--r--  src/couchdb/couch_util.erl  307
-rw-r--r--  src/couchdb/couch_view.erl  218
-rw-r--r--  src/couchdb/couch_view_group.erl  209
-rw-r--r--  src/couchdb/couch_view_updater.erl  33
-rw-r--r--  src/couchdb/couch_work_queue.erl  139
-rw-r--r--  src/couchdb/priv/Makefile.am  2
-rw-r--r--  src/couchdb/priv/couch_js/http.c  4
-rw-r--r--  src/couchdb/priv/stat_descriptions.cfg.in  15
-rw-r--r--  src/erlang-oauth/Makefile.am  7
-rw-r--r--  src/erlang-oauth/oauth_hmac_sha1.erl  2
-rw-r--r--  src/erlang-oauth/oauth_plaintext.erl  2
-rw-r--r--  src/ibrowse/Makefile.am  2
-rw-r--r--  src/ibrowse/ibrowse.app.in  10
-rw-r--r--  src/ibrowse/ibrowse.erl  597
-rw-r--r--  src/ibrowse/ibrowse_app.erl  9
-rw-r--r--  src/ibrowse/ibrowse_http_client.erl  1882
-rw-r--r--  src/ibrowse/ibrowse_lb.erl  31
-rw-r--r--  src/ibrowse/ibrowse_lib.erl  304
-rw-r--r--  src/ibrowse/ibrowse_sup.erl  6
-rw-r--r--  src/ibrowse/ibrowse_test.erl  89
-rw-r--r--  src/mochiweb/Makefile.am  118
-rw-r--r--  src/mochiweb/internal.hrl  3
-rw-r--r--  src/mochiweb/mochifmt.erl  27
-rw-r--r--  src/mochiweb/mochifmt_records.erl  8
-rw-r--r--  src/mochiweb/mochifmt_std.erl  7
-rw-r--r--  src/mochiweb/mochiglobal.erl  107
-rw-r--r--  src/mochiweb/mochihex.erl  38
-rw-r--r--  src/mochiweb/mochijson.erl  27
-rw-r--r--  src/mochiweb/mochijson2.erl  225
-rw-r--r--  src/mochiweb/mochilists.erl  104
-rw-r--r--  src/mochiweb/mochilogfile2.erl  140
-rw-r--r--  src/mochiweb/mochinum.erl  82
-rw-r--r--  src/mochiweb/mochitemp.erl  310
-rw-r--r--  src/mochiweb/mochiutf8.erl  316
-rw-r--r--  src/mochiweb/mochiweb.app.in  2
-rw-r--r--  src/mochiweb/mochiweb.app.src  9
-rw-r--r--  src/mochiweb/mochiweb.erl  221
-rw-r--r--  src/mochiweb/mochiweb_acceptor.erl  48
-rw-r--r--  src/mochiweb/mochiweb_app.erl  7
-rw-r--r--  src/mochiweb/mochiweb_charref.erl  35
-rw-r--r--  src/mochiweb/mochiweb_cookies.erl  108
-rw-r--r--  src/mochiweb/mochiweb_cover.erl  75
-rw-r--r--  src/mochiweb/mochiweb_echo.erl  11
-rw-r--r--  src/mochiweb/mochiweb_headers.erl  180
-rw-r--r--  src/mochiweb/mochiweb_html.erl  668
-rw-r--r--  src/mochiweb/mochiweb_http.erl  179
-rw-r--r--  src/mochiweb/mochiweb_io.erl  46
-rw-r--r--  src/mochiweb/mochiweb_mime.erl  94
-rw-r--r--  src/mochiweb/mochiweb_multipart.erl  392
-rw-r--r--  src/mochiweb/mochiweb_request.erl  408
-rw-r--r--  src/mochiweb/mochiweb_response.erl  8
-rw-r--r--  src/mochiweb/mochiweb_skel.erl  41
-rw-r--r--  src/mochiweb/mochiweb_socket.erl  84
-rw-r--r--  src/mochiweb/mochiweb_socket_server.erl  190
-rw-r--r--  src/mochiweb/mochiweb_sup.erl  7
-rw-r--r--  src/mochiweb/mochiweb_util.erl  650
-rw-r--r--  src/mochiweb/reloader.erl  40
-rwxr-xr-x  test/etap/010-file-basics.t  18
-rwxr-xr-x  test/etap/021-btree-reductions.t  4
-rwxr-xr-x  test/etap/030-doc-from-json.t  16
-rwxr-xr-x  test/etap/031-doc-to-json.t  21
-rwxr-xr-x  test/etap/040-util.t  37
-rwxr-xr-x  test/etap/050-stream.t  12
-rwxr-xr-x  test/etap/060-kt-merging.t  97
-rwxr-xr-x  test/etap/070-couch-db.t  13
-rwxr-xr-x  test/etap/090-task-status.t  4
-rwxr-xr-x  test/etap/100-ref-counter.t  31
-rwxr-xr-x  test/etap/110-replication-httpc.t  6
-rwxr-xr-x  test/etap/111-replication-changes-feed.t  16
-rwxr-xr-x  test/etap/112-replication-missing-revs.t  21
-rwxr-xr-x  test/etap/113-replication-attachment-comp.t  317
-rwxr-xr-x  test/etap/121-stats-aggregates.t  4
-rwxr-xr-x  test/etap/130-attachments-md5.t  8
-rwxr-xr-x  test/etap/140-attachment-comp.t  715
-rwxr-xr-x  test/etap/150-invalid-view-seq.t  192
-rwxr-xr-x  test/etap/160-vhosts.t  290
-rwxr-xr-x  test/etap/170-os-daemons.es  26
-rwxr-xr-x  test/etap/170-os-daemons.t  114
-rwxr-xr-x  test/etap/171-os-daemons-config.es  83
-rwxr-xr-x  test/etap/171-os-daemons-config.t  74
-rw-r--r--  test/etap/172-os-daemon-errors.1.es  22
-rwxr-xr-x  test/etap/172-os-daemon-errors.2.es  16
-rwxr-xr-x  test/etap/172-os-daemon-errors.3.es  17
-rwxr-xr-x  test/etap/172-os-daemon-errors.4.es  17
-rwxr-xr-x  test/etap/172-os-daemon-errors.t  126
-rwxr-xr-x  test/etap/173-os-daemon-cfg-register.es  35
-rwxr-xr-x  test/etap/173-os-daemon-cfg-register.t  98
-rw-r--r--  test/etap/180-http-proxy.ini  20
-rwxr-xr-x  test/etap/180-http-proxy.t  357
-rw-r--r--  test/etap/Makefile.am  25
-rw-r--r--  test/etap/test_web.erl  99
-rw-r--r--  test/javascript/couch_http.js  13
-rw-r--r--  test/view_server/query_server_spec.rb  40
269 files changed, 33131 insertions, 9195 deletions
diff --git a/.gitignore b/.gitignore
index 485b17d6..47ca91b7 100644
--- a/.gitignore
+++ b/.gitignore
@@ -95,3 +95,4 @@ couchdb.stdout
# for make cover
cover/*
+INSTALL
diff --git a/AUTHORS b/AUTHORS
index c60151bf..e0181c1d 100644
--- a/AUTHORS
+++ b/AUTHORS
@@ -13,5 +13,8 @@ documentation or developing software. Some of these people are:
* Adam Kocoloski <kocolosk@apache.org>
* Jason Davies <jasondavies@apache.org>
* Mark Hammond <mhammond@skippinet.com.au>
+ * Benoît Chesneau <benoitc@apache.org>
+ * Filipe Manana <fdmanana@apache.org>
+ * Robert Newson <rnewson@apache.org>
For a list of other credits see the `THANKS` file.
diff --git a/CHANGES b/CHANGES
index e2225783..2c222ad6 100644
--- a/CHANGES
+++ b/CHANGES
@@ -1,54 +1,319 @@
Apache CouchDB CHANGES
======================
+Version 1.1.0
+-------------
+
+This version has not been released yet.
+
+All CHANGES for 1.0.2 also apply to 1.1.0.
+
+HTTP Interface:
+
+ * Native SSL support.
+ * Added support for HTTP range requests for attachments.
+ * Added built-in filters for `_changes`: `_doc_ids` and `_design`; see the
+   examples after this list.
+ * Added a configuration option for TCP_NODELAY (disables Nagle's algorithm).
+ * Allow POSTing arguments to `_changes`.
+ * Allow `keys` parameter for GET requests to views.
+ * Allow wildcards in vhosts definitions.
+ * More granular ETag support for views.
+ * More flexible URL rewriter.
+ * Added support for recognizing "Q values" and media parameters in
+ HTTP Accept headers.
+
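+For example, the range-request and `_changes` filter items above could be
+exercised as follows (an illustrative session against a local server; the
+database, document, and attachment names are made up):
+
+    curl -H "Range: bytes=0-99" \
+        http://127.0.0.1:5984/dbname/docid/attachment.txt
+
+    curl -X POST 'http://127.0.0.1:5984/dbname/_changes?filter=_doc_ids' \
+        -H "Content-Type: application/json" \
+        -d '{"doc_ids": ["docid1", "docid2"]}'
+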
+Externals:
+
+ * Added OS Process module to manage daemons outside of CouchDB.
+ * Added HTTP Proxy handler for more scalable externals.
+
+Replicator:
+
+ * Added `_replicator` database to manage replications; see the example
+   after this list.
+ * Fixed issues when an endpoint is a remote database accessible via SSL.
+ * Added support for continuous by-doc-IDs replication.
+
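+For example, a replication could be managed through the new `_replicator`
+database like this (an illustrative request; the database names are made up):
+
+    curl -X PUT http://127.0.0.1:5984/_replicator/my_rep \
+        -H "Content-Type: application/json" \
+        -d '{"source": "http://example.org/source_db",
+             "target": "target_db", "continuous": true}'
+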
+Storage System:
+
+ * Multiple micro-optimizations when reading data.
+
+View Server:
+
+ * Added CommonJS support to map functions.
+ * Added `stale=update_after` query option that triggers a view update after
+   returning a `stale=ok` response; see the examples after this list.
+ * Warn about empty result caused by `startkey` and `endkey` limiting.
+ * Built-in reduce function `_sum` now accepts lists of integers as input.
+ * Added view query aliases start_key, end_key, start_key_doc_id and
+ end_key_doc_id.
+
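+For example, the new stale option and the query aliases above could be used
+like this (illustrative requests; the design document and view names are
+made up):
+
+    curl 'http://127.0.0.1:5984/dbname/_design/app/_view/by_date?stale=update_after'
+
+    curl 'http://127.0.0.1:5984/dbname/_design/app/_view/by_date?start_key="2010"&end_key="2011"'
+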
+Version 1.0.1
+-------------
+
+Storage System:
+
+ * Fix data corruption bug COUCHDB-844. Please see
+ http://couchdb.apache.org/notice/1.0.1.html for details.
+
+Replicator:
+
+ * Added support for replication via an HTTP/HTTPS proxy.
+ * Fix pull replication of attachments from 0.11 to 1.0.x.
+ * Make the _changes feed work with non-integer seqnums.
+
+HTTP Interface:
+
+ * Expose `committed_update_seq` for monitoring purposes.
+ * Show fields saved along with _deleted=true. Allows for auditing of deletes.
+ * More robust Accept-header detection.
+
+Authentication:
+
+ * Enable basic-auth popup when required to access the server, to prevent
+ people from getting locked out.
+
+Futon:
+
+ * User interface element for querying stale (cached) views.
+
+Build and System Integration:
+
+ * Included additional source files for distribution.
+
+Version 1.0.0
+-------------
+
+Security:
+
+ * Added authentication caching, to avoid repeated opening and closing of the
+ users database for each request requiring authentication.
+
+Storage System:
+
+ * Small optimization for reordering result lists.
+ * More efficient header commits.
+ * Use O_APPEND to save lseeks.
+ * Faster implementation of pread_iolist(). Further improves performance on
+ concurrent reads.
+
+View Server:
+
+ * Faster default view collation.
+ * Added option to include update_seq in view responses.
+
+Version 0.11.2
+--------------
+
+Replicator:
+
+ * Fix bug when pushing design docs by non-admins, which was hanging the
+ replicator for no good reason.
+ * Fix bug when pulling design documents from a source that requires
+ basic-auth.
+
+HTTP Interface:
+
+ * Better error messages on invalid URL requests.
+
+Authentication:
+
+ * User documents can now be deleted by admins or the user.
+
+Security:
+
+ * Avoid potential DoS attack by guarding all creation of atoms.
+
+Futon:
+
+ * Add some Futon files that were missing from the Makefile.
+
+Version 0.11.1
+--------------
+
+HTTP Interface:
+
+ * Mask passwords in active tasks and logging.
+ * Update mochijson2 to allow output of BigNums not in float form.
+ * Added support for X-HTTP-METHOD-OVERRIDE.
+ * Better error message for database names.
+ * Disable jsonp by default.
+ * Accept gzip encoded standalone attachments.
+ * Made max_concurrent_connections configurable.
+ * Made changes API more robust.
+ * Send newly generated document rev to callers of an update function.
+
+Futon:
+
+ * Use "expando links" for over-long document values in Futon.
+ * Added continuous replication option.
+ * Added option to replicate test results anonymously to a community
+ CouchDB instance.
+ * Allow creation and deletion of config entries.
+ * Fixed display issues with doc ids that have escaped characters.
+ * Fixed various UI issues.
+
+Build and System Integration:
+
+ * Output of `couchdb --help` has been improved.
+ * Fixed compatibility with the Erlang R14 series.
+ * Fixed warnings on Linux builds.
+ * Fixed build error when aclocal needs to be called during the build.
+ * Require ICU 4.3.1.
+ * Fixed compatibility with Solaris.
+
+Security:
+
+ * Added authentication redirect URL to log in clients.
+ * Fixed query parameter encoding issue in oauth.js.
+ * Made authentication timeout configurable.
+ * Temporary views are now admin-only resources.
+
+Storage System:
+
+ * Don't require a revpos for attachment stubs.
+ * Added checking to ensure when a revpos is sent with an attachment stub,
+ it's correct.
+ * Make file deletions async to avoid pauses during compaction and db
+ deletion.
+ * Fixed a wrong offset when writing headers and converting them to blocks,
+ only triggered when the header is larger than 4k.
+ * Preserve _revs_limit and instance_start_time after compaction.
+
+Configuration System:
+
+ * Fixed timeout with large .ini files.
+
+JavaScript Clients:
+
+ * Added tests for couch.js and jquery.couch.js
+ * Added changes handler to jquery.couch.js.
+ * Added cache busting to jquery.couch.js if the user agent is msie.
+ * Added support for multi-document-fetch (via _all_docs) to jquery.couch.js.
+ * Added attachment versioning to jquery.couch.js.
+ * Added option to control ensure_full_commit to jquery.couch.js.
+ * Added list functionality to jquery.couch.js.
+ * Fixed issues where bulkSave() wasn't sending a POST body.
+
+View Server:
+
+ * Provide a UUID to update functions (and all other functions) that they can
+ use to create new docs.
+ * Upgrade CommonJS modules support to 1.1.1.
+ * Fixed erlang filter funs and normalize filter fun API.
+ * Fixed hang in view shutdown.
+
+Log System:
+
+ * Log HEAD requests as HEAD, not GET.
+ * Keep massive JSON blobs out of the error log.
+ * Fixed a timeout issue.
+
+Replication System:
+
+ * Refactored various internal APIs related to attachment streaming.
+ * Fixed hanging replication.
+ * Fixed keepalive issue.
+
+URL Rewriter & Vhosts:
+
+ * Allow more complex keys in rewriter.
+ * Allow global rewrites so system defaults are available in vhosts.
+ * Allow isolation of databases with vhosts.
+ * Fix issue with passing variables to query parameters.
+
+Test Suite:
+
+ * Made the test suite overall more reliable.
+
Version 0.11.0
--------------
-This version has not been released yet.
+Security:
-Build and System Integration
+ * Fixed CVE-2010-0009: Apache CouchDB Timing Attack Vulnerability.
+ * Added default cookie-authentication and users database.
+ * Added Futon user interface for user signup and login.
+ * Added per-database reader access control lists.
+ * Added per-database security object for configuration data in validation
+ functions.
+ * Added proxy authentication handler
- * Added support for building a Windows installer as part of 'make dist'.
- * Bug fix for building couch.app's module list.
+HTTP Interface:
+
+ * Provide Content-MD5 header support for attachments.
+ * Added URL Rewriter handler.
+ * Added virtual host handling.
-View Server
+View Server:
* Added optional 'raw' binary collation for faster view builds where Unicode
collation is not important.
* Improved view index build time by reducing ICU collation callouts.
* Improved view information objects.
* Bug fix for partial updates during view builds.
+ * Move query server to a design-doc based protocol.
+ * Use json2.js for JSON serialization for compatibility with native JSON.
+ * Major refactoring of couchjs to lay the groundwork for disabling cURL
+ support. The new HTTP interaction acts like a synchronous XHR. Example usage
+ of the new system is in the JavaScript CLI test runner.
-Replication
+Replication:
* Added option to implicitly create replication target databases.
+ * Avoid leaking file descriptors on automatic replication restarts.
+ * Added option to replicate a list of documents by id.
+ * Allow continuous replication to be cancelled.
-Storage System
+Storage System:
* Adds batching of multiple updating requests, to improve throughput with many
writers. Removed the now redundant couch_batch_save module.
+ * Adds configurable compression of attachments.
-Runtime Statistics
+Runtime Statistics:
+ * Statistics are now calculated for a moving window instead of non-overlapping
+ timeframes.
* Fixed a problem with statistics timers and system sleep.
+ * Moved statistic names to a term file in the priv directory.
-Futon
+Futon:
* Added a button for view compaction.
+ * JSON strings are now displayed as-is in the document view, without the escaping of
+ new-lines and quotes. That dramatically improves readability of multi-line
+ strings.
+ * The same applies to editing JSON string values: when a change to a field
+ value is submitted and the value is not valid JSON, it is assumed to be a
+ string. This greatly improves editing of multi-line strings.
+ * Hitting tab in textareas no longer moves focus to the next form field, but simply
+ inserts a tab character at the current caret position.
+ * Fixed some font declarations.
+
+Build and System Integration:
+
+ * Updated and improved source documentation.
+ * Fixed distribution preparation for building on Mac OS X.
+ * Added support for building a Windows installer as part of 'make dist'.
+ * Bug fix for building couch.app's module list.
+ * ETap tests are now run during make distcheck. This included a number of
+ updates to the build system to properly support VPATH builds.
+ * Gavin McDonald set up a build-bot instance. More info can be found at
+ http://ci.apache.org/buildbot.html
Version 0.10.1
--------------
-Replicator
+Replicator:
* Stability enhancements regarding redirects, timeouts, OAuth.
-Query Server
+Query Server:
* Avoid process leaks
* Allow list and view to span languages
-Stats
+Stats:
* Eliminate new process flood on system wake
@@ -59,31 +324,37 @@ Build and System Integration:
Version 0.10.0
--------------
-Storage Format
+Storage Format:
* Add move headers with checksums to the end of database files for extra robust
storage and faster storage.
-View Server
+View Server:
* Added native Erlang views for high-performance applications.
+HTTP Interface:
+
+ * Added optional cookie-based authentication handler.
+ * Added optional two-legged OAuth authentication handler.
+
Build and System Integration:
* Changed `couchdb` script configuration options.
* Added default.d and local.d configuration directories to load sequence.
-HTTP Interface:
-
- * Added optional cookie-based authentication handler.
- * Added optional two-legged OAuth authentication handler.
Version 0.9.2
-------------
+Replication:
+
+ * Fix replication with 0.10 servers initiated by an 0.9 server (COUCHDB-559).
+
+Build and System Integration:
+
* Remove branch callbacks to allow building couchjs against newer versions of
Spidermonkey.
- * Fix replication with 0.10 servers initiated by an 0.9 server (COUCHDB-559).
Version 0.9.1
-------------
@@ -111,7 +382,7 @@ External Handlers:
* Fix POST requests.
-Futon Utility Client:
+Futon:
* Redirect when loading a deleted view URI from the cookie.
@@ -256,7 +527,7 @@ Build and System Integration:
* Updated `sudo` example in `README` to use the `-i` option, this fixes
problems when invoking from a directory the `couchdb` user cannot access.
-Futon Utility Client:
+Futon:
* The view selector dropdown should now work in Opera and Internet Explorer
even when it includes optgroups for design documents. (COUCHDB-81)
@@ -334,7 +605,7 @@ Build and System Integration:
* The `couchdb` and `couchjs` scripts have been improved for portability.
* The build and system integration have been improved for portability.
-Futon Utility Client:
+Futon:
* When adding a field to a document, Futon now just adds a field with an
autogenerated name instead of prompting for the name with a dialog. The name
diff --git a/DEVELOPERS b/DEVELOPERS
new file mode 100644
index 00000000..a7a6926e
--- /dev/null
+++ b/DEVELOPERS
@@ -0,0 +1,95 @@
+Apache CouchDB DEVELOPERS
+=========================
+
+Only follow these instructions if you are building from a source checkout.
+
+If you're unsure what this means, ignore this document.
+
+Dependencies
+------------
+
+You will need the following installed:
+
+ * GNU Automake (>=1.6.3) (http://www.gnu.org/software/automake/)
+ * GNU Autoconf (>=2.59) (http://www.gnu.org/software/autoconf/)
+ * GNU Libtool (http://www.gnu.org/software/libtool/)
+ * GNU help2man (http://www.gnu.org/software/help2man/)
+
+The `help2man` tool is optional, but will generate `man` pages for you.
+
+Debian-based (inc. Ubuntu) Systems
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+You can install the dependencies by running:
+
+ apt-get install automake autoconf libtool help2man
+
+Be sure to adjust the package names and versions to match those available
+for your system.
+
+Mac OS X
+~~~~~~~~
+
+You can install the dependencies by running:
+
+ port install automake autoconf libtool help2man
+
+You will need MacPorts installed to use the `port` command.
+
+Bootstrapping
+-------------
+
+Bootstrap the pristine source by running:
+
+ ./bootstrap
+
+You must repeat this step every time you update your source checkout.
+
+Testing
+-------
+
+Check the test suite by running:
+
+ make check
+
+Generate a coverage report by running:
+
+ make cover
+
+Please report any problems to the developer's mailing list.
+
+Releasing
+---------
+
+Unix-like Systems
+~~~~~~~~~~~~~~~~~
+
+Configure the source by running:
+
+ ./configure
+
+Prepare the release artefacts by running:
+
+ make distcheck
+
+You can prepare signed release artefacts by running:
+
+ make distsign
+
+The release artefacts can be found in the root source directory.
+
+Microsoft Windows
+~~~~~~~~~~~~~~~~~
+
+Configure the source by running:
+
+ ./configure
+
+Prepare the release artefacts by running:
+
+ make dist
+
+The release artefacts can be found in the `etc/windows` directory.
+
+Until the build system has been improved, you must make sure that you run this
+command from a clean source checkout. If you do not, your test database and log
+files will be bundled up in the release artefact.
diff --git a/INSTALL.Unix b/INSTALL.Unix
new file mode 100644
index 00000000..768e3846
--- /dev/null
+++ b/INSTALL.Unix
@@ -0,0 +1,231 @@
+Apache CouchDB INSTALL.Unix
+===========================
+
+A high-level guide to Unix-like systems, inc. Mac OS X and Ubuntu.
+
+Dependencies
+------------
+
+You will need the following installed:
+
+ * Erlang OTP (>=R12B5) (http://erlang.org/)
+ * ICU (http://icu.sourceforge.net/)
+ * OpenSSL (http://www.openssl.org/)
+ * Mozilla SpiderMonkey (1.8) (http://www.mozilla.org/js/spidermonkey/)
+ * libcurl (http://curl.haxx.se/libcurl/)
+ * GNU Make (http://www.gnu.org/software/make/)
+ * GNU Compiler Collection (http://gcc.gnu.org/)
+
+It is recommended that you install Erlang OTP R12B-5 or above where possible.
+
+Ubuntu
+~~~~~~
+
+See
+
+ http://wiki.apache.org/couchdb/Installing_on_Ubuntu
+
+for updated instructions on how to install on Ubuntu.
+
+Debian-based Systems
+~~~~~~~~~~~~~~~~~~~~
+
+You can install the build tools by running:
+
+ sudo apt-get install build-essential
+
+You can install the other dependencies by running:
+
+ sudo apt-get install erlang libicu-dev libmozjs-dev libcurl4-openssl-dev
+
+Be sure to adjust the package names and versions to match those available
+for your system.
+
+Mac OS X
+~~~~~~~~
+
+You can install the build tools by running:
+
+ open /Applications/Installers/Xcode\ Tools/XcodeTools.mpkg
+
+You can install the other dependencies by running:
+
+ sudo port install icu erlang spidermonkey curl
+
+You will need MacPorts installed to use the `port` command.
+
+Installing
+----------
+
+Once you have satisfied the dependencies you should run:
+
+ ./configure
+
+This script will configure CouchDB to be installed into `/usr/local` by default.
+
+If you wish to customise the installation, pass `--help` to this script.
+
+If everything was successful you should see the following message:
+
+ You have configured Apache CouchDB, time to relax.
+
+Relax.
+
+To install CouchDB you should run:
+
+ make && sudo make install
+
+You only need to use `sudo` if you're installing into a system directory.
+
+Try `gmake` if `make` is giving you any problems.
+
+If everything was successful you should see the following message:
+
+ You have installed Apache CouchDB, time to relax.
+
+Relax.
+
+First Run
+---------
+
+You can start the CouchDB server by running:
+
+ sudo -i -u couchdb couchdb
+
+This uses the `sudo` command to run the `couchdb` command as the `couchdb` user.
+
+When CouchDB starts it should eventually display the following message:
+
+ Apache CouchDB has started, time to relax.
+
+Relax.
+
+To check that everything has worked, point your web browser to:
+
+ http://127.0.0.1:5984/_utils/index.html
+
+From here you should run the test suite.
+
+Security Considerations
+-----------------------
+
+You should create a special `couchdb` user for CouchDB.
+
+On many Unix-like systems you can run:
+
+ adduser --system \
+ --home /usr/local/var/lib/couchdb \
+ --no-create-home \
+ --shell /bin/bash \
+ --group --gecos \
+ "CouchDB Administrator" couchdb
+
+On Mac OS X you can use the Workgroup Manager to create users:
+
+ http://www.apple.com/support/downloads/serveradmintools1047.html
+
+You must make sure that:
+
+ * The user has a working POSIX shell
+
+ * The user's home directory is `/usr/local/var/lib/couchdb`
+
+You can test this by (see the sample session after this list):
+
+ * Trying to log in as the `couchdb` user
+
+ * Running `pwd` and checking the present working directory
+
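+For example, a quick check might look like this (a sample session; the paths
+assume the default `/usr/local` prefix):
+
+    sudo -i -u couchdb
+    pwd
+
+The `pwd` command should print `/usr/local/var/lib/couchdb`.
+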
+Change the ownership of the CouchDB directories by running:
+
+ chown -R couchdb:couchdb /usr/local/etc/couchdb
+ chown -R couchdb:couchdb /usr/local/var/lib/couchdb
+ chown -R couchdb:couchdb /usr/local/var/log/couchdb
+ chown -R couchdb:couchdb /usr/local/var/run/couchdb
+
+Change the permission of the CouchDB directories by running:
+
+ chmod 0770 /usr/local/etc/couchdb
+ chmod 0770 /usr/local/var/lib/couchdb
+ chmod 0770 /usr/local/var/log/couchdb
+ chmod 0770 /usr/local/var/run/couchdb
+
+Running as a Daemon
+-------------------
+
+SysV/BSD-style Systems
+~~~~~~~~~~~~~~~~~~~~~~
+
+You can use the `couchdb` init script to control the CouchDB daemon.
+
+On SysV-style systems, the init script will be installed into:
+
+ /usr/local/etc/init.d
+
+On BSD-style systems, the init script will be installed into:
+
+ /usr/local/etc/rc.d
+
+We use the `[init.d|rc.d]` notation to refer to both of these directories.
+
+You can control the CouchDB daemon by running:
+
+ /usr/local/etc/[init.d|rc.d]/couchdb [start|stop|restart|status]
+
+If you wish to configure how the init script works, you can edit:
+
+ /usr/local/etc/default/couchdb
+
+Comment out the `COUCHDB_USER` setting if you're running as a non-superuser.
+
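+For example, the relevant line might look like this (an illustrative sketch;
+check the installed file for the exact variables it supports):
+
+    COUCHDB_USER=couchdb
+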
+To start the daemon on boot, copy the init script to:
+
+ /etc/[init.d|rc.d]
+
+You should then configure your system to run the init script automatically.
+
+You may be able to run:
+
+ sudo update-rc.d couchdb defaults
+
+If this fails, consult your system documentation for more information.
+
+A `logrotate` configuration is installed into:
+
+ /usr/local/etc/logrotate.d/couchdb
+
+Consult your `logrotate` documentation for more information.
+
+It is critical that the CouchDB logs are rotated so as not to fill your disk.
+
+Mac OS X
+~~~~~~~~
+
+You can use the `launchctl` command to control the CouchDB daemon.
+
+You can load the configuration by running:
+
+ sudo launchctl load \
+ /usr/local/Library/LaunchDaemons/org.apache.couchdb.plist
+
+You can stop the CouchDB daemon by running:
+
+ sudo launchctl unload \
+ /usr/local/Library/LaunchDaemons/org.apache.couchdb.plist
+
+You can start CouchDB by running:
+
+ sudo launchctl start org.apache.couchdb
+
+You can restart CouchDB by running:
+
+ sudo launchctl stop org.apache.couchdb
+
+You can edit the launchd configuration by running:
+
+ open /usr/local/Library/LaunchDaemons/org.apache.couchdb.plist
+
+To start the daemon on boot, copy the configuration file to:
+
+ /Library/LaunchDaemons
+
+Consult your system documentation for more information.
diff --git a/INSTALL.Windows b/INSTALL.Windows
new file mode 100644
index 00000000..5c4a9587
--- /dev/null
+++ b/INSTALL.Windows
@@ -0,0 +1,148 @@
+Apache CouchDB INSTALL.Windows
+==============================
+
+A high-level guide to Microsoft Windows.
+
+Dependencies
+------------
+
+You will need the following installed:
+
+ * Erlang OTP (>=R12B5) (http://erlang.org/)
+ * ICU (http://icu.sourceforge.net/)
+ * OpenSSL (http://www.openssl.org/)
+ * Mozilla SpiderMonkey (1.8) (http://www.mozilla.org/js/spidermonkey/)
+ * libcurl (http://curl.haxx.se/libcurl/)
+ * Cygwin (http://www.cygwin.com/)
+ * Visual Studio 2008 (http://msdn.microsoft.com/en-gb/vstudio/default.aspx)
+
+General Notes
+-------------
+
+ * When installing Erlang, you must build it from source.
+
+ The CouchDB build makes use of a number of the Erlang build scripts.
+
+ * When installing ICU, select the binaries built with Visual Studio 2008.
+
+ * When installing Cygwin, be sure to select all the `development` tools.
+
+ * When installing libcurl, be sure to install by hand.
+
+ The Cygwin binaries are incompatible and will not work with Erlang.
+
+Setting Up Cygwin
+-----------------
+
+Before starting any Cygwin terminals, run:
+
+ set CYGWIN=nontsec
+
+To set up your environment, run:
+
+ [VS_BIN]/vcvars32.bat
+
+Replace [VS_BIN] with the path to your Visual Studio `bin` directory.
+
+You must check that:
+
+ * The `which link` command points to the Microsoft linker.
+
+ * The `which cl` command points to the Microsoft compiler.
+
+If you do not do this, the ones found in `/usr/bin` may be used instead.
+
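+For example, you can verify both commands like this (a sample session; the
+exact paths depend on your Visual Studio installation):
+
+    which link
+    # e.g. /cygdrive/c/Program Files/Microsoft Visual Studio 9.0/VC/bin/link
+
+    which cl
+    # e.g. /cygdrive/c/Program Files/Microsoft Visual Studio 9.0/VC/bin/cl
+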
+Building Erlang
+---------------
+
+You must include OpenSSL.
+
+However, you can skip the GUI tools by running:
+
+ echo "skipping gs" > lib/gs/SKIP
+
+ echo "skipping ic" > lib/ic/SKIP
+
+Follow the rest of the Erlang instructions as described.
+
+After running:
+
+ ./otp_build release -a
+
+You should run:
+
+ ./release/win32/Install.exe
+
+This will set up the release/win32/bin directory correctly.
+
+To set up your environment for building CouchDB, run:
+
+ eval `./otp_build env_win32`
+
+To set up the `ERL_TOP` environment variable, run:
+
+ export ERL_TOP=[ERL_TOP]
+
+Replace `[ERL_TOP]` with the Erlang source directory name.
+
+Remember to use `/cygdrive/c/` instead of `c:/` as the directory prefix.
+
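+For example, if the Erlang sources were unpacked to `c:\otp_src_R13B04` (a
+made-up location), you would run:
+
+    export ERL_TOP=/cygdrive/c/otp_src_R13B04
+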
+To set up your path, run:
+
+ export PATH=$ERL_TOP/release/win32/erts-5.7.2/bin:$PATH
+
+If everything was successful, you should be ready to build CouchDB.
+
+Relax.
+
+Building CouchDB
+----------------
+
+Once you have satisfied the dependencies you should run:
+
+ ./configure \
+ --with-js-include=/cygdrive/c/path_to_seamonkey_include \
+ --with-js-lib=/cygdrive/c/path_to_seamonkey_lib \
+ --with-win32-icu-binaries=/cygdrive/c/path_to_icu_binaries_root \
+ --with-erlang=$ERL_TOP/release/win32/usr/include \
+ --with-win32-curl=/cygdrive/c/path/to/curl/root/directory \
+ --with-openssl-bin-dir=/cygdrive/c/openssl/bin \
+ --with-msvc-redist-dir=/cygdrive/c/dir/with/vcredist_platform_executable \
+ --prefix=$ERL_TOP/release/win32
+
+This command could take a while to complete.
+
+If everything was successful you should see the following message:
+
+ You have configured Apache CouchDB, time to relax.
+
+Relax.
+
+To install CouchDB you should run:
+
+ make install
+
+If everything was successful you should see the following message:
+
+ You have installed Apache CouchDB, time to relax.
+
+Relax.
+
+First Run
+---------
+
+You can start the CouchDB server by running:
+
+ $ERL_TOP/release/win32/bin/couchdb.bat
+
+When CouchDB starts it should eventually display the following message:
+
+ Apache CouchDB has started, time to relax.
+
+Relax.
+
+To check that everything has worked, point your web browser to:
+
+ http://127.0.0.1:5984/_utils/index.html
+
+From here you should run the test suite.
diff --git a/Makefile.am b/Makefile.am
index fff5648b..8e16baf6 100644
--- a/Makefile.am
+++ b/Makefile.am
@@ -12,11 +12,38 @@
SUBDIRS = bin etc src share test var utils
-localdoc_DATA = AUTHORS.gz BUGS.gz CHANGES.gz NEWS.gz README.gz THANKS.gz
+ACLOCAL_AMFLAGS = -I m4
+
+localdoc_DATA = \
+ AUTHORS.gz \
+ BUGS.gz \
+ CHANGES.gz \
+ DEVELOPERS.gz \
+ INSTALL.gz \
+ INSTALL.Unix.gz \
+ INSTALL.Windows.gz \
+ LICENSE.gz \
+ NEWS.gz \
+ NOTICE.gz \
+ README.gz \
+ THANKS.gz
DISTCLEANFILES = $(localdoc_DATA)
-EXTRA_DIST = AUTHORS BUGS CHANGES LICENSE NEWS NOTICE README THANKS license.skip
+EXTRA_DIST = \
+ AUTHORS \
+ BUGS \
+ CHANGES \
+ DEVELOPERS \
+ INSTALL \
+ INSTALL.Unix \
+ INSTALL.Windows \
+ LICENSE \
+ NEWS \
+ NOTICE \
+ README \
+ THANKS \
+ license.skip
AUTHORS.gz: $(top_srcdir)/AUTHORS
-gzip -9 < $< > $@
@@ -27,9 +54,27 @@ BUGS.gz: $(top_srcdir)/BUGS
CHANGES.gz: $(top_srcdir)/CHANGES
-gzip -9 < $< > $@
+DEVELOPERS.gz: $(top_srcdir)/DEVELOPERS
+ -gzip -9 < $< > $@
+
+INSTALL.gz: $(top_srcdir)/INSTALL
+ -gzip -9 < $< > $@
+
+INSTALL.Unix.gz: $(top_srcdir)/INSTALL.Unix
+ -gzip -9 < $< > $@
+
+INSTALL.Windows.gz: $(top_srcdir)/INSTALL.Windows
+ -gzip -9 < $< > $@
+
+LICENSE.gz: $(top_srcdir)/LICENSE
+ -gzip -9 < $< > $@
+
NEWS.gz: $(top_srcdir)/NEWS
-gzip -9 < $< > $@
+NOTICE.gz: $(top_srcdir)/NOTICE
+ -gzip -9 < $< > $@
+
README.gz: $(top_srcdir)/README
-gzip -9 < $< > $@
@@ -59,7 +104,13 @@ dev: all
mkdir -p $(top_builddir)/tmp/log
mkdir -p $(top_builddir)/tmp/run/couchdb
+install-data-hook:
+ @echo
+ @echo "You have installed Apache CouchDB, time to relax."
+
distclean-local:
+ rm -fr $(top_builddir)/etc/couchdb/default.d
+ rm -fr $(top_builddir)/etc/couchdb/local.d
rm -fr $(top_builddir)/tmp
.PHONY: local-clean
@@ -74,7 +125,11 @@ local-clean: maintainer-clean
rm -f $(top_srcdir)/test/etap/temp.*
rm -f $(top_srcdir)/*.tar.gz
rm -f $(top_srcdir)/*.tar.gz.*
- find $(top_srcdir) -name Makefile.in -exec rm {} \;
+ find $(top_srcdir) -name Makefile.in -exec rm -f {} \;
+
+dist-hook:
+ find $(top_srcdir) -type f -name "._*" -exec rm -f {} \;
+ find $(top_builddir) -type f -name "._*" -exec rm -f {} \;
distcheck-hook:
grep -rL 'http://www.apache.org/licenses/LICENSE-2.0' * \
diff --git a/NEWS b/NEWS
index ad4ac3e3..ab9c2cdb 100644
--- a/NEWS
+++ b/NEWS
@@ -1,17 +1,118 @@
Apache CouchDB NEWS
===================
-For details about backwards incompatible changes, please see:
+For details about backwards incompatible changes, see:
http://wiki.apache.org/couchdb/Breaking_changes
Each release section notes when backwards incompatible changes have been made.
+Version 1.1.0
+-------------
+
+This version has not been released yet.
+
+All NEWS for 1.0.2 also apply to 1.1.0.
+
+ * Native SSL support.
+ * Added support for HTTP range requests for attachments.
+ * Added built-in filters for `_changes`: `_doc_ids` and `_design`.
+ * Added a configuration option for TCP_NODELAY (disables Nagle's algorithm).
+ * Allow wildcards in vhosts definitions.
+ * More granular ETag support for views.
+ * More flexible URL rewriter.
+ * Added OS Process module to manage daemons outside of CouchDB.
+ * Added HTTP Proxy handler for more scalable externals.
+ * Added `_replicator` database to manage replications.
+ * Multiple micro-optimizations when reading data.
+ * Added CommonJS support to map functions.
+ * Added `stale=update_after` query option that triggers a view update after
+ returning a `stale=ok` response.
+
+Version 1.0.1
+-------------
+
+ * Fix data corruption bug COUCHDB-844. Please see
+ http://couchdb.apache.org/notice/1.0.1.html for details.
+ * Added support for replication via an HTTP/HTTPS proxy.
+ * Fixed various replicator bugs for interop with older CouchDB versions.
+ * Show fields saved along with _deleted=true. Allows for auditing of deletes.
+ * Enable basic-auth popup when required to access the server, to prevent
+ people from getting locked out.
+ * User interface element for querying stale (cached) views.
+
+Version 1.0.0
+-------------
+
+ * More efficient header commits.
+ * Use O_APPEND to save lseeks.
+ * Faster implementation of pread_iolist(). Further improves performance on
+ concurrent reads.
+ * Added authentication caching
+ * Faster default view collation.
+ * Added option to include update_seq in view responses.
+
+Version 0.11.2
+--------------
+
+ * Replicator bugfixes for replicating design documents from secured databases.
+ * Better error messages on invalid URL requests.
+ * User documents can now be deleted by admins or the user.
+ * Avoid potential DoS attack by guarding all creation of atoms.
+ * Some Futon and JavaScript library bugfixes.
+ * Fixed CVE-2010-2234: Apache CouchDB Cross Site Request Forgery Attack
+
+Version 0.11.1
+--------------
+
+ * Mask passwords in active tasks and logging.
+ * Update mochijson2 to allow output of BigNums not in float form.
+ * Added support for X-HTTP-METHOD-OVERRIDE.
+ * Disable jsonp by default.
+ * Accept gzip encoded standalone attachments.
+ * Made max_concurrent_connections configurable.
+ * Added continuous replication option to Futon.
+ * Added option to replicate test results anonymously to a community
+ CouchDB instance.
+ * Allow creation and deletion of config entries in Futon.
+ * Fixed various UI issues in Futon.
+ * Fixed compatibility with the Erlang R14 series.
+ * Fixed warnings on Linux builds.
+ * Fixed build error when aclocal needs to be called during the build.
+ * Require ICU 4.3.1.
+ * Fixed compatibility with Solaris.
+ * Added authentication redirect URL to log in clients.
+ * Added authentication caching, to avoid repeated opening and closing of the
+ users database for each request requiring authentication.
+ * Made authentication timeout configurable.
+ * Temporary views are now admin-only resources.
+ * Don't require a revpos for attachment stubs.
+ * Make file deletions async to avoid pauses during compaction and db
+ deletion.
+ * Fixed a wrong offset when writing headers and converting them to blocks,
+ only triggered when the header is larger than 4k.
+ * Preserve _revs_limit and instance_start_time after compaction.
+ * Fixed timeout with large .ini files.
+ * Added tests for couch.js and jquery.couch.js
+ * Added various API features to jquery.couch.js
+ * Faster default view collation.
+ * Upgrade CommonJS modules support to 1.1.1.
+ * Added option to include update_seq in view responses.
+ * Fixed erlang filter funs and normalize filter fun API.
+ * Fixed hang in view shutdown.
+ * Refactored various internal APIs related to attachment streaming.
+ * Fixed hanging replication.
+ * Fixed keepalive issue.
+ * Allow global rewrites so system defaults are available in vhosts.
+ * Allow isolation of databases with vhosts.
+ * Made the test suite overall more reliable.
+
Version 0.11.0
--------------
-This version has not been released yet.
+This version is a feature-freeze release candidate for Apache CouchDB 1.0.
+
+ * Fixed CVE-2010-0009: Apache CouchDB Timing Attack Vulnerability.
* Added support for building a Windows installer as part of 'make dist'.
* Added optional 'raw' binary collation for faster view builds where Unicode
collation is not important.
@@ -21,8 +122,11 @@ This version has not been released yet.
* Bug fix for partial updates during view builds.
* Bug fix for building couch.app's module list.
* Fixed a problem with statistics timers and system sleep.
+ * Improved the statistics calculations to use an online moving window
+ algorithm.
* Adds batching of multiple updating requests, to improve throughput with many
- writers. Removed the now redundant couch_batch_save module.
+ writers.
+ * Removed the now redundant couch_batch_save module.
* Bug fix for premature termination of chunked responses.
* Improved speed and concurrency of config lookups.
* Fixed an edge case for HTTP redirects during replication.
@@ -30,6 +134,23 @@ This version has not been released yet.
* Fixed query parameter handling in OAuth'd replication.
* Fixed a bug preventing mixing languages with lists and views.
* Avoid OS process leaks in lists.
+ * Avoid leaking file descriptors on automatic replication restarts.
+ * Various improvements to the Futon UI.
+ * Provide Content-MD5 header support for attachments.
+ * Added default cookie-authentication and users db.
+ * Added per-db reader access control lists.
+ * Added per-db security object for configuration data in validation functions.
+ * Added URL Rewriter handler.
+ * Added proxy authentication handler.
+ * Added ability to replicate documents by id.
+ * Added virtual host handling.
+ * Uses json2.js for JSON serialization for compatibility with native JSON.
+
+Version 0.10.2
+--------------
+
+ * Fixed CVE-2010-0009: Apache CouchDB Timing Attack Vulnerability.
Version 0.10.1
--------------
@@ -170,8 +291,8 @@ Version 0.3.0
* CouchDB now fully supports Unicode and locale specific collation via the ICU
library, both in the Fabric engine and computed tables.
* The `in` operator has been added to Fabric.
- * The `startdoc` query string variable specifies the starting document to use if
- there are multiple rows with identical startkeys.
+ * The `startdoc` query string variable specifies the starting document to use
+ if there are multiple rows with identical startkeys.
* The `skip` query string variable specifies the number of rows to skip before
returning results. The `skip` value must be a positive integer. If used with
a `count` variable the skipped rows aren't counted as output.
diff --git a/NOTICE b/NOTICE
index 0934930c..d547e55f 100644
--- a/NOTICE
+++ b/NOTICE
@@ -16,7 +16,7 @@ This product also includes the following third-party components:
* jQuery (http://jquery.com/)
- Copyright 2009, John Resig
+ Copyright 2010, John Resig
* json2.js (http://www.json.org/)
@@ -41,3 +41,11 @@ This product also includes the following third-party components:
* mimeparse.js (http://code.google.com/p/mimeparse/)
Copyright 2009, Chris Anderson <jchris@apache.org>
+
+ * base64.js
+
+ Copyright 1999, Masanao Izumo <iz@onicos.co.jp>
+
+ * jspec.js (http://visionmedia.github.com/jspec/)
+
+ Copyright 2010 TJ Holowaychuk <tj@vision-media.ca>
diff --git a/README b/README
index 3366c8ec..540226d3 100644
--- a/README
+++ b/README
@@ -1,483 +1,81 @@
Apache CouchDB README
=====================
-Apache CouchDB is beta software and still under heavy development. Please be
-aware that important areas such as the public API or internal database format
-may see backwards incompatible changes between versions.
+Installation
+------------
-Building From Checkout
-----------------------
+For a low-level guide, see:
-You can skip this section if you are installing from a release tarball.
+ INSTALL
-Dependencies
-~~~~~~~~~~~~
+For a high-level guide to Unix-like systems, inc. Mac OS X and Ubuntu, see:
-To build Apache CouchDB from checkout you need some of the following installed:
+ INSTALL.Unix
- * GNU Automake (>=1.6.3) (http://www.gnu.org/software/automake/)
- * GNU Autoconf (>=2.59) (http://www.gnu.org/software/autoconf/)
- * GNU Libtool (http://www.gnu.org/software/libtool/)
- * GNU help2man (http://www.gnu.org/software/help2man/)
+For a high-level guide to Microsoft Windows, see:
-Debian-based (inc. Ubuntu) Systems
-++++++++++++++++++++++++++++++++++
+ INSTALL.Windows
-You can install the dependencies by running:
+Follow the proper instructions to get CouchDB installed on your system.
- apt-get install automake autoconf libtool help2man
-
-Mac OS X
-++++++++
-
-You can install the dependencies using MacPorts by running:
-
- port install automake autoconf libtool help2man
-
-Bootstrapping
-~~~~~~~~~~~~~
-
-Note: You must repeat this step every time you update your checkout.
-
-Bootstrap the pristine source by running:
-
- ./bootstrap
-
-Installation and First Run
---------------------------
-
-Unix-like Operating Systems (inc. Mac OS X)
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-Dependencies
-^^^^^^^^^^^^
-
-To build and install Apache CouchDB you will need the following installed:
-
- * Erlang OTP (>=R12B5) (http://erlang.org/)
- * ICU (http://icu.sourceforge.net/)
- * OpenSSL (http://www.openssl.org/)
- * Mozilla SpiderMonkey (http://www.mozilla.org/js/spidermonkey/)
- * libcurl (http://curl.haxx.se/libcurl/)
- * GNU Make (http://www.gnu.org/software/make/)
- * GNU Compiler Collection (http://gcc.gnu.org/)
-
-It is recommended that you install Erlang OTP R12B-5 or above where possible.
-
-Debian-based (inc. Ubuntu) Systems
-++++++++++++++++++++++++++++++++++
-
-You can install the dependencies by running:
-
- apt-get install build-essential erlang libicu-dev libmozjs-dev libcurl4-openssl-dev
-
-If you get an error regarding the `libicu38` or `libicu-dev` be sure to check
-the version used by your distribution (using `apt-cache search libicu`) and
-install those packages instead. `libcurl4-openssl-dev` is the current version of
-`libcurl-dev` supplied by Ubuntu. You may need to specify an alternate package
-name for libcurl bindings.
-
-Mac OS X
-++++++++
-
-To install GNU Make and the GNU Compiler Collection on Mac OS X you should
-install the Xcode Tools metapackage by running:
-
- open /Applications/Installers/Xcode\ Tools/XcodeTools.mpkg
-
-You can install the dependencies using MacPorts by running:
-
- port install icu erlang spidermonkey curl
-
-Installing
-^^^^^^^^^^
-
-Once you have satisfied the dependencies you should run:
-
- ./configure
-
-Note: Apache CouchDB is installed into `/usr/local` by default. If you want to
-change where Apache CouchDB is installed (or where to find Erlang) be sure to
-read the output from running the `./configure --help` command.
-
-Note: All the examples assume you have installed into `/usr/local`.
-
-If everything was successful you should see the following message:
-
- You have configured Apache CouchDB, time to relax.
-
-Relax.
-
-To install Apache CouchDB you should then run the following command:
-
- make && sudo make install
-
-Note: The use of the `sudo` command is only required if you are installing into
-a system owned directory. You do not need to do this if you are installing
-elsewhere, such as your home directory.
-
-If you are having problems running `make` you may want to try running `gmake` if
-this is available on your system.
-
-More options can be found by reading the `INSTALL` file.
-
-Security Considerations
-^^^^^^^^^^^^^^^^^^^^^^^
-
-It is not advisable to run Apache CouchDB as the superuser. We strongly
-recommend that you create a specific user to run Apache CouchDB and own the
-data/log directories.
-
-You can use whatever tool your system provides to create a new `couchdb` user.
-
-On many Unix-like systems you can run:
-
- adduser --system --home /usr/local/var/lib/couchdb --no-create-home \
- --shell /bin/bash --group --gecos "CouchDB Administrator" couchdb
-
-Mac OS X provides the standard Accounts option from the System Preferences
-application or you can optionally use the Workgroup Manager application which
-can be downloaded as part of the Server Admin Tools:
-
- http://www.apple.com/support/downloads/serveradmintools1047.html
-
-You should make sure that the `couchdb` user has a working POSIX shell and set
-the home directory to `/usr/local/var/lib/couchdb` which is the Apache CouchDB
-database directory.
-
-Change the ownership of the Apache CouchDB directories by running:
-
- chown -R couchdb:couchdb /usr/local/etc/couchdb
- chown -R couchdb:couchdb /usr/local/var/lib/couchdb
- chown -R couchdb:couchdb /usr/local/var/log/couchdb
- chown -R couchdb:couchdb /usr/local/var/run/couchdb
-
-Change the permission of the Apache CouchDB directories by running:
-
- chmod -R 0770 /usr/local/etc/couchdb
- chmod -R 0770 /usr/local/var/lib/couchdb
- chmod -R 0770 /usr/local/var/log/couchdb
- chmod -R 0770 /usr/local/var/run/couchdb
-
-Running Manually
-^^^^^^^^^^^^^^^^
-
-You can start the Apache CouchDB server by running:
-
- sudo -i -u couchdb couchdb -b
-
-This uses the `sudo` command to run the `couchdb` command as the `couchdb` user.
-
-When Apache CouchDB starts it should eventually display the following message:
-
- Apache CouchDB has started, time to relax.
-
-Relax.
-
-To check that everything has worked, point your web browser to:
-
- http://127.0.0.1:5984/_utils/index.html
-
-From here you should run the test suite.
+If you're having problems, skip to the next section.
Troubleshooting
-^^^^^^^^^^^^^^^
-
-If you're getting a cryptic error message, visit the wiki:
-
- http://wiki.apache.org/couchdb/Error_messages
-
-For general troubleshooting, visit the wiki:
-
- http://wiki.apache.org/couchdb/Troubleshooting
-
-Running as a Daemon
-^^^^^^^^^^^^^^^^^^^
-
-Note: These instructions assume you have created the `couchdb` user. See the
-specific system information included below to learn how to reconfigure this.
-
-Note: If any of these methods report a failure you can run the `couchdb` command
-manually to see the error messages it is displaying.
-
-The `/usr/local/etc/logrotate.d/couchdb` file is provided as a logrotate
-configuration that you can use to rotate Apache CouchDB's logs.
-
-SysV/BSD-style Systems
-++++++++++++++++++++++
-
-Depending on your system the `couchdb` init script will be installed into a
-direcory called `init.d` (for SysV-style systems) or `rc.d` (for BSD-style
-systems). These examples use the `[init.d|rc.d]` notation to indicate this.
+---------------
-You can control the Apache CouchDB daemon by running:
+For troubleshooting, see:
- /usr/local/etc/[init.d|rc.d]/couchdb [start|stop|restart|force-reload|status]
+ http://wiki.apache.org/couchdb/Troubleshooting
-If you wish to configure how the init script works, such as which user to run
-Apache CouchDB as, you must edit the `/usr/local/etc/default/couchdb` file as
-appropriate. If you are running the init script as a non-superuser you need to
-remove the line with the `COUCHDB_USER` setting.
+If you're getting a cryptic error message, see:
-If you wish the Apache CouchDB daemon to run as a system service you need to
-copy the `/usr/local/etc/[init.d|rc.d]/couchdb` script into your system wide
-`/etc/[init.d|rc.d]` directory and update your system configuration.
+ http://wiki.apache.org/couchdb/Error_messages
-You may be able to configure your system using the following command:
+For general help, see:
- sudo update-rc.d couchdb defaults
+ http://couchdb.apache.org/community/lists.html
-Consult your system documentation for more information.
+The mailing lists provide a wealth of support and knowledge for you to tap into.
+Feel free to drop by with your questions or discussion. See the official CouchDB
+website for more information about our community resources.
-Mac OS X
-++++++++
-You can use the `launchctl` command to control the Apache CouchDB daemon.
+Running the Testsuite
+---------------------
-You can load the launchd configuration by running:
+Run the testsuite for couch.js and jquery.couch.js by browsing to:
+
+    http://127.0.0.1:5984/_utils/spec/run.html
+
+It should work in at least Firefox >= 3.6 and Safari >= 4.0.4.
- sudo launchctl load /usr/local/Library/LaunchDaemons/org.apache.couchdb.plist
+Read more about JSpec here: http://jspec.info/
-You can stop the Apache CouchDB daemon by running:
+Troubleshooting
+~~~~~~~~~~~~~~~
- sudo launchctl unload /usr/local/Library/LaunchDaemons/org.apache.couchdb.plist
+ * If you change the specs but your changes have no effect, manually reload
+   the changed spec file in the browser.
-You can start Apache CouchDB by running:
+ * If the spec that tests Erlang views fails, make sure you have enabled
+   Erlang views as described at:
+
+       http://wiki.apache.org/couchdb/EnableErlangViews
- sudo launchctl start org.apache.couchdb
-
-You can restart Apache CouchDB by running:
-
- sudo launchctl stop org.apache.couchdb
-
-You can change the launchd configuration by running:
-
- open /usr/local/Library/LaunchDaemons/org.apache.couchdb.plist
-
-If you want the Apache CouchDB daemon to run at startup, copy the
-`/usr/local/Library/LaunchDaemons/org.apache.couchdb.plist` file to your system
-`/Library/LaunchDaemons` directory.
-
-Windows
-~~~~~~~
-
-The Windows build process is very similar to the erlang build process;
-indeed, we re-use some of their shell-scripts. Therefore, it is recommended
-you build erlang itself from sources - this will ensure that you have all the
-tools and environment necessary to build couch itself. A binary build of
-erlang should work for those in a hurry (see below), but it isn't really
-supported. See the end of these notes for information on building erlang
-which is relevant to couch.
-
-Build Tools
------------
-
-To build on Windows, you need the following tools:
-
-* cygwin - it isn't clear exactly which tools you need - select all
- 'development' tools. As mentioned above, if you can build erlang itself
- you have everything you need.
-
-* VS2008
-
-* Erlang - a built source distro of erlang is preferred. A binary
- distribution of Erlang is OK, but you will also need a source distribution
- for the cc.sh/link.sh etc wrapper shell scripts used by erlang - couch
- reuses these scripts in its build process and the license isn't compatible
- enough for us to clone them.
-
-Other tools:
-
-* Fetch the 'curl' sources and build them as per the instructions. The cygwin
- curl binaries are built with a different compiler so are no good.
-
-* Download the ICU binaries built with VS2008.
-
-* Download and build the version of spidermonkey that couch requires - at
-  time of writing this is 1.8. Different versions will not work (ie, at
-  time of writing, 1.8.1 does not work with couch).
-
-Build Environment
------------------
-
-Setup your environment:
-
-For the sake of everything you find sacred:
- set CYGWIN=nontsec
-BEFORE starting any cygwin environments. Set this variable globally. Without
-it you can expect all kinds of permissions-related problems.
-
-Execute the VC .bat files to setup your environment such that cl.exe etc are
-on your path. Start a cygwin shell.
-
-Check your environment:
- * which link -> should point at the MS linker.
- * which cl -> should point at the MS compiler.
-
-If you are building from a source version of erlang:
-
- Executing "eval `./otp_build env_win32`" as per the Erlang build
-  instructions will have set everything up, including the CC, LD etc
- variables. Do this even if erlang is already built and you are just
- building couch. This will have set the variables ERL_TOP, CC, LD, AD
- and a number of others.
-
-  Then, PATH must be adjusted such that 'which erl' lists the erl.exe
- you built rather than the 'erl' script in the erts/etc/win32/cygwin_tools
- directory - eg:
-
- $ export PATH=$ERL_TOP/release/win32/erts-5.7.2/bin:$PATH
-
-If you are building from a binary version of erlang:
-
- * set ERL_TOP to the top of the erlang directory
-
- Add to the PATH such that 'which erl' points at your erl.exe - eg:
-
- $ export PATH=$ERL_TOP/erts-5.6.5/bin
-
-  You must also ensure the cc.sh etc scripts from the Erlang source tree are
- on your PATH. Use "which cc.sh" to ensure the path is set correctly.
-
- Then set more variables:
- $ export CC=cc.sh
- $ export LD=ld.sh
- $ export AR=ar.sh
- $ export RC=rc.sh
-
-Set COUCHDB_TOP to the source directory.
-
-And we should be ready to bootstrap and build.
-
-Building
---------
-
-We start by bootstrapping:
-
- $ cd $COUCHDB_TOP
- $ ./bootstrap
- You have bootstrapped Apache CouchDB, time to relax.
-
- Run `./configure' to configure the source before you install.
- $
-
-Relax.
-
-Now we need to run a complicated configure command-line.
-
- $ ./configure \
- --with-js-include=/cygdrive/c/path_to_seamonkey_include \
- --with-js-lib=/cygdrive/c/path_to_seamonkey_lib \
- --with-win32-icu-binaries=/cygdrive/c/path_to_icu_binaries_root \
- --with-erlang=$ERL_TOP/release/win32/usr/include \
- --with-win32-curl=/cygdrive/c/path/to/curl/root/directory \
- --with-openssl-bin-dir=c:/openssl/bin \
- --with-msvc-redist-dir=c:/dir/with/vcredist_platform_executable \
- --prefix=$ERL_TOP/release/win32
-
-Relax, then relax some more, then get a beer and relax some more; the
-above command may take many many minutes to complete...
-
-Note that all paths must be in cygwin format. Those starting with $ERL_TOP
-can be entered literally, assuming ERL_TOP is set as described above.
-
-Notes:
- When building the installer, the necessary openssl binaries are pulled from
-  the directory pointed to by --with-openssl-bin-dir.
-
-Now we can build it and "install" it into the $ERL_TOP/release/win32 (or
-wherever you set --prefix to above) directory:
-
- $ make install
-
-Relax on your new couch:
-
- The $ERL_TOP/release/win32 directory is now ready to .zip up, be packaged
- by an installer, etc. To test it in-place, execute:
-
- $ $ERL_TOP/release/win32/bin/couchdb.bat
-
- and everything should work fine.
-
-To create an installer, execute:
-
- $ make dist
-
-and look for etc/windows/setup-couch*.exe. Note - only do this after
-a clean build, not after testing in-place - otherwise your test database and
-log files will be shipped!
-
-
-Additional Notes:
-
-Building erlang:
-* Follow the instructions as described. You do need openssl, but don't need
- the GUI tools. You may like to execute:
-
- echo "skipping gs" > lib/gs/SKIP
- echo "skipping ic" > lib/ic/SKIP
-
-  to skip them.
-
-* Ensure 'which link' points at the ms linker; one in /usr/bin may be found
- instead.
-
-* After executing './otp_build release -a', be sure to execute Install.exe in
- the release/win32 directory to setup the release/win32/bin dir correctly.
-
-Testing with Erlang
-^^^^^^^^^^^^^^^^^^^
-
-First, we need to get a copy of etap to run the tests.
-
- $ git clone git://github.com/ngerakines/etap.git
- $ cd etap
- $ make && sudo make install
-
-Now we should be able to run the etap test suite:
-
- $ cd /path/to/couchdb/
- $ make check
- ...
- prove test/etap/*.t
- test/etap/001-load........................ok
- test/etap/010-file-basics.................ok
- ...
- All tests successful.
- Files=28, Tests=363, 41 wallclock secs (...)
-
-Tests are also available to be run individually like so:
-
- $ ./test/etap/001-load.t
- # Current time local 2009-09-26 23:47:44
- # Using etap version "0.3.4"
- 1..39
- ok 1 - Loaded: couch_btree
- ...
Cryptographic Software Notice
-----------------------------
- This distribution includes cryptographic software. The country in
- which you currently reside may have restrictions on the import,
- possession, use, and/or re-export to another country, of
- encryption software. BEFORE using any encryption software, please
- check your country's laws, regulations and policies concerning the
- import, possession, or use, and re-export of encryption software, to
- see if this is permitted. See <http://www.wassenaar.org/> for more
- information.
-
- The U.S. Government Department of Commerce, Bureau of Industry and
- Security (BIS), has classified this software as Export Commodity
- Control Number (ECCN) 5D002.C.1, which includes information security
- software using or performing cryptographic functions with asymmetric
- algorithms. The form and manner of this Apache Software Foundation
- distribution makes it eligible for export under the License Exception
- ENC Technology Software Unrestricted (TSU) exception (see the BIS
- Export Administration Regulations, Section 740.13) for both object
- code and source code.
-
- The following provides more details on the included cryptographic
- software:
-
- CouchDB includes a HTTP client (ibrowse) with SSL functionality.
+This distribution includes cryptographic software. The country in which you
+currently reside may have restrictions on the import, possession, use, and/or
+re-export to another country, of encryption software. BEFORE using any
+encryption software, please check your country's laws, regulations and policies
+concerning the import, possession, or use, and re-export of encryption software,
+to see if this is permitted. See <http://www.wassenaar.org/> for more
+information.
+
+The U.S. Government Department of Commerce, Bureau of Industry and Security
+(BIS), has classified this software as Export Commodity Control Number (ECCN)
+5D002.C.1, which includes information security software using or performing
+cryptographic functions with asymmetric algorithms. The form and manner of this
+Apache Software Foundation distribution makes it eligible for export under the
+License Exception ENC Technology Software Unrestricted (TSU) exception (see the
+BIS Export Administration Regulations, Section 740.13) for both object code and
+source code.
+
+The following provides more details on the included cryptographic software:
+
+CouchDB includes an HTTP client (ibrowse) with SSL functionality.
diff --git a/THANKS b/THANKS
index 83f47153..13c0b8bf 100644
--- a/THANKS
+++ b/THANKS
@@ -10,7 +10,6 @@ suggesting improvements or submitting changes. Some of these people are:
* Sam Ruby <rubys@intertwingly.net>
* Carlos Valiente <superdupont@gmail.com>
* Till Klampaeckel <till@klampaeckel.de>
- * Benoit Chesneau <bchesneau@gmail.com>
* Jim Lindley <web@jimlindley.com>
* Yoan Blanc <yoan.blanc@gmail.com>
* Michael Gottesman <gottesmm@reed.edu>
@@ -28,7 +27,6 @@ suggesting improvements or submitting changes. Some of these people are:
* Brian Candler <B.Candler@pobox.com>
* Brad Anderson <brad@sankatygroup.com>
* Nick Gerakines <nick@gerakines.net>
- * Robert Newson <robert.newson@gmail.com>
* Bob Dionne <dionne@member.fsf.org>
* Kevin Ilchmann Jørgensen <kijmail@gmail.com>
* Dirkjan Ochtman <dirkjan@ochtman.nl>
@@ -40,7 +38,41 @@ suggesting improvements or submitting changes. Some of these people are:
* Joshua Bronson <jabronson@gmail.com>
* Kostis Sagonas <kostis@cs.ntua.gr>
* Matthew Hooker <mwhooker@gmail.com>
- * Filipe Manana <fdmanana@gmail.com>
* Ilia Cheishvili <ilia.cheishvili@gmail.com>
+ * Lena Herrmann <lena@zeromail.org>
+ * Jack Moffit <metajack@gmail.com>
+ * Damjan Georgievski <gdamjan@gmail.com>
+ * Jan Kassens <jan@kassens.net>
+ * James Marca <jmarca@translab.its.uci.edu>
+ * Matt Goodall <matt.goodall@gmail.com>
+ * Joel Clark <unsigned_char@yahoo.com>
+ * Matt Lyon <matt@flowerpowered.com>
+ * mikeal <mikeal.rogers@gmail.com>
+ * Randall Leeds <randall.leeds@gmail.com>
+ * Joscha Feth <joscha@feth.com>
+ * Jarrod Roberson <jarrod@vertigrated.com>
+ * Jae Kwon <jkwon.work@gmail.com>
+ * Gavin Sherry <swm@alcove.com.au>
+ * Timothy Smith <tim@couch.io>
+ * Martin Haaß <MartinHaass@gmx.net>
+ * Hans Ulrich Niedermann <hun@n-dimensional.de>
+ * Jason Smith <jhs@proven-corporation.com>
+ * Dmitry Unkovsky <oil.crayons@gmail.com>
+ * Zachary Zolton <zachary.zolton@gmail.com>
+ * Brian Jenkins <bonkydog@bonkydog.com>
+ * Paul Bonser <pib@paulbonser.com>
+ * Caleb Land <caleb.land@gmail.com>
+ * Juhani Ränkimies <juhani@juranki.com>
+ * Kev Jackson <foamdino@gmail.com>
+ * Jonathan D. Knezek <jdknezek@gmail.com>
+ * David Rose <doppler@gmail.com>
+ * Lim Yue Chuan <shasderias@gmail.com>
+ * David Davis <xantus@xantus.org>
+ * Klaus Trainer <klaus.trainer@web.de>
+ * Dale Harvey <dale@arandomurl.com>
+ * Juuso Väänänen <juuso@vaananen.org>
+ * Jeff Zellner <jeff.zellner@gmail.com>
+ * Benjamin Young <byoung@bigbluehat.com>
+ * Gabriel Farrell <gsf747@gmail.com>
For a list of authors see the `AUTHORS` file.
diff --git a/acinclude.m4.in b/acinclude.m4.in
index 3d854750..253831be 100644
--- a/acinclude.m4.in
+++ b/acinclude.m4.in
@@ -16,10 +16,10 @@ m4_define([LOCAL_PACKAGE_IDENTIFIER], [couchdb])
m4_define([LOCAL_PACKAGE_TARNAME], [apache-couchdb])
m4_define([LOCAL_PACKAGE_NAME], [Apache CouchDB])
m4_define([LOCAL_BUG_URI], [https://issues.apache.org/jira/browse/COUCHDB])
-m4_define([LOCAL_VERSION_MAJOR], [0])
-m4_define([LOCAL_VERSION_MINOR], [11])
+m4_define([LOCAL_VERSION_MAJOR], [1])
+m4_define([LOCAL_VERSION_MINOR], [1])
m4_define([LOCAL_VERSION_REVISION], [0])
-m4_define([LOCAL_VERSION_STAGE], [b])
+m4_define([LOCAL_VERSION_STAGE], [a])
m4_define([LOCAL_VERSION_RELEASE], [%release%])
m4_define([LOCAL_VERSION_PRIMARY],
[LOCAL_VERSION_MAJOR.LOCAL_VERSION_MINOR.LOCAL_VERSION_REVISION])
diff --git a/bin/couchdb.bat.tpl.in b/bin/couchdb.bat.tpl.in
index 2f16b2a7..48d78513 100644
--- a/bin/couchdb.bat.tpl.in
+++ b/bin/couchdb.bat.tpl.in
@@ -17,8 +17,10 @@ rem First change to the drive with the erlang bin directory
rem then change to the erlang bin directory
cd %~dp0
-rem Allow a different erlang executable (eg, werl) to be used.
-if "%ERL%x" == "x" set ERL=erl.exe
+rem Allow a different erlang executable (eg, erl) to be used.
+rem When using erl instead of werl, server restarts during test runs can fail
+rem intermittently. But using erl should be fine for production use.
+if "%ERL%x" == "x" set ERL=werl.exe
echo CouchDB %version% - prepare to relax...
%ERL% -sasl errlog_type error -s couch
diff --git a/bin/couchdb.tpl.in b/bin/couchdb.tpl.in
index bdea97fe..94d47439 100644
--- a/bin/couchdb.tpl.in
+++ b/bin/couchdb.tpl.in
@@ -15,6 +15,7 @@
BACKGROUND=false
DEFAULT_CONFIG_DIR=%localconfdir%/default.d
DEFAULT_CONFIG_FILE=%localconfdir%/%defaultini%
+ERL_START_OPTIONS="-sasl errlog_type error +K true +A 4"
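+# -sasl errlog_type error: log errors only; +K true: enable kernel polling;
+# +A 4: use an async thread pool of four (see erl(1))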
HEART_BEAT_TIMEOUT=11
HEART_COMMAND="%bindir%/%couchdb_command_name% -k"
INTERACTIVE=false
@@ -61,11 +62,19 @@ Usage: $basename [OPTION]
The $basename command runs the %package_name% server.
+Erlang is called with:
+
+ $ERL_START_OPTIONS
+
Erlang inherits the environment of this command.
-The exit status is 0 for success or 1 for failure.
+You can override these options using the environment:
+
+ ERL_AFLAGS, ERL_FLAGS, ERL_ZFLAGS
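+
+For example, to grow the async thread pool for a single run (an
+illustrative override; ERL_ZFLAGS is appended after the options above):
+
+  ERL_ZFLAGS="+A 16" couchdb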
-The \`-s' option will exit 0 for running and 1 for not running.
+See erl(1) for more information about the environment variables.
+
+The exit status is 0 for success or 1 for failure.
Options:
@@ -120,8 +129,10 @@ EOF
}
_add_config_dir () {
- for file in `find "$1" -mindepth 1 -maxdepth 1 -type f -name '*.ini'`; do
- _add_config_file $file
+ for file in "$1"/*.ini; do
+ if [ -r "$file" ]; then
+ _add_config_file "$file"
+ fi
done
}
@@ -211,7 +222,7 @@ start_couchdb () {
touch $PID_FILE
interactive_option="+Bd -noinput"
fi
- command="%ERL% $interactive_option -sasl errlog_type error +K true \
+ command="%ERL% $interactive_option $ERL_START_OPTIONS \
-env ERL_LIBS %localerlanglibdir% -couch_ini $start_arguments -s couch"
if test "$BACKGROUND" = "true" -a "$RECURSED" = "false"; then
$0 $background_start_arguments -b -r $RESPAWN_TIMEOUT -p $PID_FILE \
diff --git a/bootstrap b/bootstrap
index 8cfc2c01..0c576c40 100755
--- a/bootstrap
+++ b/bootstrap
@@ -33,7 +33,7 @@ mkdir -p build-aux
if test -z "$REVISION"; then
if test -d .git; then
- REVISION=`git log | head -1 | awk '{print $2}' | cut -b 1-8`-git
+ REVISION=`git log --pretty="format:%h" | head -1`-git
else
# default to svn
REVISION=`\`which svn\` info . 2> /dev/null | awk "/Revision:/{print \\$2}"`
@@ -56,9 +56,11 @@ fi
aclocal -I m4
autoheader -f
-automake -f -c -a --gnits 2>&1 | sed -e "/install/d"
+automake -f -a 2>&1 | sed -e "/install/d"
autoconf -f
+ln -f -s "`dirname \`readlink build-aux/missing\``/INSTALL"
+
cat << EOF
You have bootstrapped Apache CouchDB, time to relax.
diff --git a/configure.ac b/configure.ac
index 679c6cae..b776a258 100644
--- a/configure.ac
+++ b/configure.ac
@@ -10,14 +10,13 @@ dnl WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
dnl License for the specific language governing permissions and limitations
dnl under the License.
-m4_include([m4/ac_check_icu.m4])
-
AC_INIT([LOCAL_PACKAGE_NAME], [LOCAL_VERSION], [], [LOCAL_PACKAGE_TARNAME])
AC_PREREQ([2.59])
AC_CONFIG_SRCDIR([CHANGES])
AC_CONFIG_AUX_DIR([build-aux])
+AC_CONFIG_MACRO_DIR([m4])
AM_CONFIG_HEADER([config.h])
@@ -197,7 +196,7 @@ AC_ARG_WITH([win32-icu-binaries], [AC_HELP_STRING([--with-win32-icu-binaries=PAT
ICU_LOCAL_LDFLAGS="-L$withval/lib"
ICU_LOCAL_BIN=$withval/bin
], [
- AC_CHECK_ICU([3])
+ AC_CHECK_ICU([3.4.1])
ICU_LOCAL_CFLAGS=`$ICU_CONFIG --cppflags-searchpath`
ICU_LOCAL_LDFLAGS=`$ICU_CONFIG --ldflags-searchpath`
ICU_LOCAL_BIN=
@@ -260,6 +259,12 @@ if test `echo $version | ${AWK} "{print \\$2}"` -eq 6; then
fi
fi
+has_crypto=`${ERL} -eval "case application:load(crypto) of ok -> ok; _ -> exit(no_crypto) end." -noshell -s init stop`
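+# (a successful load prints nothing; any output from erl means the crypto
+# application could not be loaded)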
+
+if test -n "$has_crypto"; then
+ AC_MSG_ERROR([Could not find the Erlang crypto library. Has Erlang been compiled with OpenSSL support?])
+fi
+
AC_PATH_PROG([ERLC], [erlc])
if test x${ERLC} = x; then
@@ -392,6 +397,7 @@ AC_CONFIG_FILES([etc/launchd/org.apache.couchdb.plist.tpl])
AC_CONFIG_FILES([etc/launchd/Makefile])
AC_CONFIG_FILES([etc/logrotate.d/couchdb.tpl])
AC_CONFIG_FILES([etc/logrotate.d/Makefile])
+AC_CONFIG_FILES([etc/windows/Makefile])
AC_CONFIG_FILES([etc/Makefile])
AC_CONFIG_FILES([share/Makefile])
AC_CONFIG_FILES([src/Makefile])
diff --git a/etc/Makefile.am b/etc/Makefile.am
index f83b0efb..148f7015 100644
--- a/etc/Makefile.am
+++ b/etc/Makefile.am
@@ -10,7 +10,7 @@
## License for the specific language governing permissions and limitations under
## the License.
-SUBDIRS = couchdb default init launchd logrotate.d
+SUBDIRS = couchdb default init launchd logrotate.d windows
default_sysconf_data = logrotate.d/couchdb
@@ -25,10 +25,15 @@ endif
nobase_sysconf_DATA = $(default_sysconf_data) $(init_sysconf_data)
-EXTRA_DIST = $(init_sysconf_data)
+EXTRA_DIST = \
+ default/couchdb \
+ windows/couchdb.iss.tpl \
+ windows/README.txt.tpl
if WINDOWS
-EXTRA_DIST += windows/setup-couchdb-@version@.exe.md5
+EXTRA_DIST += \
+ windows/setup-couchdb-@version@.exe.md5 \
+ windows/setup-couchdb-@version@.exe.sha
endif
CLEANFILES = $(init_DATA) $(default_sysconf_data) $(launchd_DATA)
@@ -72,7 +77,7 @@ if WINDOWS
# README.txt has \n line endings in the repo and must have \r\n
# when installed as notepad is used to view it.
# Also: the targets below don't seem to update after an svn-up (which
-# changes the version string in the generated files) so we trick make
+# changes the version string in the generated files) so we trick make
# into always building it with the FORCE pattern...
windows/README.txt: windows/README.txt.tpl FORCE
sed -e "s|%package_name%|@package_name@|g" \
@@ -93,12 +98,14 @@ windows/couchdb.iss: windows/couchdb.iss.tpl FORCE
# The installer depends on all files, not just the source .iss file,
# so we trick make into always building it with the FORCE pattern...
windows/setup-couchdb-@version@.exe: windows/couchdb.iss windows/README.txt FORCE
- $(INNO_COMPILER_EXECUTABLE) /q $<
+ "$(INNO_COMPILER_EXECUTABLE)" /q $<
@echo Windows Installer is at: `cygpath -a -w windows/setup-couchdb-@version@.exe`
windows/setup-couchdb-@version@.exe.md5: windows/setup-couchdb-@version@.exe
- md5sum $< > $@
- @echo Windows Installer is at: `cygpath -a -w windows/setup-couchdb-@version@.exe`
+ cd windows && md5sum ./$(<F) > $(@F)
+
+windows/setup-couchdb-@version@.exe.sha: windows/setup-couchdb-@version@.exe
+ cd windows && sha1sum ./$(<F) > $(@F)
FORCE:
diff --git a/etc/couchdb/Makefile.am b/etc/couchdb/Makefile.am
index ded52084..9367ceac 100644
--- a/etc/couchdb/Makefile.am
+++ b/etc/couchdb/Makefile.am
@@ -32,6 +32,7 @@ default.ini: default.ini.tpl
-e "s|%localbuilddatadir%|../share/couchdb|g" \
-e "s|%localstatelibdir%|../var/lib/couchdb|g" \
-e "s|%localstatelogdir%|../var/log/couchdb|g" \
+ -e "s|%localstaterundir%|../var/run/couchdb|g" \
-e "s|%couchprivlibdir%|../lib/couch-$(version)/priv/lib|g" \
-e "s|%couchjs_command_name%|couchjs.exe|g" \
< $< > $@
@@ -43,6 +44,7 @@ default.ini: default.ini.tpl
-e "s|%localbuilddatadir%|$(localdatadir)|g" \
-e "s|%localstatelibdir%|$(localstatelibdir)|g" \
-e "s|%localstatelogdir%|$(localstatelogdir)|g" \
+ -e "s|%localstaterundir%|$(localstaterundir)|g" \
-e "s|%couchprivlibdir%|$(couchprivlibdir)|g" \
-e "s|%couchjs_command_name%|$(couchjs_command_name)|g" \
< $< > $@
@@ -55,6 +57,7 @@ default_dev.ini: default.ini.tpl
-e "s|%localbuilddatadir%|$(abs_top_builddir)/share|g" \
-e "s|%localstatelibdir%|$(abs_top_builddir)/tmp/lib|g" \
-e "s|%localstatelogdir%|$(abs_top_builddir)/tmp/log|g" \
+ -e "s|%localstaterundir%|$(abs_top_builddir)/tmp/run|g" \
-e "s|%couchprivlibdir%|$(devcouchprivlibdir)|g" \
-e "s|%couchjs_command_name%|$(couchjs_dev_command_name)|g" \
< $< > $@
diff --git a/etc/couchdb/default.ini.tpl.in b/etc/couchdb/default.ini.tpl.in
index 409bfe98..ef5e4fba 100644
--- a/etc/couchdb/default.ini.tpl.in
+++ b/etc/couchdb/default.ini.tpl.in
@@ -11,22 +11,32 @@ max_attachment_chunk_size = 4294967296 ; 4GB
os_process_timeout = 5000 ; 5 seconds. for view and external servers.
max_dbs_open = 100
delayed_commits = true ; set this to false to ensure an fsync before 201 Created is returned
-batch_save_size = 1000 ; number of docs at which to save a batch
-batch_save_interval = 1000 ; milliseconds after which to save batches
+uri_file = %localstaterundir%/couch.uri
[httpd]
port = 5984
bind_address = 127.0.0.1
+max_connections = 2048
authentication_handlers = {couch_httpd_oauth, oauth_authentication_handler}, {couch_httpd_auth, cookie_authentication_handler}, {couch_httpd_auth, default_authentication_handler}
default_handler = {couch_httpd_db, handle_request}
+secure_rewrites = true
+vhost_global_handlers = _utils, _uuids, _session, _oauth, _users
+allow_jsonp = false
+
+[ssl]
+port = 6984
[log]
file = %localstatelogdir%/couch.log
level = info
+include_sasl = true
[couch_httpd_auth]
-authentication_db = users
+authentication_db = _users
+authentication_redirect = /_utils/session.html
require_valid_user = false
+timeout = 43200 ; (defaults to 12 hours) number of seconds before automatic logout
+auth_cache_size = 50 ; size is number of cache entries
[query_servers]
javascript = %bindir%/%couchjs_command_name% %localbuilddatadir%/server/main.js
@@ -36,11 +46,7 @@ javascript = %bindir%/%couchjs_command_name% %localbuilddatadir%/server/main.js
; please let us know on the mailing list so we can fine tune the heuristic.
[query_server_config]
reduce_limit = true
-
-; enable external as an httpd handler, then link it with commands here.
-; note, this api is still under consideration.
-; [external]
-; mykey = /path/to/mycommand
+os_process_limit = 25
[daemons]
view_manager={couch_view, start_link, []}
@@ -51,6 +57,10 @@ httpd={couch_httpd, start_link, []}
stats_aggregator={couch_stats_aggregator, start, []}
stats_collector={couch_stats_collector, start, []}
uuids={couch_uuids, start, []}
+auth_cache={couch_auth_cache, start_link, []}
+rep_db_changes_listener={couch_rep_db_listener, start_link, []}
+vhosts={couch_httpd_vhost, start_link, []}
+os_daemons={couch_os_daemons, start_link, []}
[httpd_global_handlers]
/ = {couch_httpd_misc_handlers, handle_welcome_req, <<"Welcome">>}
@@ -65,7 +75,6 @@ _uuids = {couch_httpd_misc_handlers, handle_uuids_req}
_restart = {couch_httpd_misc_handlers, handle_restart_req}
_stats = {couch_httpd_stats_handlers, handle_stats_req}
_log = {couch_httpd_misc_handlers, handle_log_req}
-_sleep = {couch_httpd_misc_handlers, handle_sleep_req}
_session = {couch_httpd_auth, handle_session_req}
_oauth = {couch_httpd_oauth, handle_oauth_req}
@@ -87,8 +96,21 @@ _view = {couch_httpd_view, handle_view_req}
_show = {couch_httpd_show, handle_doc_show_req}
_list = {couch_httpd_show, handle_view_list_req}
_info = {couch_httpd_db, handle_design_info_req}
+_rewrite = {couch_httpd_rewrite, handle_rewrite_req}
_update = {couch_httpd_show, handle_doc_update_req}
+; enable external as an httpd handler, then link it with commands here.
+; note, this api is still under consideration.
+; [external]
+; mykey = /path/to/mycommand
+
+; Here you can set up commands for CouchDB to manage
+; while it is alive. It will attempt to restart each
+; command if it exits.
+; [os_daemons]
+; some_daemon_name = /path/to/script -with args
+
+
[uuids]
; Known algorithms:
; random - 128 bits of random awesome
@@ -99,7 +121,7 @@ _update = {couch_httpd_show, handle_doc_update_req}
; random prefix is regenerated and the process starts over.
; utc_random - Time since Jan 1, 1970 UTC with microseconds
; First 14 characters are the time in hex. Last 18 are random.
-algorithm = random
+algorithm = sequential
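+; (sequential ids cluster inserts toward the end of the doc btree, which
+; generally means less index churn than fully random ids)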
[stats]
; rate is in milliseconds
@@ -107,3 +129,17 @@ rate = 1000
; sample intervals are in seconds
samples = [0, 60, 300, 900]
+[attachments]
+compression_level = 8 ; from 1 (lowest, fastest) to 9 (highest, slowest), 0 to disable compression
+compressible_types = text/*, application/javascript, application/json, application/xml
+
+[replicator]
+db = _replicator
+max_http_sessions = 10
+max_http_pipeline_size = 10
+; set to true to validate peer certificates
+verify_ssl_certificates = false
+; file containing a list of peer trusted certificates (PEM format)
+; ssl_trusted_certificates_file = /etc/ssl/certs/ca-certificates.crt
+; maximum peer certificate depth (must be set even if certificate validation is off)
+ssl_certificate_max_depth = 3
diff --git a/etc/couchdb/local.ini b/etc/couchdb/local.ini
index 96fcdc76..1c4060ab 100644
--- a/etc/couchdb/local.ini
+++ b/etc/couchdb/local.ini
@@ -13,9 +13,49 @@
; Uncomment next line to trigger basic-auth popup on unauthorized requests.
;WWW-Authenticate = Basic realm="administrator"
+; Uncomment next line to set the configuration modification whitelist. Only
+; whitelisted values may be changed via the /_config URLs. To allow the admin
+; to change this value over HTTP, remember to include {httpd,config_whitelist}
+; itself. Excluding it from the list would require editing this file to update
+; the whitelist.
+;config_whitelist = [{httpd,config_whitelist}, {log,level}, {etc,etc}]
+
+[httpd_global_handlers]
+;_google = {couch_httpd_proxy, handle_proxy_req, <<"http://www.google.com">>}
+
+[couch_httpd_auth]
+; If you set this to true, you should also uncomment the WWW-Authenticate line
+; above. If you don't configure a WWW-Authenticate header, CouchDB will send
+; Basic realm="server" to prevent you from getting logged out.
+; require_valid_user = false
+
[log]
;level = debug
+[os_daemons]
+; For any commands listed here, CouchDB will attempt to ensure that
+; the processes remain alive while CouchDB runs, and will shut them
+; down when CouchDB exits.
+;foo = /path/to/command -with args
+
+[daemons]
+; enable SSL support by uncommenting the following line and supplying the PEMs below.
+; the default ssl port CouchDB listens on is 6984
+; httpsd = {couch_httpd, start_link, [https]}
+
+[ssl]
+;cert_file = /full/path/to/server_cert.pem
+;key_file = /full/path/to/server_key.pem
+
+; To enable Virtual Hosts in CouchDB, add a vhost = path directive. All requests to
+; the Virtual Host will be redirected to the path. In the example below all requests
+; to http://example.com/ are redirected to /database.
+; If you run CouchDB on a specific port, include the port number in the vhost:
+; example.com:5984 = /database
+
+[vhosts]
+;example.com = /database/
+
[update_notification]
;unique notifier name=/full/path/to/exe -with "cmd line arg"
diff --git a/etc/init/couchdb.tpl.in b/etc/init/couchdb.tpl.in
index 8afd5d62..3b8d17ea 100644
--- a/etc/init/couchdb.tpl.in
+++ b/etc/init/couchdb.tpl.in
@@ -141,7 +141,7 @@ parse_script_option_list () {
log_end_msg $SCRIPT_ERROR
fi
;;
- restart|force-reload)
+ restart)
log_daemon_msg "Restarting $DESCRIPTION" $NAME
if stop_couchdb; then
if start_couchdb; then
@@ -158,7 +158,7 @@ parse_script_option_list () {
;;
*)
cat << EOF >&2
-Usage: $SCRIPT_NAME {start|stop|restart|force-reload|status}
+Usage: $SCRIPT_NAME {start|stop|restart|status}
EOF
exit $SCRIPT_ERROR
;;
diff --git a/etc/windows/Makefile.am b/etc/windows/Makefile.am
new file mode 100644
index 00000000..5b4faae0
--- /dev/null
+++ b/etc/windows/Makefile.am
@@ -0,0 +1,13 @@
+## Licensed under the Apache License, Version 2.0 (the "License"); you may not
+## use this file except in compliance with the License. You may obtain a copy of
+## the License at
+##
+## http://www.apache.org/licenses/LICENSE-2.0
+##
+## Unless required by applicable law or agreed to in writing, software
+## distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+## WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+## License for the specific language governing permissions and limitations under
+## the License.
+
+## This file intentionally left blank.
diff --git a/license.skip b/license.skip
index abd8f8cb..a7aa6ec3 100644
--- a/license.skip
+++ b/license.skip
@@ -1,15 +1,30 @@
\.svn
+^AUTHORS
+^BUGS
+^CHANGES
+^DEVELOPERS
+^DEVELOPERS.gz
+^INSTALL
+^INSTALL.Unix
+^INSTALL.Unix.gz
+^INSTALL.Windows
+^INSTALL.Windows.gz
+^INSTALL.gz
+^LICENSE.gz
+^Makefile
+^Makefile.in
+^NEWS
+^NOTICE
+^README
+^THANKS
^aclocal.m4
^apache-couchdb-*
-^AUTHORS
^autom4te.cache/*
-^bin/couchdb.1
-^bin/couchjs.1
^bin/Makefile
^bin/Makefile.in
-^BUGS
+^bin/couchdb.1
+^bin/couchjs.1
^build-aux/*
-^CHANGES
^config.*
^configure
^couchdb.stderr
@@ -19,76 +34,73 @@
^erl_crash.dump
^etc/Makefile
^etc/Makefile.in
-^etc/couchdb/default*
-^etc/couchdb/local*
^etc/couchdb/Makefile
^etc/couchdb/Makefile.in
-^etc/default/couchdb
+^etc/couchdb/default*
+^etc/couchdb/local*
^etc/default/Makefile
^etc/default/Makefile.in
+^etc/default/couchdb
^etc/init/Makefile
^etc/init/Makefile.in
-^etc/launchd/org.apache.couchdb.plist.*
^etc/launchd/Makefile
^etc/launchd/Makefile.in
-^etc/logrotate.d/couchdb*
+^etc/launchd/org.apache.couchdb.plist.*
^etc/logrotate.d/Makefile
^etc/logrotate.d/Makefile.in
+^etc/logrotate.d/couchdb*
+^etc/windows/Makefile
^etc/windows/README.txt.tpl
^libtool
^license.skip
-^m4/ac_check_curl.m4*
-^m4/ac_check_icu.m4*
-^Makefile
-^Makefile.in
-^NEWS
-^NOTICE
-^README
+^m4/*
^share/Makefile
^share/Makefile.in
+^share/server/json2.js
^share/server/mimeparse.js
^share/www/favicon.ico
^share/www/image/*
^share/www/script/jquery.*
^share/www/script/json2.js
+^share/www/script/jspec/*
^share/www/script/sha1.js
+^share/www/script/base64.js
^share/www/script/test/lorem*
^src/Makefile
^src/Makefile.in
^src/couchdb/.*beam
-^src/couchdb/couch.app.tpl.in
^src/couchdb/.deps/*
-^src/couchdb/couch.app*
^src/couchdb/Makefile
^src/couchdb/Makefile.in
-^src/couchdb/priv/.deps/*
+^src/couchdb/couch.app*
+^src/couchdb/couch.app.tpl.in
^src/couchdb/priv/.*o
+^src/couchdb/priv/.deps/*
^src/couchdb/priv/Makefile
^src/couchdb/priv/Makefile.in
+^src/couchdb/priv/couch_icu_driver.la
^src/couchdb/priv/couchjs
^src/couchdb/priv/couchspawnkillable
-^src/couchdb/priv/couch_icu_driver.la
^src/couchdb/priv/stat_descriptions.cfg
^src/erlang-oauth/*
^src/etap/*
^src/ibrowse/*
^src/mochiweb/*
^stamp-h1
+^test/Makefile
+^test/Makefile.in
^test/bench/Makefile
^test/bench/Makefile.in
-^test/etap/temp.*
^test/etap/.*beam
-^test/local.ini
-^test/Makefile
-^test/Makefile.in
^test/etap/Makefile
^test/etap/Makefile.in
+^test/etap/temp.*
^test/javascript/Makefile
^test/javascript/Makefile.in
+^test/local.ini
^test/view_server/Makefile
^test/view_server/Makefile.in
^tmp/*
-^THANKS
^utils/Makefile
^utils/Makefile.in
^var/Makefile
diff --git a/share/Makefile.am b/share/Makefile.am
index 694b89b7..77c94ea3 100644
--- a/share/Makefile.am
+++ b/share/Makefile.am
@@ -13,6 +13,7 @@
JS_FILE = server/main.js
JS_FILE_COMPONENTS = \
+ server/json2.js \
server/filter.js \
server/mimeparse.js \
server/render.js \
@@ -33,17 +34,26 @@ CLEANFILES = $(JS_FILE)
EXTRA_DIST = $(JS_FILE_COMPONENTS) $(JS_FILE_COMPONENTS_LAST)
+nobase_localdata_SCRIPTS = \
+ $(JS_FILE)
+
nobase_dist_localdata_DATA = \
- $(JS_FILE) \
www/config.html \
www/couch_tests.html \
www/custom_test.html \
www/database.html \
- www/dialog/_compact_database.html \
- www/dialog/_view_cleanup.html \
+ www/session.html \
+ www/dialog/_admin_party.html \
+ www/dialog/_compact_cleanup.html \
+ www/dialog/_create_admin.html \
+ www/dialog/_login.html \
+ www/dialog/_signup.html \
www/dialog/_create_database.html \
+ www/dialog/_create_config.html \
www/dialog/_delete_database.html \
www/dialog/_delete_document.html \
+ www/dialog/_database_security.html \
+ www/dialog/_share_test_reports.html \
www/dialog/_save_view_as.html \
www/dialog/_upload_attachment.html \
www/document.html \
@@ -57,6 +67,7 @@ nobase_dist_localdata_DATA = \
www/image/delete.png \
www/image/grippie.gif \
www/image/hgrad.gif \
+ www/image/key.png \
www/image/load.png \
www/image/logo.png \
www/image/order-asc.gif \
@@ -93,14 +104,21 @@ nobase_dist_localdata_DATA = \
www/script/jquery.resizer.js \
www/script/jquery.suggest.js \
www/script/json2.js \
+ www/script/jspec/jspec.css \
+ www/script/jspec/jspec.jquery.js \
+ www/script/jspec/jspec.js \
+ www/script/jspec/jspec.xhr.js \
www/script/oauth.js \
www/script/sha1.js \
+ www/script/base64.js \
www/script/test/all_docs.js \
www/script/test/attachments.js \
www/script/test/attachments_multipart.js \
www/script/test/attachment_names.js \
www/script/test/attachment_paths.js \
+ www/script/test/attachment_ranges.js \
www/script/test/attachment_views.js \
+ www/script/test/auth_cache.js \
www/script/test/basics.js \
www/script/test/batch_save.js \
www/script/test/bulk_docs.js \
@@ -127,34 +145,52 @@ nobase_dist_localdata_DATA = \
www/script/test/lorem.txt \
www/script/test/lorem_b64.txt \
www/script/test/lots_of_docs.js \
+ www/script/test/method_override.js \
www/script/test/multiple_rows.js \
www/script/test/oauth.js \
+ www/script/test/proxyauth.js \
www/script/test/purge.js \
+ www/script/test/reader_acl.js \
www/script/test/recreate_doc.js \
www/script/test/reduce.js \
www/script/test/reduce_builtin.js \
www/script/test/reduce_false.js \
www/script/test/reduce_false_temp.js \
www/script/test/replication.js \
+ www/script/test/replicator_db.js \
www/script/test/rev_stemming.js \
+ www/script/test/rewrite.js \
www/script/test/security_validation.js \
www/script/test/show_documents.js \
www/script/test/stats.js \
www/script/test/update_documents.js \
+ www/script/test/users_db.js \
www/script/test/utf8.js \
www/script/test/uuids.js \
www/script/test/view_collation.js \
www/script/test/view_collation_raw.js \
www/script/test/view_conflicts.js \
+ www/script/test/view_compaction.js \
www/script/test/view_errors.js \
www/script/test/view_include_docs.js \
www/script/test/view_multi_key_all_docs.js \
www/script/test/view_multi_key_design.js \
www/script/test/view_multi_key_temp.js \
www/script/test/view_offsets.js \
+ www/script/test/view_update_seq.js \
www/script/test/view_pagination.js \
www/script/test/view_sandboxing.js \
www/script/test/view_xml.js \
+ www/spec/couch_js_class_methods_spec.js \
+ www/spec/couch_js_instance_methods_1_spec.js \
+ www/spec/couch_js_instance_methods_2_spec.js \
+ www/spec/couch_js_instance_methods_3_spec.js \
+ www/spec/custom_helpers.js \
+ www/spec/jquery_couch_js_class_methods_spec.js \
+ www/spec/jquery_couch_js_instance_methods_1_spec.js \
+ www/spec/jquery_couch_js_instance_methods_2_spec.js \
+ www/spec/jquery_couch_js_instance_methods_3_spec.js \
+ www/spec/run.html \
www/status.html \
www/style/layout.css \
www/_sidebar.html
diff --git a/share/server/filter.js b/share/server/filter.js
index 8ba77e64..1e8556a4 100644
--- a/share/server/filter.js
+++ b/share/server/filter.js
@@ -15,9 +15,8 @@ var Filter = {
var results = [];
var docs = args[0];
var req = args[1];
- var userCtx = args[2];
for (var i=0; i < docs.length; i++) {
- results.push((fun.apply(ddoc, [docs[i], req, userCtx]) && true) || false);
+ results.push((fun.apply(ddoc, [docs[i], req]) && true) || false);
};
respond([true, results]);
}
diff --git a/share/server/json2.js b/share/server/json2.js
new file mode 100644
index 00000000..a1a3b170
--- /dev/null
+++ b/share/server/json2.js
@@ -0,0 +1,482 @@
+/*
+ http://www.JSON.org/json2.js
+ 2010-03-20
+
+ Public Domain.
+
+ NO WARRANTY EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK.
+
+ See http://www.JSON.org/js.html
+
+
+ This code should be minified before deployment.
+ See http://javascript.crockford.com/jsmin.html
+
+ USE YOUR OWN COPY. IT IS EXTREMELY UNWISE TO LOAD CODE FROM SERVERS YOU DO
+ NOT CONTROL.
+
+
+ This file creates a global JSON object containing two methods: stringify
+ and parse.
+
+ JSON.stringify(value, replacer, space)
+ value any JavaScript value, usually an object or array.
+
+ replacer an optional parameter that determines how object
+ values are stringified for objects. It can be a
+ function or an array of strings.
+
+ space an optional parameter that specifies the indentation
+ of nested structures. If it is omitted, the text will
+ be packed without extra whitespace. If it is a number,
+ it will specify the number of spaces to indent at each
+ level. If it is a string (such as '\t' or '&nbsp;'),
+ it contains the characters used to indent at each level.
+
+ This method produces a JSON text from a JavaScript value.
+
+ When an object value is found, if the object contains a toJSON
+ method, its toJSON method will be called and the result will be
+ stringified. A toJSON method does not serialize: it returns the
+ value represented by the name/value pair that should be serialized,
+ or undefined if nothing should be serialized. The toJSON method
+ will be passed the key associated with the value, and this will be
+ bound to the value
+
+ For example, this would serialize Dates as ISO strings.
+
+ Date.prototype.toJSON = function (key) {
+ function f(n) {
+ // Format integers to have at least two digits.
+ return n < 10 ? '0' + n : n;
+ }
+
+ return this.getUTCFullYear() + '-' +
+ f(this.getUTCMonth() + 1) + '-' +
+ f(this.getUTCDate()) + 'T' +
+ f(this.getUTCHours()) + ':' +
+ f(this.getUTCMinutes()) + ':' +
+ f(this.getUTCSeconds()) + 'Z';
+ };
+
+ You can provide an optional replacer method. It will be passed the
+ key and value of each member, with this bound to the containing
+ object. The value that is returned from your method will be
+ serialized. If your method returns undefined, then the member will
+ be excluded from the serialization.
+
+ If the replacer parameter is an array of strings, then it will be
+ used to select the members to be serialized. It filters the results
+ such that only members with keys listed in the replacer array are
+ stringified.
+
+ Values that do not have JSON representations, such as undefined or
+ functions, will not be serialized. Such values in objects will be
+ dropped; in arrays they will be replaced with null. You can use
+ a replacer function to replace those with JSON values.
+ JSON.stringify(undefined) returns undefined.
+
+ The optional space parameter produces a stringification of the
+ value that is filled with line breaks and indentation to make it
+ easier to read.
+
+ If the space parameter is a non-empty string, then that string will
+ be used for indentation. If the space parameter is a number, then
+ the indentation will be that many spaces.
+
+ Example:
+
+ text = JSON.stringify(['e', {pluribus: 'unum'}]);
+ // text is '["e",{"pluribus":"unum"}]'
+
+
+ text = JSON.stringify(['e', {pluribus: 'unum'}], null, '\t');
+ // text is '[\n\t"e",\n\t{\n\t\t"pluribus": "unum"\n\t}\n]'
+
+ text = JSON.stringify([new Date()], function (key, value) {
+ return this[key] instanceof Date ?
+ 'Date(' + this[key] + ')' : value;
+ });
+ // text is '["Date(---current time---)"]'
+
+
+ JSON.parse(text, reviver)
+ This method parses a JSON text to produce an object or array.
+ It can throw a SyntaxError exception.
+
+ The optional reviver parameter is a function that can filter and
+ transform the results. It receives each of the keys and values,
+ and its return value is used instead of the original value.
+ If it returns what it received, then the structure is not modified.
+ If it returns undefined then the member is deleted.
+
+ Example:
+
+ // Parse the text. Values that look like ISO date strings will
+ // be converted to Date objects.
+
+ myData = JSON.parse(text, function (key, value) {
+ var a;
+ if (typeof value === 'string') {
+ a =
+/^(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2}(?:\.\d*)?)Z$/.exec(value);
+ if (a) {
+ return new Date(Date.UTC(+a[1], +a[2] - 1, +a[3], +a[4],
+ +a[5], +a[6]));
+ }
+ }
+ return value;
+ });
+
+ myData = JSON.parse('["Date(09/09/2001)"]', function (key, value) {
+ var d;
+ if (typeof value === 'string' &&
+ value.slice(0, 5) === 'Date(' &&
+ value.slice(-1) === ')') {
+ d = new Date(value.slice(5, -1));
+ if (d) {
+ return d;
+ }
+ }
+ return value;
+ });
+
+
+ This is a reference implementation. You are free to copy, modify, or
+ redistribute.
+*/
+
+/*jslint evil: true, strict: false */
+
+/*members "", "\b", "\t", "\n", "\f", "\r", "\"", JSON, "\\", apply,
+ call, charCodeAt, getUTCDate, getUTCFullYear, getUTCHours,
+ getUTCMinutes, getUTCMonth, getUTCSeconds, hasOwnProperty, join,
+ lastIndex, length, parse, prototype, push, replace, slice, stringify,
+ test, toJSON, toString, valueOf
+*/
+
+
+// Create a JSON object only if one does not already exist. We create the
+// methods in a closure to avoid creating global variables.
+
+if (!this.JSON) {
+ this.JSON = {};
+}
+
+(function () {
+
+ function f(n) {
+ // Format integers to have at least two digits.
+ return n < 10 ? '0' + n : n;
+ }
+
+ if (typeof Date.prototype.toJSON !== 'function') {
+
+ Date.prototype.toJSON = function (key) {
+
+ return isFinite(this.valueOf()) ?
+ this.getUTCFullYear() + '-' +
+ f(this.getUTCMonth() + 1) + '-' +
+ f(this.getUTCDate()) + 'T' +
+ f(this.getUTCHours()) + ':' +
+ f(this.getUTCMinutes()) + ':' +
+ f(this.getUTCSeconds()) + 'Z' : null;
+ };
+
+ String.prototype.toJSON =
+ Number.prototype.toJSON =
+ Boolean.prototype.toJSON = function (key) {
+ return this.valueOf();
+ };
+ }
+
+ var cx = /[\u0000\u00ad\u0600-\u0604\u070f\u17b4\u17b5\u200c-\u200f\u2028-\u202f\u2060-\u206f\ufeff\ufff0-\uffff]/g,
+ escapable = /[\\\"\x00-\x1f\x7f-\x9f\u00ad\u0600-\u0604\u070f\u17b4\u17b5\u200c-\u200f\u2028-\u202f\u2060-\u206f\ufeff\ufff0-\uffff]/g,
+ gap,
+ indent,
+ meta = { // table of character substitutions
+ '\b': '\\b',
+ '\t': '\\t',
+ '\n': '\\n',
+ '\f': '\\f',
+ '\r': '\\r',
+ '"' : '\\"',
+ '\\': '\\\\'
+ },
+ rep;
+
+
+ function quote(string) {
+
+// If the string contains no control characters, no quote characters, and no
+// backslash characters, then we can safely slap some quotes around it.
+// Otherwise we must also replace the offending characters with safe escape
+// sequences.
+
+ escapable.lastIndex = 0;
+ return escapable.test(string) ?
+ '"' + string.replace(escapable, function (a) {
+ var c = meta[a];
+ return typeof c === 'string' ? c :
+ '\\u' + ('0000' + a.charCodeAt(0).toString(16)).slice(-4);
+ }) + '"' :
+ '"' + string + '"';
+ }
+
+
+ function str(key, holder) {
+
+// Produce a string from holder[key].
+
+ var i, // The loop counter.
+ k, // The member key.
+ v, // The member value.
+ length,
+ mind = gap,
+ partial,
+ value = holder[key];
+
+// If the value has a toJSON method, call it to obtain a replacement value.
+
+ if (value && typeof value === 'object' &&
+ typeof value.toJSON === 'function') {
+ value = value.toJSON(key);
+ }
+
+// If we were called with a replacer function, then call the replacer to
+// obtain a replacement value.
+
+ if (typeof rep === 'function') {
+ value = rep.call(holder, key, value);
+ }
+
+// What happens next depends on the value's type.
+
+ switch (typeof value) {
+ case 'string':
+ return quote(value);
+
+ case 'number':
+
+// JSON numbers must be finite. Encode non-finite numbers as null.
+
+ return isFinite(value) ? String(value) : 'null';
+
+ case 'boolean':
+ case 'null':
+
+// If the value is a boolean or null, convert it to a string. Note:
+// typeof null does not produce 'null'. The case is included here in
+// the remote chance that this gets fixed someday.
+
+ return String(value);
+
+// If the type is 'object', we might be dealing with an object or an array or
+// null.
+
+ case 'object':
+
+// Due to a specification blunder in ECMAScript, typeof null is 'object',
+// so watch out for that case.
+
+ if (!value) {
+ return 'null';
+ }
+
+// Make an array to hold the partial results of stringifying this object value.
+
+ gap += indent;
+ partial = [];
+
+// Is the value an array?
+
+ if (Object.prototype.toString.apply(value) === '[object Array]') {
+
+// The value is an array. Stringify every element. Use null as a placeholder
+// for non-JSON values.
+
+ length = value.length;
+ for (i = 0; i < length; i += 1) {
+ partial[i] = str(i, value) || 'null';
+ }
+
+// Join all of the elements together, separated with commas, and wrap them in
+// brackets.
+
+ v = partial.length === 0 ? '[]' :
+ gap ? '[\n' + gap +
+ partial.join(',\n' + gap) + '\n' +
+ mind + ']' :
+ '[' + partial.join(',') + ']';
+ gap = mind;
+ return v;
+ }
+
+// If the replacer is an array, use it to select the members to be stringified.
+
+ if (rep && typeof rep === 'object') {
+ length = rep.length;
+ for (i = 0; i < length; i += 1) {
+ k = rep[i];
+ if (typeof k === 'string') {
+ v = str(k, value);
+ if (v) {
+ partial.push(quote(k) + (gap ? ': ' : ':') + v);
+ }
+ }
+ }
+ } else {
+
+// Otherwise, iterate through all of the keys in the object.
+
+ for (k in value) {
+ if (Object.hasOwnProperty.call(value, k)) {
+ v = str(k, value);
+ if (v) {
+ partial.push(quote(k) + (gap ? ': ' : ':') + v);
+ }
+ }
+ }
+ }
+
+// Join all of the member texts together, separated with commas,
+// and wrap them in braces.
+
+ v = partial.length === 0 ? '{}' :
+ gap ? '{\n' + gap + partial.join(',\n' + gap) + '\n' +
+ mind + '}' : '{' + partial.join(',') + '}';
+ gap = mind;
+ return v;
+ }
+ }
+
+// If the JSON object does not yet have a stringify method, give it one.
+
+ if (typeof JSON.stringify !== 'function') {
+ JSON.stringify = function (value, replacer, space) {
+
+// The stringify method takes a value and an optional replacer, and an optional
+// space parameter, and returns a JSON text. The replacer can be a function
+// that can replace values, or an array of strings that will select the keys.
+// A default replacer method can be provided. Use of the space parameter can
+// produce text that is more easily readable.
+
+ var i;
+ gap = '';
+ indent = '';
+
+// If the space parameter is a number, make an indent string containing that
+// many spaces.
+
+ if (typeof space === 'number') {
+ for (i = 0; i < space; i += 1) {
+ indent += ' ';
+ }
+
+// If the space parameter is a string, it will be used as the indent string.
+
+ } else if (typeof space === 'string') {
+ indent = space;
+ }
+
+// If there is a replacer, it must be a function or an array.
+// Otherwise, throw an error.
+
+ rep = replacer;
+ if (replacer && typeof replacer !== 'function' &&
+ (typeof replacer !== 'object' ||
+ typeof replacer.length !== 'number')) {
+ throw new Error('JSON.stringify');
+ }
+
+// Make a fake root object containing our value under the key of ''.
+// Return the result of stringifying the value.
+
+ return str('', {'': value});
+ };
+ }
+
+
+// If the JSON object does not yet have a parse method, give it one.
+
+ if (typeof JSON.parse !== 'function') {
+ JSON.parse = function (text, reviver) {
+
+// The parse method takes a text and an optional reviver function, and returns
+// a JavaScript value if the text is a valid JSON text.
+
+ var j;
+
+ function walk(holder, key) {
+
+// The walk method is used to recursively walk the resulting structure so
+// that modifications can be made.
+
+ var k, v, value = holder[key];
+ if (value && typeof value === 'object') {
+ for (k in value) {
+ if (Object.hasOwnProperty.call(value, k)) {
+ v = walk(value, k);
+ if (v !== undefined) {
+ value[k] = v;
+ } else {
+ delete value[k];
+ }
+ }
+ }
+ }
+ return reviver.call(holder, key, value);
+ }
+
+
+// Parsing happens in four stages. In the first stage, we replace certain
+// Unicode characters with escape sequences. JavaScript handles many characters
+// incorrectly, either silently deleting them, or treating them as line endings.
+
+ text = String(text);
+ cx.lastIndex = 0;
+ if (cx.test(text)) {
+ text = text.replace(cx, function (a) {
+ return '\\u' +
+ ('0000' + a.charCodeAt(0).toString(16)).slice(-4);
+ });
+ }
+
+// In the second stage, we run the text against regular expressions that look
+// for non-JSON patterns. We are especially concerned with '()' and 'new'
+// because they can cause invocation, and '=' because it can cause mutation.
+// But just to be safe, we want to reject all unexpected forms.
+
+// We split the second stage into 4 regexp operations in order to work around
+// crippling inefficiencies in IE's and Safari's regexp engines. First we
+// replace the JSON backslash pairs with '@' (a non-JSON character). Second, we
+// replace all simple value tokens with ']' characters. Third, we delete all
+// open brackets that follow a colon or comma or that begin the text. Finally,
+// we look to see that the remaining characters are only whitespace or ']' or
+// ',' or ':' or '{' or '}'. If that is so, then the text is safe for eval.
+
+ if (/^[\],:{}\s]*$/.
+test(text.replace(/\\(?:["\\\/bfnrt]|u[0-9a-fA-F]{4})/g, '@').
+replace(/"[^"\\\n\r]*"|true|false|null|-?\d+(?:\.\d*)?(?:[eE][+\-]?\d+)?/g, ']').
+replace(/(?:^|:|,)(?:\s*\[)+/g, ''))) {
+
+// In the third stage we use the eval function to compile the text into a
+// JavaScript structure. The '{' operator is subject to a syntactic ambiguity
+// in JavaScript: it can begin a block or an object literal. We wrap the text
+// in parens to eliminate the ambiguity.
+
+ j = eval('(' + text + ')');
+
+// In the optional fourth stage, we recursively walk the new structure, passing
+// each name/value pair to a reviver function for possible transformation.
+
+ return typeof reviver === 'function' ?
+ walk({'': j}, '') : j;
+ }
+
+// If the text is not JSON parseable, then a SyntaxError is thrown.
+
+ throw new SyntaxError('JSON.parse');
+ };
+ }
+}());
diff --git a/share/server/loop.js b/share/server/loop.js
index 84e35dc5..d2a07f61 100644
--- a/share/server/loop.js
+++ b/share/server/loop.js
@@ -20,11 +20,13 @@ function init_sandbox() {
sandbox.sum = Views.sum;
sandbox.log = log;
sandbox.toJSON = Couch.toJSON;
+ sandbox.JSON = JSON;
sandbox.provides = Mime.provides;
sandbox.registerType = Mime.registerType;
sandbox.start = Render.start;
sandbox.send = Render.send;
sandbox.getRow = Render.getRow;
+ sandbox.isArray = isArray;
} catch (e) {
log(e.toSource());
}
@@ -73,7 +75,7 @@ var DDoc = (function() {
if (i+1 == funPath.length) {
fun = point[funPath[i]]
if (typeof fun != "function") {
- fun = Couch.compileFunction(fun);
+ fun = Couch.compileFunction(fun, ddoc);
// cache the compiled fun on the ddoc
point[funPath[i]] = fun
};
@@ -99,6 +101,7 @@ var Loop = function() {
// "view" : Views.handler,
"reset" : State.reset,
"add_fun" : State.addFun,
+ "add_lib" : State.addLib,
"map_doc" : Views.mapDoc,
"reduce" : Views.reduce,
"rereduce" : Views.rereduce
diff --git a/share/server/render.js b/share/server/render.js
index 45782d76..9dcfbcd6 100644
--- a/share/server/render.js
+++ b/share/server/render.js
@@ -210,10 +210,27 @@ var Render = (function() {
return s;
};
+ function isDocRequestPath(info) {
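+    // a show request path that names a document has more than five
+    // segments: [db, "_design", ddoc, "_show", showname, docid]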
+ var path = info.path;
+ return path.length > 5;
+ };
+
function runShow(fun, ddoc, args) {
try {
+ resetList();
Mime.resetProvides();
- var resp = fun.apply(ddoc, args);
+ var resp = fun.apply(ddoc, args) || {};
+
+ // handle list() style API
+ if (chunks.length && chunks.length > 0) {
+ resp = maybeWrapResponse(resp);
+ resp.headers = resp.headers || {};
+ for(var header in startResp) {
+ resp.headers[header] = startResp[header]
+ }
+ resp.body = chunks.join("") + (resp.body || "");
+ resetList();
+ }
if (Mime.providesUsed) {
resp = Mime.runProvides(args[1]);
@@ -227,7 +244,11 @@ var Render = (function() {
throw(["error", "render_error", "undefined response from show function"]);
}
} catch(e) {
- renderError(e, fun.toSource());
+ if (args[0] === null && isDocRequestPath(args[1])) {
+ throw(["error", "not_found", "document not found"]);
+ } else {
+ renderError(e, fun.toSource());
+ }
}
};
diff --git a/share/server/state.js b/share/server/state.js
index 9af9e475..e6416382 100644
--- a/share/server/state.js
+++ b/share/server/state.js
@@ -14,6 +14,7 @@ var State = {
reset : function(config) {
// clear the globals and run gc
State.funs = [];
+ State.lib = null;
State.query_config = config || {};
init_sandbox();
gc();
@@ -21,7 +22,11 @@ var State = {
},
addFun : function(newFun) {
// Compile to a function and add it to funs array
- State.funs.push(Couch.compileFunction(newFun));
+ State.funs.push(Couch.compileFunction(newFun, {views : {lib : State.lib}}));
+ print("true");
+ },
+ addLib : function(lib) {
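+    // stash the design doc's views.lib so addFun can compile map
+    // functions with require() support against it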
+ State.lib = lib;
print("true");
}
}
diff --git a/share/server/util.js b/share/server/util.js
index bd4abc1d..b55480b9 100644
--- a/share/server/util.js
+++ b/share/server/util.js
@@ -10,22 +10,80 @@
// License for the specific language governing permissions and limitations under
// the License.
+var resolveModule = function(names, mod, root) {
+ if (names.length == 0) {
+ if (typeof mod.current != "string") {
+ throw ["error","invalid_require_path",
+ 'Must require a JavaScript string, not: '+(typeof mod.current)];
+ }
+ return {
+ current : mod.current,
+ parent : mod.parent,
+ id : mod.id,
+ exports : {}
+ }
+ }
+ // we need to traverse the path
+ var n = names.shift();
+ if (n == '..') {
+ if (!(mod.parent && mod.parent.parent)) {
+ throw ["error", "invalid_require_path", 'Object has no parent '+JSON.stringify(mod.current)];
+ }
+ return resolveModule(names, {
+ id : mod.id.slice(0, mod.id.lastIndexOf('/')),
+ parent : mod.parent.parent.parent,
+ current : mod.parent.parent.current
+ });
+ } else if (n == '.') {
+ if (!mod.parent) {
+ throw ["error", "invalid_require_path", 'Object has no parent '+JSON.stringify(mod.current)];
+ }
+ return resolveModule(names, {
+ parent : mod.parent.parent,
+ current : mod.parent.current,
+ id : mod.id
+ });
+ } else if (root) {
+ mod = {current : root};
+ }
+ if (!mod.current[n]) {
+ throw ["error", "invalid_require_path", 'Object has no property "'+n+'". '+JSON.stringify(mod.current)];
+ }
+ return resolveModule(names, {
+ current : mod.current[n],
+ parent : mod,
+ id : mod.id ? mod.id + '/' + n : n
+ });
+};
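+
+// Illustrative sketch (not part of this change): with CommonJS-style code
+// stored in a design doc, e.g.
+//
+//   ddoc.views.lib.fmt = "exports.key = function(doc) { return doc._id; };"
+//
+// a map function compiled with that ddoc can now do:
+//
+//   function(doc) {
+//     var fmt = require('views/lib/fmt');
+//     emit(fmt.key(doc), null);
+//   }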
+
var Couch = {
// moving this away from global so we can move to json2.js later
toJSON : function (val) {
- if (typeof(val) == "undefined") {
- throw "Cannot encode 'undefined' value as JSON";
- }
- if (typeof(val) == "xml") { // E4X support
- val = val.toXMLString();
- }
- if (val === null) { return "null"; }
- return (Couch.toJSON.dispatcher[val.constructor.name])(val);
+ return JSON.stringify(val);
},
- compileFunction : function(source) {
+ compileFunction : function(source, ddoc) {
if (!source) throw(["error","not_found","missing function"]);
try {
- var functionObject = sandbox ? evalcx(source, sandbox) : eval(source);
+ if (sandbox) {
+ if (ddoc) {
+ var require = function(name, module) {
+ module = module || {};
+ var newModule = resolveModule(name.split('/'), module, ddoc);
+ var s = "function (module, exports, require) { " + newModule.current + " }";
+ try {
+ var func = sandbox ? evalcx(s, sandbox) : eval(s);
+ func.apply(sandbox, [newModule, newModule.exports, function(name) {return require(name, newModule)}]);
+ } catch(e) {
+ throw ["error","compilation_error","Module require('"+name+"') raised error "+e.toSource()];
+ }
+ return newModule.exports;
+ }
+ sandbox.require = require;
+ }
+ var functionObject = evalcx(source, sandbox);
+ } else {
+ var functionObject = eval(source);
+ }
} catch (err) {
throw(["error", "compilation_error", err.toSource() + " (" + source + ")"]);
};
@@ -38,64 +96,20 @@ var Couch = {
},
recursivelySeal : function(obj) {
// seal() is broken in current Spidermonkey
- seal(obj);
+ try {
+ seal(obj);
+ } catch (x) {
+ // Sealing of arrays broken in some SpiderMonkey versions.
+ // https://bugzilla.mozilla.org/show_bug.cgi?id=449657
+ }
for (var propname in obj) {
- if (typeof doc[propname] == "object") {
- recursivelySeal(doc[propname]);
+ if (typeof obj[propname] == "object") {
+ arguments.callee(obj[propname]);
}
}
}
}
-Couch.toJSON.subs = {'\b': '\\b', '\t': '\\t', '\n': '\\n', '\f': '\\f',
- '\r': '\\r', '"' : '\\"', '\\': '\\\\'};
-Couch.toJSON.dispatcher = {
- "Array": function(v) {
- var buf = [];
- for (var i = 0; i < v.length; i++) {
- buf.push(Couch.toJSON(v[i]));
- }
- return "[" + buf.join(",") + "]";
- },
- "Boolean": function(v) {
- return v.toString();
- },
- "Date": function(v) {
- var f = function(n) { return n < 10 ? '0' + n : n };
- return '"' + v.getUTCFullYear() + '-' +
- f(v.getUTCMonth() + 1) + '-' +
- f(v.getUTCDate()) + 'T' +
- f(v.getUTCHours()) + ':' +
- f(v.getUTCMinutes()) + ':' +
- f(v.getUTCSeconds()) + 'Z"';
- },
- "Number": function(v) {
- return isFinite(v) ? v.toString() : "null";
- },
- "Object": function(v) {
- //if (v === null) return "null";
- var buf = [];
- for (var k in v) {
- if (!v.hasOwnProperty(k) || typeof(k) !== "string" || v[k] === undefined) {
- continue;
- }
- buf.push(Couch.toJSON(k) + ": " + Couch.toJSON(v[k]));
- }
- return "{" + buf.join(",") + "}";
- },
- "String": function(v) {
- if (/["\\\x00-\x1f]/.test(v)) {
- v = v.replace(/([\x00-\x1f\\"])/g, function(a, b) {
- var c = Couch.toJSON.subs[b];
- if (c) return c;
- c = b.charCodeAt();
- return '\\u00' + Math.floor(c / 16).toString(16) + (c % 16).toString(16);
- });
- }
- return '"' + v + '"';
- }
-};
-
// prints the object as JSON, and rescues and logs any toJSON() related errors
function respond(obj) {
try {
@@ -107,11 +121,15 @@ function respond(obj) {
};
function log(message) {
- // return; // idea: query_server_config option for log level
- if (typeof message == "undefined") {
- message = "Error: attempting to log message of 'undefined'.";
+ // idea: query_server_config option for log level
+ if (typeof message == "xml") {
+ message = message.toXMLString();
} else if (typeof message != "string") {
message = Couch.toJSON(message);
}
- respond(["log", message]);
+ respond(["log", String(message)]);
};
+
+function isArray(obj) {
+ return Object.prototype.toString.call(obj) === "[object Array]";
+}
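
The resolveModule/require pair above gives sandboxed design document functions
CommonJS-style modules: module source lives as a string property of the design
document, is looked up by its '/'-separated path, wrapped in a
function (module, exports, require) shell, and evaluated inside the sandbox.
A minimal sketch of a design document that could use it (the "lib"/"mylib"
names are illustrative):

    {
      "_id": "_design/example",
      "lib": {
        "mylib": "exports.upcase = function(s) { return s.toUpperCase(); };"
      },
      "shows": {
        "hello": "function(doc, req) { var lib = require('lib/mylib'); return lib.upcase('hello'); }"
      }
    }
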
diff --git a/share/server/views.js b/share/server/views.js
index ffe63377..2a15ee56 100644
--- a/share/server/views.js
+++ b/share/server/views.js
@@ -105,19 +105,8 @@ var Views = (function() {
// ]
//
- /*
- Immutable document support temporarily removed.
+ Couch.recursivelySeal(doc);
- Removed because the seal function no longer works on JS 1.8 arrays,
- instead returning an error. The sealing is meant to prevent map
- functions from modifying the same document that is passed to other map
- functions. However, only map functions in the same design document are
- run together, so we have a reasonable expectation they can trust each
- other. Any map fun that can't be trusted can be placed in its own
- design document, and it cannot affect other map functions.
-
- recursivelySeal(doc); // seal to prevent map functions from changing doc
- */
var buf = [];
for (var i = 0; i < State.funs.length; i++) {
map_results = [];
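
Re-enabling Couch.recursivelySeal(doc) means a map function can no longer
mutate the document object that is handed to the other map functions in the
same design document. A minimal sketch of the leak this prevents:

    // With the document sealed, a misbehaving map function like this one
    // now fails instead of silently polluting the doc seen by later maps:
    function(doc) {
      doc.tainted = true; // write to a sealed object: ignored or an error
      emit(doc._id, null);
    }
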
diff --git a/share/www/_sidebar.html b/share/www/_sidebar.html
index 6c7abc99..13727cbd 100644
--- a/share/www/_sidebar.html
+++ b/share/www/_sidebar.html
@@ -35,12 +35,12 @@ specific language governing permissions and limitations under the License.
<a href="#" class="signup">Signup</a> or <a href="#" class="login">Login</a>
</span>
<span class="loggedin">
- Welcome <a class="username">?</a>!
+ Welcome <a class="name">?</a>!
<br/>
<a href="#" class="logout">Logout</a>
</span>
<span class="loggedinadmin">
- Welcome <a class="username">?</a>!
+ Welcome <a class="name">?</a>!
<br/>
<a href="#" class="createadmin">Setup more admins</a> or
<a href="#" class="logout">Logout</a>
diff --git a/share/www/config.html b/share/www/config.html
index 8f788041..9863c8b5 100644
--- a/share/www/config.html
+++ b/share/www/config.html
@@ -20,7 +20,7 @@ specific language governing permissions and limitations under the License.
<link rel="stylesheet" href="style/layout.css?0.11.0" type="text/css">
<script src="script/json2.js"></script>
<script src="script/sha1.js"></script>
- <script src="script/jquery.js?1.3.2"></script>
+ <script src="script/jquery.js?1.4.2"></script>
<script src="script/jquery.couch.js?0.11.0"></script>
<script src="script/jquery.dialog.js?0.11.0"></script>
<script src="script/futon.js?0.11.0"></script>
@@ -47,7 +47,7 @@ specific language governing permissions and limitations under the License.
var prev = null;
$.each(options, function(idx, optionName) {
var cur = idx == 0 ? row : $("<tr></tr>");
- $("<td class='name'><b></b></td>")
+ $("<td class='name' section="+sectionName+"><b></b></td>")
.find("b").text(optionName).end().appendTo(cur);
$("<td class='value'><code></code></td>")
.find("code").text(section[optionName]).end().appendTo(cur);
@@ -58,7 +58,6 @@ specific language governing permissions and limitations under the License.
row.find("th").attr("rowspan", options.length);
});
$("#config tbody tr").removeClass("odd").filter(":odd").addClass("odd");
-
$("#config tbody td.value code").makeEditable({
accept: function(newValue) {
var row = $(this).parents("tr").eq(0);
@@ -67,7 +66,40 @@ specific language governing permissions and limitations under the License.
row.find("td.value code").text(newValue);
}}, row.data("section"), row.data("option"), newValue);
}
- });
+ }).parent().parent()
+ .append($('<td><div style="text-align:center;"><a class="remove" href="#remove">x</a></div></td>'))
+ .click(function (ev) {
+ // There is a listener that stops all events below this element, which
+ // is why the remove link listener has to be attached here
+ var n = $(ev.target).parent().parent().parent();
+ if ($(ev.target).attr('href') === "#remove" ) {
+ $.couch.config({ success: function () {location = location.href.split('#')[0];} }
+ , n.find('td.name').attr("section"), n.find('td.name').text(), null);
+ }
+ })
+ var add = $('<a href="#add">Add a new section</a>').click(function () {
+ $.showDialog("dialog/_create_config.html", {
+ submit: function(data, callback) {
+ var fail = false;
+ if (!data.section || data.section.length == 0) {
+ callback({section: "Please enter a section."});
+ fail = true;
+ }
+ if (!data.option || data.option.length == 0) {
+ callback({option: "Please enter an option."});
+ fail = true;
+ }
+ if (!data.val || data.val.length == 0) {
+ callback({val: "Please enter a value."});
+ fail = true;
+ }
+ if (fail) { return; }
+ $.couch.config({ success: function () {callback();location = location.href.split('#')[0]} }
+ , data.section, data.option, data.val);
+ }
+ });
+ });
+ $("div#content").append(add);
}
});
@@ -84,13 +116,16 @@ specific language governing permissions and limitations under the License.
<strong>Note:</strong> Some configuration options may require
restarting the server to take effect after modification.
</p>
-
+ <p class="help">
+ For the strongest consistency guarantees, <tt>delayed_commits</tt> should be set to <tt>false</tt>. The default value of <tt>true</tt> is designed for single-user performance. For more details, see <a href="http://wiki.apache.org/couchdb/Durability_Matrix">a discussion of durability on the CouchDB wiki</a>.
+ </p>
<table id="config" class="listing" cellspacing="0">
<caption>Configuration</caption>
<thead><tr>
<th>Section</th>
<th>Option</th>
<th>Value</th>
+ <th>Delete</th>
</tr></thead>
<tbody class="content"></tbody>
</table>
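
Both the edit and the new remove handlers above go through the same
jquery.couch.js call, $.couch.config(options, section, option, value), where a
null value deletes the option. A sketch of both forms, using the
delayed_commits option mentioned in the help text:

    // set a value
    $.couch.config({ success: function() { /* saved */ } },
                   "couchdb", "delayed_commits", "false");
    // a null value deletes the option, which is what the new "x" link does
    $.couch.config({ success: function() { /* removed */ } },
                   "couchdb", "delayed_commits", null);
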
diff --git a/share/www/couch_tests.html b/share/www/couch_tests.html
index fa347764..f10bad23 100644
--- a/share/www/couch_tests.html
+++ b/share/www/couch_tests.html
@@ -20,7 +20,7 @@ specific language governing permissions and limitations under the License.
<link rel="stylesheet" href="style/layout.css?0.11.0" type="text/css">
<script src="script/json2.js"></script>
<script src="script/sha1.js"></script>
- <script src="script/jquery.js?1.3.2"></script>
+ <script src="script/jquery.js?1.4.2"></script>
<script src="script/jquery.couch.js?0.11.0"></script>
<script src="script/jquery.dialog.js?0.11.0"></script>
<script src="script/futon.js?0.11.0"></script>
@@ -35,6 +35,14 @@ specific language governing permissions and limitations under the License.
$("#toolbar button.load").click(function() {
location.reload(true);
});
+ $("#toolbar button.share").click(function() {
+ $.showDialog("dialog/_share_test_reports.html", {
+ submit: function(data, callback) {
+ $.couch.replicate("test_suite_reports", "http://couchdb.couchdb.org/test_suite_reports");
+ callback();
+ }
+ });
+ });
$("#toolbar button.add").click(function() {
location = "custom_test.html";
});
@@ -52,13 +60,18 @@ specific language governing permissions and limitations under the License.
<ul id="toolbar">
<li><button class="run">Run All</button></li>
<li><button class="load">Reload</button></li>
+ <li><button class="share">Share Test Reports</button></li>
<li><button class="add">Custom Test</button></li>
+ <li class="current"></li>
</ul>
<p class="help">
<strong>Note:</strong> Each of the tests will block the browser. If the
connection to your CouchDB server is slow, running the tests will take
some time, and you'll not be able to do much with your browser while
- a test is being executed.
+ a test is being executed. <strong>Also:</strong> The test suite is designed
+ to work with Firefox (with Firebug disabled). Patches that improve
+ compatibility with other browsers are welcome, but official support is
+ for Firefox (latest stable version) only.
</p>
<table class="listing" id="tests" cellspacing="0">
diff --git a/share/www/custom_test.html b/share/www/custom_test.html
index 6106c879..2566a000 100644
--- a/share/www/custom_test.html
+++ b/share/www/custom_test.html
@@ -20,12 +20,14 @@ specific language governing permissions and limitations under the License.
<link rel="stylesheet" href="style/layout.css?0.11.0" type="text/css">
<script src="script/json2.js"></script>
<script src="script/sha1.js"></script>
- <script src="script/jquery.js?1.3.2"></script>
+ <script src="script/jquery.js?1.4.2"></script>
<script src="script/jquery.couch.js?0.11.0"></script>
<script src="script/jquery.dialog.js?0.11.0"></script>
<script src="script/futon.js?0.11.0"></script>
<script src="script/jquery.resizer.js?0.11.0"></script>
<script src="script/couch.js?0.11.0"></script>
+ <script src="script/couch_test_runner.js?0.11.0"></script>
+ <script src="script/couch_tests.js"></script>
<script>
function T(arg, desc) {
if(!arg) {
diff --git a/share/www/database.html b/share/www/database.html
index bafac223..2802ad78 100644
--- a/share/www/database.html
+++ b/share/www/database.html
@@ -20,7 +20,7 @@ specific language governing permissions and limitations under the License.
<link rel="stylesheet" href="style/layout.css?0.11.0" type="text/css">
<script src="script/json2.js"></script>
<script src="script/sha1.js"></script>
- <script src="script/jquery.js?1.3.2"></script>
+ <script src="script/jquery.js?1.4.2"></script>
<script src="script/jquery.couch.js?0.11.0"></script>
<script src="script/jquery.dialog.js?0.11.0"></script>
<script src="script/futon.js?0.11.0"></script>
@@ -37,7 +37,7 @@ specific language governing permissions and limitations under the License.
$(function() {
if (page.redirecting) return;
- $("h1 strong").html('<a href="?' + page.db.name + '">' + page.db.name + '</a>');
+ $("h1 strong").text(page.db.name);
var viewPath = page.viewName || "_all_docs";
if (viewPath != "_temp_view" && viewPath != "_design_docs") {
$("h1 a.raw").attr("href", "/" + encodeURIComponent(page.db.name) +
@@ -71,15 +71,18 @@ specific language governing permissions and limitations under the License.
});
// Restore preferences/state
- $("#documents thead th.key").toggleClass("desc", $.futon.storage.get("desc"));
- var reduce = $.futon.storage.get("reduce");
+ $("#documents thead th.key").toggleClass("desc", !!$.futon.storage.get("desc"));
+ var reduce = !!$.futon.storage.get("reduce");
$("#reduce :checkbox")[0].checked = reduce;
- $("#grouplevel select").val($.futon.storage.get("group_level"));
+ $("#grouplevel select").val(parseInt($.futon.storage.get("group_level")));
$("#grouplevel").toggleClass("disabled", !reduce).find("select").each(function() {
this.disabled = !reduce;
});
- $("#perpage").val($.futon.storage.get("per_page"));
+ $("#perpage").val(parseInt($.futon.storage.get("per_page")));
+
+ var staleViews = !!$.futon.storage.get("stale");
+ $("#staleviews :checkbox")[0].checked = staleViews;
page.populateViewsMenu();
page.populateViewEditor();
@@ -94,6 +97,9 @@ specific language governing permissions and limitations under the License.
location.href = "?" + encodeURIComponent(page.db.name) +
(viewName ? "/" + viewName : "");
});
+ $("#staleviews :checkbox").click(function() {
+ $.futon.storage.set("stale", this.checked);
+ });
$("#documents thead th.key span").click(function() {
$(this).closest("th").toggleClass("desc");
page.updateDocumentListing();
@@ -115,12 +121,11 @@ specific language governing permissions and limitations under the License.
$.futon.storage.set("per_page", this.value);
});
$("#toolbar button.add").click(page.newDocument);
- $("#toolbar button.compact").click(page.compactDatabase);
- $("#toolbar button.viewcleanup").click(page.viewCleanup);
+ $("#toolbar button.compact").click(page.compactAndCleanup);
$("#toolbar button.delete").click(page.deleteDatabase);
- $("#toolbar button.compactview").click(page.compactView);
+ $("#toolbar button.security").click(page.databaseSecurity);
- $('#jumpto input').addPlaceholder("Document ID").suggest(function(text, callback) {
+ $('#jumpto input').suggest(function(text, callback) {
page.db.allDocs({
limit: 10, startkey: text, endkey: text + 'zzz',
success: function(docs) {
@@ -149,6 +154,11 @@ specific language governing permissions and limitations under the License.
<a class="raw" title="Raw view"></a>
</h1>
<div id="content">
+ <div id="staleviews">
+ <label>Stale views
+ <input name="staleviews" type="checkbox" />
+ </label>
+ </div>
<div id="switch">
<label>View: <select autocomplete="false">
<option value="_all_docs">All documents</option>
@@ -158,15 +168,14 @@ specific language governing permissions and limitations under the License.
</div>
<div id="jumpto">
<label>Jump to:
- <input type="text" name="docid" autocomplete="off" />
+ <input type="text" name="docid" placeholder="Document ID" autocomplete="off" />
</label>
</div>
<ul id="toolbar">
<li><button class="add">New Document</button></li>
- <li><button class="compact">Compact Database…</button></li>
- <li><button class="viewcleanup">View Cleanup…</button></li>
+ <li><button class="security">Security…</button></li>
+ <li><button class="compact">Compact &amp; Cleanup…</button></li>
<li><button class="delete">Delete Database…</button></li>
- <li><button class="compactview" style="display: none">Compact View…</button></li>
</ul>
<div id="viewcode" class="collapsed" style="display: none">
diff --git a/share/www/dialog/_admin_party.html b/share/www/dialog/_admin_party.html
index f52099a6..ea9fb15a 100644
--- a/share/www/dialog/_admin_party.html
+++ b/share/www/dialog/_admin_party.html
@@ -22,7 +22,7 @@ specific language governing permissions and limitations under the License.
both performance and security.
</p>
<p class="help">
- Clicking start will remove all admins from the configuration. You will
+ Clicking “Remove Admins” will remove all admins from the configuration. You will
have to recreate any admins by hand after the tests have finished.
</p>
</fieldset>
diff --git a/share/www/dialog/_compact_view.html b/share/www/dialog/_compact_cleanup.html
index e64d97e3..506417f4 100644
--- a/share/www/dialog/_compact_view.html
+++ b/share/www/dialog/_compact_cleanup.html
@@ -13,16 +13,39 @@ specific language governing permissions and limitations under the License.
-->
<form action="" method="post">
- <h2>Compact View</h2>
- <fieldset>
+ <h2>Compact &amp; Cleanup</h2>
+ <fieldset class="radiogroup">
+ <label>
+ <input type="radio" name="action" value="compact_database" checked>
+ Compact Database
+ </label>
+ <p class="help">
+ Compacting a database removes deleted documents and previous revisions.
+ It is an <strong>irreversible operation</strong> and may take
+ a while to complete for large databases.
+ </p>
+ <hr>
+ <label>
+ <input type="radio" name="action" value="compact_views">
+ Compact Views
+ </label>
<p class="help">
View compaction will affect all views in this design document. This
operation may take some time to complete. Your views will still operate
normally during compaction.
</p>
+ <hr>
+ <label>
+ <input type="radio" name="action" value="view_cleanup">
+ Cleanup Views
+ </label>
+ <p class="help">
+ Cleaning up views in a database removes old view files still stored
+ on the filesystem. It is an <strong>irreversible operation</strong>.
+ </p>
</fieldset>
<div class="buttons">
- <button type="submit">Compact</button>
+ <button type="submit">Run</button>
<button type="button" class="cancel">Cancel</button>
</div>
</form>
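
The three radio actions in the merged dialog map onto the database-level HTTP
API, shown here with the test-suite helper from couch.js (the database and
design document names are illustrative):

    CouchDB.request("POST", "/dbname/_compact");          // Compact Database
    CouchDB.request("POST", "/dbname/_compact/ddocname"); // Compact Views (one design doc)
    CouchDB.request("POST", "/dbname/_view_cleanup");     // Cleanup Views
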
diff --git a/share/www/dialog/_create_admin.html b/share/www/dialog/_create_admin.html
index e4141e1d..d4aec95a 100644
--- a/share/www/dialog/_create_admin.html
+++ b/share/www/dialog/_create_admin.html
@@ -27,7 +27,7 @@ specific language governing permissions and limitations under the License.
</p>
<table summary=""><tbody><tr>
<th><label>Username:</label></th>
- <td><input type="text" name="username" size="24"></td>
+ <td><input type="text" name="name" size="24"></td>
</tr><tr>
<th><label>Password:</label></th>
<td><input type="password" name="password" size="24"></td>
diff --git a/share/www/dialog/_compact_database.html b/share/www/dialog/_create_config.html
index 5c898a3c..79e08b08 100644
--- a/share/www/dialog/_compact_database.html
+++ b/share/www/dialog/_create_config.html
@@ -13,16 +13,30 @@ specific language governing permissions and limitations under the License.
-->
<form action="" method="post">
- <h2>Compact Database</h2>
+ <h2>Create New Config Option</h2>
<fieldset>
<p class="help">
- Compacting a database removes deleted documents and previous revisions.
- It is an <strong>irreversible operation</strong> and may take
- a while to complete for large databases.
+ Please enter the section, option, and value.
</p>
+ <table summary="">
+ <tbody>
+ <tr>
+ <th><label>Section:</label></th>
+ <td><input type="text" name="section" size="24"></td>
+ </tr>
+ <tr>
+ <th><label>Option:</label></th>
+ <td><input type="text" name="option" size="24"></td>
+ </tr>
+ <tr>
+ <th><label>Value:</label></th>
+ <td><input type="text" name="val" size="24"></td>
+ </tr>
+ </tbody>
+ </table>
</fieldset>
<div class="buttons">
- <button type="submit">Compact</button>
+ <button type="submit">Create</button>
<button type="button" class="cancel">Cancel</button>
</div>
</form>
diff --git a/share/www/dialog/_database_security.html b/share/www/dialog/_database_security.html
new file mode 100644
index 00000000..d63fa787
--- /dev/null
+++ b/share/www/dialog/_database_security.html
@@ -0,0 +1,50 @@
+<!--
+
+Licensed under the Apache License, Version 2.0 (the "License"); you may not use
+this file except in compliance with the License. You may obtain a copy of the
+License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software distributed
+under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
+CONDITIONS OF ANY KIND, either express or implied. See the License for the
+specific language governing permissions and limitations under the License.
+
+-->
+<form action="" method="post">
+ <h2>Security</h2>
+ <fieldset>
+ <p class="help">
+ Each database contains lists of admins and readers.
+ Admins and readers are each defined by <tt>names</tt> and <tt>roles</tt>, which are lists of strings.
+ </p>
+
+ <h3>Admins</h3>
+ <p class="help">Database admins can update design documents and edit the readers list.</p>
+ <table summary=""><tbody><tr>
+ <th><label>Names:</label></th>
+ <td><input type="text" name="admin_names" size="40"></td>
+ </tr><tr>
+ <th><label>Roles:</label></th>
+ <td><input type="text" name="admin_roles" size="40"></td>
+ </tr>
+ </tbody></table>
+
+ <h3>Readers</h3>
+ <p class="help">Database readers can access the database. If no readers are defined, the database is public.</p>
+ <table summary=""><tbody><tr>
+ <th><label>Names:</label></th>
+ <td><input type="text" name="reader_names" size="40"></td>
+ </tr><tr>
+ <th><label>Roles:</label></th>
+ <td><input type="text" name="reader_roles" size="40"></td>
+ </tr>
+ </tbody></table>
+
+ </fieldset>
+ <div class="buttons">
+ <button type="submit">Update</button>
+ <button type="button" class="cancel">Cancel</button>
+ </div>
+</form>
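
The dialog reads and writes the per-database _security object. Its shape, as
assembled by the submit handler in futon.browse.js below (the names and roles
are illustrative):

    {
      "admins":  { "names": ["bob"],   "roles": ["boss"]  },
      "readers": { "names": ["alice"], "roles": ["staff"] }
    }
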
diff --git a/share/www/dialog/_login.html b/share/www/dialog/_login.html
index 959f7233..f05a5fdc 100644
--- a/share/www/dialog/_login.html
+++ b/share/www/dialog/_login.html
@@ -16,11 +16,11 @@ specific language governing permissions and limitations under the License.
<h2>Login</h2>
<fieldset>
<p class="help">
- Login to CouchDB with your username and password.
+ Login to CouchDB with your name and password.
</p>
<table summary=""><tbody><tr>
<th><label>Username:</label></th>
- <td><input type="text" name="username" size="24"></td>
+ <td><input type="text" name="name" size="24"></td>
</tr><tr>
<th><label>Password:</label></th>
<td><input type="password" name="password" size="24"></td>
diff --git a/share/www/dialog/_share_test_reports.html b/share/www/dialog/_share_test_reports.html
new file mode 100644
index 00000000..82b49a74
--- /dev/null
+++ b/share/www/dialog/_share_test_reports.html
@@ -0,0 +1,42 @@
+<!--
+
+Licensed under the Apache License, Version 2.0 (the "License"); you may not use
+this file except in compliance with the License. You may obtain a copy of the
+License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software distributed
+under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
+CONDITIONS OF ANY KIND, either express or implied. See the License for the
+specific language governing permissions and limitations under the License.
+
+-->
+<form action="" method="post">
+ <h2>Share Test Reports</h2>
+ <fieldset>
+ <p class="help">
+ After each test run, a results summary document is stored in
+ <a href="/_utils/database.html?test_suite_reports">your local
+ <tt>test_suite_reports</tt> database.</a> The data has no personally
+ identifying information, just details about the test run and your CouchDB
+ and browser versions. (Click the red link above to see what's stored.)
+ The data remains private until you click the "share" button below.
+ </p>
+ <p class="help">
+ Test reports are very valuable to the CouchDB community, and they are easy
+ to share. Clicking the "Share" button below triggers replication from
+ your local <tt>test_suite_reports</tt> database to a database hosted by the
+ project.
+ </p>
+ <p class="help">
+ <a href="http://couchdb.couchdb.org/_utils/database.html?test_suite_reports">
+ Browse test reports shared by other users.</a>
+ Thank you for sharing!
+ </p>
+ </fieldset>
+ <div class="buttons">
+ <button type="submit">Share</button>
+ <button type="button" class="cancel">Cancel</button>
+ </div>
+</form>
diff --git a/share/www/dialog/_signup.html b/share/www/dialog/_signup.html
index 884b4be2..7ba3448a 100644
--- a/share/www/dialog/_signup.html
+++ b/share/www/dialog/_signup.html
@@ -21,7 +21,7 @@ specific language governing permissions and limitations under the License.
</p>
<table summary=""><tbody><tr>
<th><label>Username:</label></th>
- <td><input type="text" name="username" size="24"></td>
+ <td><input type="text" name="name" size="24"></td>
</tr><tr>
<th><label>Password:</label></th>
<td><input type="password" name="password" size="24"></td>
diff --git a/share/www/dialog/_view_cleanup.html b/share/www/dialog/_view_cleanup.html
deleted file mode 100644
index 2ede89a5..00000000
--- a/share/www/dialog/_view_cleanup.html
+++ /dev/null
@@ -1,28 +0,0 @@
-<!--
-
-Licensed under the Apache License, Version 2.0 (the "License"); you may not use
-this file except in compliance with the License. You may obtain a copy of the
-License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software distributed
-under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
-CONDITIONS OF ANY KIND, either express or implied. See the License for the
-specific language governing permissions and limitations under the License.
-
--->
-<form action="" method="post">
- <h2>View Cleanup</h2>
- <fieldset>
- <p class="help">
- Cleaning up views in a database removes old view files still stored
- on the filesystem. It is an <strong>irreversible operation</strong>.
- </p>
- </fieldset>
- <div class="buttons">
- <button type="submit">Cleanup</button>
- <button type="button" class="cancel">Cancel</button>
- </div>
-</form>
-
diff --git a/share/www/document.html b/share/www/document.html
index 772876aa..b6f42018 100644
--- a/share/www/document.html
+++ b/share/www/document.html
@@ -20,7 +20,8 @@ specific language governing permissions and limitations under the License.
<link rel="stylesheet" href="style/layout.css?0.11.0" type="text/css">
<script src="script/json2.js"></script>
<script src="script/sha1.js"></script>
- <script src="script/jquery.js?1.3.2"></script>
+ <script src="script/base64.js"></script>
+ <script src="script/jquery.js?1.4.2"></script>
<script src="script/jquery.couch.js?0.11.0"></script>
<script src="script/jquery.dialog.js?0.11.0"></script>
<script src="script/futon.js?0.11.0"></script>
@@ -28,7 +29,7 @@ specific language governing permissions and limitations under the License.
<script src="script/futon.browse.js?0.11.0"></script>
<script src="script/futon.format.js?0.11.0"></script>
<script src="script/jquery.editinline.js?0.11.0"></script>
- <script src="script/jquery.form.js?0.11.0"></script>
+ <script src="script/jquery.form.js?2.36"></script>
<script>
var page = new $.futon.CouchDocumentPage();
diff --git a/share/www/image/key.png b/share/www/image/key.png
new file mode 100644
index 00000000..e04ed108
--- /dev/null
+++ b/share/www/image/key.png
Binary files differ
diff --git a/share/www/image/spinner.gif b/share/www/image/spinner.gif
index 19a8d4df..6239655e 100644
--- a/share/www/image/spinner.gif
+++ b/share/www/image/spinner.gif
Binary files differ
diff --git a/share/www/index.html b/share/www/index.html
index 2cf04c87..975f5986 100644
--- a/share/www/index.html
+++ b/share/www/index.html
@@ -20,7 +20,7 @@ specific language governing permissions and limitations under the License.
<link rel="stylesheet" href="style/layout.css?0.11.0" type="text/css">
<script src="script/json2.js"></script>
<script src="script/sha1.js"></script>
- <script src="script/jquery.js?1.3.2"></script>
+ <script src="script/jquery.js?1.4.2"></script>
<script src="script/jquery.couch.js?0.11.0"></script>
<script src="script/jquery.dialog.js?0.11.0"></script>
<script src="script/futon.js?0.11.0"></script>
@@ -34,7 +34,7 @@ specific language governing permissions and limitations under the License.
this.updateSelection(location.pathname + "index.html");
});
}
- var dbsPerPage = $.futon.storage.get("per_page");
+ var dbsPerPage = parseInt($.futon.storage.get("per_page"));
if (dbsPerPage) $("#perpage").val(dbsPerPage);
$("#perpage").change(function() {
page.updateDatabaseListing();
diff --git a/share/www/replicator.html b/share/www/replicator.html
index 5a09ca16..32f3e20f 100644
--- a/share/www/replicator.html
+++ b/share/www/replicator.html
@@ -20,7 +20,7 @@ specific language governing permissions and limitations under the License.
<link rel="stylesheet" href="style/layout.css?0.11.0" type="text/css">
<script src="script/json2.js"></script>
<script src="script/sha1.js"></script>
- <script src="script/jquery.js?1.3.2"></script>
+ <script src="script/jquery.js?1.4.2"></script>
<script src="script/jquery.couch.js?0.11.0"></script>
<script src="script/jquery.dialog.js?0.11.0"></script>
<script src="script/futon.js?0.11.0"></script>
@@ -41,6 +41,7 @@ specific language governing permissions and limitations under the License.
$.couch.allDbs({
success: function(dbs) {
+ dbs.sort();
$("fieldset select").each(function() {
var select = this;
$.each(dbs, function(idx, dbName) {
@@ -77,17 +78,28 @@ specific language governing permissions and limitations under the License.
$("#records tbody.content").empty();
var source = $("#from_local")[0].checked ? $("#from_name").val() : $("#from_url").val();
var target = $("#to_local")[0].checked ? $("#to_name").val() : $("#to_url").val();
+ var repOpts = {};
+ if ($("#continuous")[0].checked) {
+ repOpts.continuous = true;
+ }
$.couch.replicate(source, target, {
success: function(resp) {
- $.each(resp.history, function(idx, record) {
+ if (resp._local_id) {
$("<tr><th></th></tr>")
- .find("th").text(JSON.stringify(record)).end()
+ .find("th").text(JSON.stringify(resp)).end()
.appendTo("#records tbody.content");
- });
- $("#records tbody tr").removeClass("odd").filter(":odd").addClass("odd");
- $("#records tbody.footer td").text("Replication session " + resp.session_id);
+ $("#records tbody tr").removeClass("odd").filter(":odd").addClass("odd");
+ } else {
+ $.each(resp.history, function(idx, record) {
+ $("<tr><th></th></tr>")
+ .find("th").text(JSON.stringify(record)).end()
+ .appendTo("#records tbody.content");
+ });
+ $("#records tbody tr").removeClass("odd").filter(":odd").addClass("odd");
+ $("#records tbody.footer td").text("Replication session " + resp.session_id);
+ }
}
- });
+ }, repOpts);
});
});
</script>
@@ -103,25 +115,30 @@ specific language governing permissions and limitations under the License.
<fieldset id="source">
<legend>Replicate changes from:</legend>
<p>
- <label><input type="radio" id="from_local" name="from_type" value="local" checked> Local</label>
- <label>database: <select id="from_name" name="from_name"></select></label>
+ <input type="radio" id="from_local" name="from_type" value="local" checked>
+ <label for="from_local">Local Database: </label>
+ <select id="from_name" name="from_name"></select>
</p><p>
- <label><input type="radio" id="from_to_remote" name="from_type" value="remote"> Remote</label>
- <label>database: <input type="text" id="from_url" name="from_url" size="30" value="http://" disabled></label>
+ <input type="radio" id="from_to_remote" name="from_type" value="remote">
+ <label for="from_to_remote">Remote database: </label>
+ <input type="text" id="from_url" name="from_url" size="30" value="http://" disabled>
</p>
</fieldset>
<p class="swap"><button id="swap" tabindex="99">⇄</button></p>
<fieldset id="target">
<legend>to:</legend>
<p>
- <label><input type="radio" id="to_local" name="to_type" value="local" checked> Local</label>
- <label>database: <select id="to_name" name="to_name"></select></label>
+ <input type="radio" id="to_local" name="to_type" value="local" checked>
+ <label for="to_local">Local database: </label>
+ <select id="to_name" name="to_name"></select>
</p><p>
- <label><input type="radio" id="to_remote" name="to_type" value="remote"> Remote</label>
- <label>database: <input type="text" id="to_url" name="to_url" size="30" value="http://" disabled></label>
+ <input type="radio" id="to_remote" name="to_type" value="remote">
+ <label for="to_remote">Remote database: </label>
+ <input type="text" id="to_url" name="to_url" size="30" value="http://" disabled>
</p>
</fieldset>
<p class="actions">
+ <label><input type="checkbox" name="continuous" value="continuous" id="continuous"> Continuous</label>
<button id="replicate" type="button">Replicate</button>
</p>
</form>
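
With the new checkbox the page passes a separate options object to
$.couch.replicate, and a continuous replication answers with a _local_id
instead of a replication history. A sketch of the same call made directly
(the database names are illustrative):

    $.couch.replicate("source_db", "http://example.com/target_db", {
      success: function(resp) {
        if (resp._local_id) {
          // continuous: the response identifies the running replication
        } else {
          // one-shot: resp.history and resp.session_id are available
        }
      }
    }, { continuous: true });
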
diff --git a/share/www/script/base64.js b/share/www/script/base64.js
new file mode 100644
index 00000000..e0aab303
--- /dev/null
+++ b/share/www/script/base64.js
@@ -0,0 +1,124 @@
+/* Copyright (C) 1999 Masanao Izumo <iz@onicos.co.jp>
+ * Version: 1.0
+ * LastModified: Dec 25 1999
+ * This library is free. You can redistribute it and/or modify it.
+ */
+ /* Modified by Chris Anderson to not use CommonJS */
+ /* Modified by Dan Webb not to require Narwhal's binary library */
+
+var Base64 = {};
+(function(exports) {
+
+ var encodeChars = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
+ var decodeChars = [
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 62, -1, -1, -1, 63,
+ 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, -1, -1, -1, -1, -1, -1,
+ -1, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14,
+ 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, -1, -1, -1, -1, -1,
+ -1, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40,
+ 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, -1, -1, -1, -1, -1
+ ];
+
+ exports.encode = function (str) {
+ var out, i, length;
+ var c1, c2, c3;
+
+ length = len(str);
+ i = 0;
+ out = [];
+ while(i < length) {
+ c1 = str.charCodeAt(i++) & 0xff;
+ if(i == length)
+ {
+ out.push(encodeChars.charCodeAt(c1 >> 2));
+ out.push(encodeChars.charCodeAt((c1 & 0x3) << 4));
+ out.push("=".charCodeAt(0));
+ out.push("=".charCodeAt(0));
+ break;
+ }
+ c2 = str.charCodeAt(i++);
+ if(i == length)
+ {
+ out.push(encodeChars.charCodeAt(c1 >> 2));
+ out.push(encodeChars.charCodeAt(((c1 & 0x3)<< 4) | ((c2 & 0xF0) >> 4)));
+ out.push(encodeChars.charCodeAt((c2 & 0xF) << 2));
+ out.push("=".charCodeAt(0));
+ break;
+ }
+ c3 = str.charCodeAt(i++);
+ out.push(encodeChars.charCodeAt(c1 >> 2));
+ out.push(encodeChars.charCodeAt(((c1 & 0x3)<< 4) | ((c2 & 0xF0) >> 4)));
+ out.push(encodeChars.charCodeAt(((c2 & 0xF) << 2) | ((c3 & 0xC0) >>6)));
+ out.push(encodeChars.charCodeAt(c3 & 0x3F));
+ }
+
+ var str = "";
+ out.forEach(function(chr) { str += String.fromCharCode(chr) });
+ return str;
+ };
+
+ exports.decode = function (str) {
+ var c1, c2, c3, c4;
+ var i, length, out;
+
+ length = len(str);
+ i = 0;
+ out = [];
+ while(i < length) {
+ /* c1 */
+ do {
+ c1 = decodeChars[str.charCodeAt(i++) & 0xff];
+ } while(i < length && c1 == -1);
+ if(c1 == -1)
+ break;
+
+ /* c2 */
+ do {
+ c2 = decodeChars[str.charCodeAt(i++) & 0xff];
+ } while(i < length && c2 == -1);
+ if(c2 == -1)
+ break;
+
+ out.push(String.fromCharCode((c1 << 2) | ((c2 & 0x30) >> 4)));
+
+ /* c3 */
+ do {
+ c3 = str.charCodeAt(i++) & 0xff;
+ if(c3 == 61)
+ return out.join('');
+ c3 = decodeChars[c3];
+ } while(i < length && c3 == -1);
+ if(c3 == -1)
+ break;
+
+ out.push(String.fromCharCode(((c2 & 0xF) << 4) | ((c3 & 0x3C) >> 2)));
+
+ /* c4 */
+ do {
+ c4 = str.charCodeAt(i++) & 0xff;
+ if(c4 == 61)
+ return out.join('');
+ c4 = decodeChars[c4];
+ } while(i < length && c4 == -1);
+
+ if(c4 == -1)
+ break;
+
+ out.push(String.fromCharCode(((c3 & 0x03) << 6) | c4));
+ }
+
+ return out.join('');
+ };
+
+ var len = function (object) {
+ if (object.length !== undefined) {
+ return object.length;
+ } else if (object.getLength !== undefined) {
+ return object.getLength();
+ } else {
+ return undefined;
+ }
+ };
+})(Base64);
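
Round-trip usage of the new helper, as loaded by document.html above:

    var b64 = Base64.encode("hello"); // "aGVsbG8="
    Base64.decode(b64);               // "hello"
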
diff --git a/share/www/script/couch.js b/share/www/script/couch.js
index 21ea39b3..bcc19652 100644
--- a/share/www/script/couch.js
+++ b/share/www/script/couch.js
@@ -22,17 +22,17 @@ function CouchDB(name, httpHeaders) {
this.last_req = null;
this.request = function(method, uri, requestOptions) {
- requestOptions = requestOptions || {}
- requestOptions.headers = combine(requestOptions.headers, httpHeaders)
- return CouchDB.request(method, uri, requestOptions);
- }
+ requestOptions = requestOptions || {};
+ requestOptions.headers = combine(requestOptions.headers, httpHeaders);
+ return CouchDB.request(method, uri, requestOptions);
+ };
// Creates the database on the server
this.createDb = function() {
this.last_req = this.request("PUT", this.uri);
CouchDB.maybeThrowError(this.last_req);
return JSON.parse(this.last_req.responseText);
- }
+ };
// Deletes the database on the server
this.deleteDb = function() {
@@ -42,7 +42,7 @@ function CouchDB(name, httpHeaders) {
}
CouchDB.maybeThrowError(this.last_req);
return JSON.parse(this.last_req.responseText);
- }
+ };
// Save a document to the database
this.save = function(doc, options) {
@@ -57,7 +57,7 @@ function CouchDB(name, httpHeaders) {
var result = JSON.parse(this.last_req.responseText);
doc._rev = result.rev;
return result;
- }
+ };
// Open a document from the database
this.open = function(docId, options) {
@@ -68,7 +68,7 @@ function CouchDB(name, httpHeaders) {
}
CouchDB.maybeThrowError(this.last_req);
return JSON.parse(this.last_req.responseText);
- }
+ };
// Deletes a document from the database
this.deleteDoc = function(doc) {
@@ -79,7 +79,7 @@ function CouchDB(name, httpHeaders) {
doc._rev = result.rev; //record rev in input document
doc._deleted = true;
return result;
- }
+ };
// Deletes an attachment from a document
this.deleteDocAttachment = function(doc, attachment_name) {
@@ -89,18 +89,18 @@ function CouchDB(name, httpHeaders) {
var result = JSON.parse(this.last_req.responseText);
doc._rev = result.rev; //record rev in input document
return result;
- }
+ };
this.bulkSave = function(docs, options) {
// first prepoulate the UUIDs for new documents
- var newCount = 0
+ var newCount = 0;
for (var i=0; i<docs.length; i++) {
if (docs[i]._id == undefined) {
newCount++;
}
}
var newUuids = CouchDB.newUuids(docs.length);
- var newCount = 0
+ var newCount = 0;
for (var i=0; i<docs.length; i++) {
if (docs[i]._id == undefined) {
docs[i]._id = newUuids.pop();
@@ -121,19 +121,19 @@ function CouchDB(name, httpHeaders) {
CouchDB.maybeThrowError(this.last_req);
var results = JSON.parse(this.last_req.responseText);
for (var i = 0; i < docs.length; i++) {
- if(results[i].rev) {
+ if(results[i] && results[i].rev) {
docs[i]._rev = results[i].rev;
}
}
return results;
}
- }
+ };
this.ensureFullCommit = function() {
this.last_req = this.request("POST", this.uri + "_ensure_full_commit");
CouchDB.maybeThrowError(this.last_req);
return JSON.parse(this.last_req.responseText);
- }
+ };
// Applies the map function to the contents of database and returns the results.
this.query = function(mapFun, reduceFun, options, keys, language) {
@@ -163,7 +163,7 @@ function CouchDB(name, httpHeaders) {
});
CouchDB.maybeThrowError(this.last_req);
return JSON.parse(this.last_req.responseText);
- }
+ };
this.view = function(viewname, options, keys) {
var viewParts = viewname.split('/');
@@ -182,27 +182,21 @@ function CouchDB(name, httpHeaders) {
}
CouchDB.maybeThrowError(this.last_req);
return JSON.parse(this.last_req.responseText);
- }
+ };
// gets information about the database
this.info = function() {
this.last_req = this.request("GET", this.uri);
CouchDB.maybeThrowError(this.last_req);
return JSON.parse(this.last_req.responseText);
- }
+ };
// gets information about a design doc
this.designInfo = function(docid) {
this.last_req = this.request("GET", this.uri + docid + "/_info");
CouchDB.maybeThrowError(this.last_req);
return JSON.parse(this.last_req.responseText);
- }
-
- this.viewCleanup = function() {
- this.last_req = this.request("POST", this.uri + "_view_cleanup");
- CouchDB.maybeThrowError(this.last_req);
- return JSON.parse(this.last_req.responseText);
- }
+ };
this.allDocs = function(options,keys) {
if(!keys) {
@@ -217,37 +211,30 @@ function CouchDB(name, httpHeaders) {
}
CouchDB.maybeThrowError(this.last_req);
return JSON.parse(this.last_req.responseText);
- }
+ };
this.designDocs = function() {
return this.allDocs({startkey:"_design", endkey:"_design0"});
};
- this.changes = function(options,keys) {
- var req = null;
- if(!keys) {
- req = this.request("GET", this.uri + "_changes" + encodeOptions(options));
- } else {
- req = this.request("POST", this.uri + "_changes" + encodeOptions(options), {
- headers: {"Content-Type": "application/json"},
- body: JSON.stringify({keys:keys})
- });
- }
- CouchDB.maybeThrowError(req);
- return JSON.parse(req.responseText);
- }
+ this.changes = function(options) {
+ this.last_req = this.request("GET", this.uri + "_changes"
+ + encodeOptions(options));
+ CouchDB.maybeThrowError(this.last_req);
+ return JSON.parse(this.last_req.responseText);
+ };
this.compact = function() {
this.last_req = this.request("POST", this.uri + "_compact");
CouchDB.maybeThrowError(this.last_req);
return JSON.parse(this.last_req.responseText);
- }
+ };
this.viewCleanup = function() {
this.last_req = this.request("POST", this.uri + "_view_cleanup");
CouchDB.maybeThrowError(this.last_req);
return JSON.parse(this.last_req.responseText);
- }
+ };
this.setDbProperty = function(propId, propValue) {
this.last_req = this.request("PUT", this.uri + propId,{
@@ -255,37 +242,37 @@ function CouchDB(name, httpHeaders) {
});
CouchDB.maybeThrowError(this.last_req);
return JSON.parse(this.last_req.responseText);
- }
+ };
this.getDbProperty = function(propId) {
this.last_req = this.request("GET", this.uri + propId);
CouchDB.maybeThrowError(this.last_req);
return JSON.parse(this.last_req.responseText);
- }
+ };
- this.setAdmins = function(adminsArray) {
- this.last_req = this.request("PUT", this.uri + "_admins",{
- body:JSON.stringify(adminsArray)
+ this.setSecObj = function(secObj) {
+ this.last_req = this.request("PUT", this.uri + "_security",{
+ body:JSON.stringify(secObj)
});
CouchDB.maybeThrowError(this.last_req);
return JSON.parse(this.last_req.responseText);
- }
+ };
- this.getAdmins = function() {
- this.last_req = this.request("GET", this.uri + "_admins");
+ this.getSecObj = function() {
+ this.last_req = this.request("GET", this.uri + "_security");
CouchDB.maybeThrowError(this.last_req);
return JSON.parse(this.last_req.responseText);
- }
+ };
// Convert a options object to an url query string.
// ex: {key:'value',key2:'value2'} becomes '?key="value"&key2="value2"'
function encodeOptions(options) {
- var buf = []
+ var buf = [];
if (typeof(options) == "object" && options !== null) {
for (var name in options) {
- if (!options.hasOwnProperty(name)) { continue };
+ if (!options.hasOwnProperty(name)) { continue; };
var value = options[name];
- if (name == "key" || name == "startkey" || name == "endkey") {
+ if (name == "key" || name == "keys" || name == "startkey" || name == "endkey") {
value = toJSON(value);
}
buf.push(encodeURIComponent(name) + "=" + encodeURIComponent(value));
@@ -321,12 +308,13 @@ function CouchDB(name, httpHeaders) {
// CouchDB.* functions (except for calls to request itself).
// Use this from callers to check HTTP status or header values of requests.
CouchDB.last_req = null;
+CouchDB.urlPrefix = '';
-CouchDB.login = function(username, password) {
+CouchDB.login = function(name, password) {
CouchDB.last_req = CouchDB.request("POST", "/_session", {
headers: {"Content-Type": "application/x-www-form-urlencoded",
"X-CouchDB-WWW-Authenticate": "Cookie"},
- body: "username=" + encodeURIComponent(username) + "&password="
+ body: "name=" + encodeURIComponent(name) + "&password="
+ encodeURIComponent(password)
});
return JSON.parse(CouchDB.last_req.responseText);
@@ -338,7 +326,7 @@ CouchDB.logout = function() {
"X-CouchDB-WWW-Authenticate": "Cookie"}
});
return JSON.parse(CouchDB.last_req.responseText);
-}
+};
CouchDB.session = function(options) {
options = options || {};
@@ -350,7 +338,7 @@ CouchDB.session = function(options) {
CouchDB.user_prefix = "org.couchdb.user:";
CouchDB.prepareUserDoc = function(user_doc, new_password) {
- user_doc._id = user_doc._id || CouchDB.user_prefix + user_doc.username;
+ user_doc._id = user_doc._id || CouchDB.user_prefix + user_doc.name;
if (new_password) {
// handle the password crypto
user_doc.salt = CouchDB.newUuids(1)[0];
@@ -358,7 +346,7 @@ CouchDB.prepareUserDoc = function(user_doc, new_password) {
}
user_doc.type = "user";
if (!user_doc.roles) {
- user_doc.roles = []
+ user_doc.roles = [];
}
return user_doc;
};
@@ -382,7 +370,7 @@ CouchDB.getVersion = function() {
CouchDB.last_req = CouchDB.request("GET", "/");
CouchDB.maybeThrowError(CouchDB.last_req);
return JSON.parse(CouchDB.last_req.responseText).version;
-}
+};
CouchDB.replicate = function(source, target, rep_options) {
rep_options = rep_options || {};
@@ -396,7 +384,7 @@ CouchDB.replicate = function(source, target, rep_options) {
});
CouchDB.maybeThrowError(CouchDB.last_req);
return JSON.parse(CouchDB.last_req.responseText);
-}
+};
CouchDB.newXhr = function() {
if (typeof(XMLHttpRequest) != "undefined") {
@@ -406,11 +394,17 @@ CouchDB.newXhr = function() {
} else {
throw new Error("No XMLHTTPRequest support detected");
}
-}
+};
CouchDB.request = function(method, uri, options) {
- options = options || {};
+ options = typeof(options) == 'object' ? options : {};
+ options.headers = typeof(options.headers) == 'object' ? options.headers : {};
+ options.headers["Content-Type"] = options.headers["Content-Type"] || options.headers["content-type"] || "application/json";
+ options.headers["Accept"] = options.headers["Accept"] || options.headers["accept"] || "application/json";
var req = CouchDB.newXhr();
+ if(uri.substr(0, CouchDB.protocol.length) != CouchDB.protocol) {
+ uri = CouchDB.urlPrefix + uri;
+ }
req.open(method, uri, false);
if (options.headers) {
var headers = options.headers;
@@ -421,7 +415,7 @@ CouchDB.request = function(method, uri, options) {
}
req.send(options.body || "");
return req;
-}
+};
CouchDB.requestStats = function(module, key, test) {
var query_arg = "";
@@ -432,7 +426,7 @@ CouchDB.requestStats = function(module, key, test) {
var url = "/_stats/" + module + "/" + key + query_arg;
var stat = CouchDB.request("GET", url).responseText;
return JSON.parse(stat)[module][key];
-}
+};
CouchDB.uuids_cache = [];
@@ -455,7 +449,7 @@ CouchDB.newUuids = function(n, buf) {
CouchDB.uuids_cache.concat(result.uuids.slice(0, buf));
return result.uuids.slice(buf);
}
-}
+};
CouchDB.maybeThrowError = function(req) {
if (req.status >= 400) {
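
The removed _admins accessors are replaced by methods that target the full
security object introduced elsewhere in this patch. A sketch of the new calls
(the database name and members are illustrative):

    var db = new CouchDB("dbname");
    db.setSecObj({
      admins:  { names: ["bob"],   roles: [] },
      readers: { names: ["alice"], roles: [] }
    });                       // PUT /dbname/_security
    var sec = db.getSecObj(); // GET /dbname/_security
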
diff --git a/share/www/script/couch_test_runner.js b/share/www/script/couch_test_runner.js
index 237f9312..56787e9a 100644
--- a/share/www/script/couch_test_runner.js
+++ b/share/www/script/couch_test_runner.js
@@ -14,6 +14,13 @@
function loadScript(url) {
+ // disallow loading remote URLs
+ if((url.substr(0, 7) == "http://")
+ || (url.substr(0, 8) == "https://")
+ || (url.substr(0, 2) == "//")
+ || (url.substr(0, 5) == "data:")
+ || (url.substr(0, 11) == "javascript:")) {
+ throw "Not loading remote test scripts";
+ }
if (typeof document != "undefined") document.write('<script src="'+url+'"></script>');
};
@@ -21,7 +28,7 @@ function patchTest(fun) {
var source = fun.toString();
var output = "";
var i = 0;
- var testMarker = "T("
+ var testMarker = "T(";
while (i < source.length) {
var testStart = source.indexOf(testMarker, i);
if (testStart == -1) {
@@ -72,6 +79,7 @@ function runTest(button, callback, debug, noSave) {
var row = currentRow = $(button).parents("tr").get(0);
$("td.status", row).removeClass("error").removeClass("failure").removeClass("success");
$("td", row).text("");
+ $("#toolbar li.current").text("Running: "+row.id);
var testFun = couchTests[row.id];
function run() {
numFailures = 0;
@@ -103,6 +111,7 @@ function runTest(button, callback, debug, noSave) {
var duration = new Date().getTime() - start;
$("td.status", row).removeClass("running").addClass(status).text(status);
$("td.duration", row).text(duration + "ms");
+ $("#toolbar li.current").text("Finished: "+row.id);
updateTestsFooter();
currentRow = null;
if (callback) callback();
@@ -138,7 +147,8 @@ function setupAdminParty(fun) {
}
};
$.couch.session({
- success : function(userCtx) {
+ success : function(resp) {
+ var userCtx = resp.userCtx;
if (userCtx.name && userCtx.roles.indexOf("_admin") != -1) {
// admin but not admin party. dialog offering to make admin party
$.showDialog("dialog/_admin_party.html", {
@@ -229,13 +239,13 @@ function saveTestReport(report) {
$.couch.info({success : function(node_info) {
report.node = node_info;
db.saveDoc(report);
- }})
+ }});
};
var createDb = function() {
db.create({success: function() {
db.info({success:saveReport});
}});
- }
+ };
db.info({error: createDb, success:saveReport});
}
};
@@ -299,12 +309,18 @@ function T(arg1, arg2, testName) {
.find("code").text(message).end()
.appendTo($("td.details ol", currentRow));
}
- numFailures += 1
+ numFailures += 1;
}
}
function TEquals(expected, actual, testName) {
- T(equals(expected, actual), "expected '" + expected + "', got '" + actual + "'", testName);
+ T(equals(expected, actual), "expected '" + repr(expected) +
+ "', got '" + repr(actual) + "'", testName);
+}
+
+function TEqualsIgnoreCase(expected, actual, testName) {
+ T(equals(expected.toUpperCase(), actual.toUpperCase()), "expected '" + repr(expected) +
+ "', got '" + repr(actual) + "'", testName);
}
function equals(a,b) {
@@ -326,3 +342,96 @@ function repr(val) {
}
}
+function makeDocs(start, end, templateDoc) {
+ var templateDocSrc = templateDoc ? JSON.stringify(templateDoc) : "{}";
+ if (end === undefined) {
+ end = start;
+ start = 0;
+ }
+ var docs = [];
+ for (var i = start; i < end; i++) {
+ var newDoc = eval("(" + templateDocSrc + ")");
+ newDoc._id = (i).toString();
+ newDoc.integer = i;
+ newDoc.string = (i).toString();
+ docs.push(newDoc);
+ }
+ return docs;
+}
+
+function run_on_modified_server(settings, fun) {
+ try {
+ // set the settings
+ for(var i=0; i < settings.length; i++) {
+ var s = settings[i];
+ var xhr = CouchDB.request("PUT", "/_config/" + s.section + "/" + s.key, {
+ body: JSON.stringify(s.value),
+ headers: {"X-Couch-Persist": "false"}
+ });
+ CouchDB.maybeThrowError(xhr);
+ s.oldValue = xhr.responseText;
+ }
+ // run the thing
+ fun();
+ } finally {
+ // unset the settings
+ for(var j=0; j < i; j++) {
+ var s = settings[j];
+ if(s.oldValue == "\"\"\n") { // unset value
+ CouchDB.request("DELETE", "/_config/" + s.section + "/" + s.key, {
+ headers: {"X-Couch-Persist": "false"}
+ });
+ } else {
+ CouchDB.request("PUT", "/_config/" + s.section + "/" + s.key, {
+ body: s.oldValue,
+ headers: {"X-Couch-Persist": "false"}
+ });
+ }
+ }
+ }
+}
+
+function stringFun(fun) {
+ var string = fun.toSource ? fun.toSource() : "(" + fun.toString() + ")";
+ return string;
+}
+
+function waitForSuccess(fun, tag) {
+ var start = new Date();
+ while(true) {
+ if (new Date() - start > 5000) {
+ throw("timeout: "+tag);
+ } else {
+ try {
+ fun();
+ break;
+ } catch (e) {}
+ // sync http req allow async req to happen
+ CouchDB.request("GET", "/test_suite_db/?tag="+encodeURIComponent(tag));
+ }
+ }
+}
+
+function waitForRestart() {
+ var waiting = true;
+ while (waiting) {
+ try {
+ CouchDB.request("GET", "/");
+ CouchDB.request("GET", "/");
+ waiting = false;
+ } catch(e) {
+ // the request will fail until restart completes
+ }
+ }
+};
+
+function restartServer() {
+ var xhr;
+ try {
+ CouchDB.request("POST", "/_restart");
+ } catch(e) {
+ // this request may sometimes fail
+ }
+ waitForRestart();
+}
+
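
run_on_modified_server, now shared from couch_test_runner.js, applies a list
of config overrides, runs the test body, and restores the old values even if
the body throws. Typical use (the section, key, and value are illustrative):

    run_on_modified_server(
      [{section: "couch_httpd_auth",
        key: "authentication_db",
        value: "test_suite_users"}],
      function() {
        // test body runs with the override in place
      });
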
diff --git a/share/www/script/couch_tests.js b/share/www/script/couch_tests.js
index 5ae4f1d9..0b8f9288 100644
--- a/share/www/script/couch_tests.js
+++ b/share/www/script/couch_tests.js
@@ -13,12 +13,15 @@
// Used by replication test
if (typeof window == 'undefined' || !window) {
CouchDB.host = "127.0.0.1:5984";
+ CouchDB.protocol = "http://";
CouchDB.inBrowser = false;
} else {
CouchDB.host = window.location.host;
CouchDB.inBrowser = true;
+ CouchDB.protocol = window.location.protocol + "//";
}
+CouchDB.urlPrefix = "..";
var couchTests = {};
function loadTest(file) {
@@ -33,7 +36,9 @@ loadTest("attachments.js");
loadTest("attachments_multipart.js");
loadTest("attachment_names.js");
loadTest("attachment_paths.js");
+loadTest("attachment_ranges.js");
loadTest("attachment_views.js");
+loadTest("auth_cache.js");
loadTest("batch_save.js");
loadTest("bulk_docs.js");
loadTest("changes.js");
@@ -57,18 +62,23 @@ loadTest("jsonp.js");
loadTest("large_docs.js");
loadTest("list_views.js");
loadTest("lots_of_docs.js");
+loadTest("method_override.js");
loadTest("multiple_rows.js");
loadScript("script/oauth.js");
loadScript("script/sha1.js");
loadTest("oauth.js");
+loadTest("proxyauth.js");
loadTest("purge.js");
+loadTest("reader_acl.js");
loadTest("recreate_doc.js");
loadTest("reduce.js");
loadTest("reduce_builtin.js");
loadTest("reduce_false.js");
loadTest("reduce_false_temp.js");
loadTest("replication.js");
+loadTest("replicator_db.js");
loadTest("rev_stemming.js");
+loadTest("rewrite.js");
loadTest("security_validation.js");
loadTest("show_documents.js");
loadTest("stats.js");
@@ -79,6 +89,7 @@ loadTest("uuids.js");
loadTest("view_collation.js");
loadTest("view_collation_raw.js");
loadTest("view_conflicts.js");
+loadTest("view_compaction.js");
loadTest("view_errors.js");
loadTest("view_include_docs.js");
loadTest("view_multi_key_all_docs.js");
@@ -87,64 +98,7 @@ loadTest("view_multi_key_temp.js");
loadTest("view_offsets.js");
loadTest("view_pagination.js");
loadTest("view_sandboxing.js");
+loadTest("view_update_seq.js");
loadTest("view_xml.js");
// keep sorted
-
-function makeDocs(start, end, templateDoc) {
- var templateDocSrc = templateDoc ? JSON.stringify(templateDoc) : "{}"
- if (end === undefined) {
- end = start;
- start = 0;
- }
- var docs = []
- for (var i = start; i < end; i++) {
- var newDoc = eval("(" + templateDocSrc + ")");
- newDoc._id = (i).toString();
- newDoc.integer = i;
- newDoc.string = (i).toString();
- docs.push(newDoc)
- }
- return docs;
-}
-
-function run_on_modified_server(settings, fun) {
- try {
- // set the settings
- for(var i=0; i < settings.length; i++) {
- var s = settings[i];
- var xhr = CouchDB.request("PUT", "/_config/" + s.section + "/" + s.key, {
- body: JSON.stringify(s.value),
- headers: {"X-Couch-Persist": "false"}
- });
- CouchDB.maybeThrowError(xhr);
- s.oldValue = xhr.responseText;
- }
- // run the thing
- fun();
- } finally {
- // unset the settings
- for(var j=0; j < i; j++) {
- var s = settings[j];
- if(s.oldValue == "\"\"\n") { // unset value
- CouchDB.request("DELETE", "/_config/" + s.section + "/" + s.key, {
- headers: {"X-Couch-Persist": "false"}
- });
- } else {
- CouchDB.request("PUT", "/_config/" + s.section + "/" + s.key, {
- body: s.oldValue,
- headers: {"X-Couch-Persist": "false"}
- });
- }
- }
- }
-}
-
-function stringFun(fun) {
- var string = fun.toSource ? fun.toSource() : "(" + fun.toString() + ")";
- return string;
-}
-
-function restartServer() {
- CouchDB.request("POST", "/_restart");
-}
diff --git a/share/www/script/futon.browse.js b/share/www/script/futon.browse.js
index 4d06d283..17975de2 100644
--- a/share/www/script/futon.browse.js
+++ b/share/www/script/futon.browse.js
@@ -62,7 +62,8 @@
.find("td.size").text($.futon.formatSize(info.disk_size)).end()
.find("td.count").text(info.doc_count).end()
.find("td.seq").text(info.update_seq);
- }
+ },
+ error : function() {}
});
});
$("#databases tbody tr:odd").addClass("odd");
@@ -96,7 +97,10 @@
// Page class for browse/database.html
CouchDatabasePage: function() {
var urlParts = location.search.substr(1).split("/");
- var dbName = decodeURIComponent(urlParts.shift());
+ var dbName = decodeURIComponent(urlParts.shift());
+
+ var dbNameRegExp = new RegExp("[^a-z0-9\_\$\(\)\+\/\-]", "g");
+ dbName = dbName.replace(dbNameRegExp, "");
$.futon.storage.declareWithPrefix(dbName + ".", {
desc: {},
@@ -106,7 +110,8 @@
reduce: {},
group_level: {defaultValue: 100},
per_page: {defaultValue: 10},
- view: {defaultValue: ""}
+ view: {defaultValue: ""},
+ stale: {defaultValue: false}
});
var viewName = (urlParts.length > 0) ? urlParts.join("/") : null;
@@ -117,7 +122,7 @@
if (viewName) {
this.redirecting = true;
location.href = "database.html?" + encodeURIComponent(dbName) +
- "/" + viewName;
+ "/" + encodeURIComponent(viewName);
}
}
var db = $.couch.db(dbName);
@@ -133,47 +138,33 @@
var templates = {
javascript: "function(doc) {\n emit(null, doc);\n}",
python: "def fun(doc):\n yield None, doc",
- ruby: "{|doc|\n emit(nil, doc);\n}"
+ ruby: "lambda {|doc|\n emit(nil, doc);\n}"
}
this.newDocument = function() {
location.href = "document.html?" + encodeURIComponent(db.name);
}
- this.compactDatabase = function() {
- $.showDialog("dialog/_compact_database.html", {
- submit: function(data, callback) {
- db.compact({
- success: function(resp) {
- callback();
- }
- });
- }
- });
- }
-
- this.viewCleanup = function() {
- $.showDialog("dialog/_view_cleanup.html", {
- submit: function(data, callback) {
- db.viewCleanup({
- success: function(resp) {
- callback();
- }
- });
- }
- });
- }
-
- this.compactView = function() {
- var groupname = page.viewName.substring(8,
- page.viewName.indexOf('/_view'));
- $.showDialog("dialog/_compact_view.html", {
+ this.compactAndCleanup = function() {
+ $.showDialog("dialog/_compact_cleanup.html", {
submit: function(data, callback) {
- db.compactView(groupname, {
- success: function(resp) {
- callback();
- }
- });
+ switch (data.action) {
+ case "compact_database":
+ db.compact({success: function(resp) { callback() }});
+ break;
+ case "compact_views":
+ var idx = page.viewName.indexOf("/_view");
+ if (idx == -1) {
+ alert("Compact Views requires focus on a view!");
+ } else {
+ var groupname = page.viewName.substring(8, idx);
+ db.compactView(groupname, {success: function(resp) { callback() }});
+ }
+ break;
+ case "view_cleanup":
+ db.viewCleanup({success: function(resp) { callback() }});
+ break;
+ }
}
});
}
@@ -197,6 +188,76 @@
});
}
+ this.databaseSecurity = function() {
+ $.showDialog("dialog/_database_security.html", {
+ load : function(d) {
+ db.getDbProperty("_security", {
+ success: function(r) {
+ ["admin", "reader"].forEach(function(key) {
+ var names = [];
+ var roles = [];
+
+ if (r && typeof r[key + "s"] === "object") {
+ if ($.isArray(r[key + "s"]["names"])) {
+ names = r[key + "s"]["names"];
+ }
+ if ($.isArray(r[key + "s"]["roles"])) {
+ roles = r[key + "s"]["roles"];
+ }
+ }
+
+ $("input[name=" + key + "_names]", d).val(JSON.stringify(names));
+ $("input[name=" + key + "_roles]", d).val(JSON.stringify(roles));
+ });
+ }
+ });
+ },
+ // maybe this should be 2 forms
+ submit: function(data, callback) {
+ var errors = {};
+ var secObj = {
+ admins: {
+ names: [],
+ roles: []
+ },
+ readers: {
+ names: [],
+ roles: []
+ }
+ };
+
+ ["admin", "reader"].forEach(function(key) {
+ var names, roles;
+
+ try {
+ names = JSON.parse(data[key + "_names"]);
+ } catch(e) { }
+ try {
+ roles = JSON.parse(data[key + "_roles"]);
+ } catch(e) { }
+
+ if ($.isArray(names)) {
+ secObj[key + "s"]["names"] = names;
+ } else {
+ errors[key + "_names"] = "The " + key +
+ " names must be an array of strings";
+ }
+ if ($.isArray(roles)) {
+ secObj[key + "s"]["roles"] = roles;
+ } else {
+ errors[key + "_roles"] = "The " + key +
+ " roles must be an array of strings";
+ }
+ });
+
+ if ($.isEmptyObject(errors)) {
+ db.setDbProperty("_security", secObj);
+ }
+ callback(errors);
+ }
+ });
+ }
+
this.populateViewEditor = function() {
if (viewName.match(/^_design\//)) {
page.revertViewChanges(function() {
@@ -318,7 +379,8 @@
var path = $.couch.encodeDocId(doc._id) + "/_view/" +
encodeURIComponent(viewNames[j]);
var option = $(document.createElement("option"))
- .attr("value", path).text(viewNames[j]).appendTo(optGroup);
+ .attr("value", path).text(encodeURIComponent(viewNames[j]))
+ .appendTo(optGroup);
if (path == viewName) {
option[0].selected = true;
}
@@ -354,7 +416,7 @@
}
var viewCode = resp.views[localViewName];
page.viewLanguage = resp.language || "javascript";
- $("#language").val(page.viewLanguage);
+ $("#language").val(encodeURIComponent(page.viewLanguage));
page.updateViewEditor(viewCode.map, viewCode.reduce || "");
$("#viewcode button.revert, #viewcode button.save").attr("disabled", "disabled");
page.storedViewCode = viewCode;
@@ -366,7 +428,7 @@
page.updateViewEditor(page.storedViewCode.map,
page.storedViewCode.reduce || "");
page.viewLanguage = page.storedViewLanguage;
- $("#language").val(page.viewLanguage);
+ $("#language").val(encodeURIComponent(page.viewLanguage));
$("#viewcode button.revert, #viewcode button.save").attr("disabled", "disabled");
page.isDirty = false;
if (callback) callback();
@@ -450,7 +512,8 @@
callback({
docid: "Cannot save to " + data.docid +
" because its language is \"" + doc.language +
- "\", not \"" + page.viewLanguage + "\"."
+ "\", not \"" +
+ encodeURIComponent(page.viewLanguage) + "\"."
});
return;
}
@@ -620,7 +683,7 @@
if (row.id) {
$("<td class='key'><a href='document.html?" + encodeURIComponent(db.name) +
"/" + $.couch.encodeDocId(row.id) + "'><strong></strong><br>" +
- "<span class='docid'>ID:&nbsp;" + row.id + "</span></a></td>")
+ "<span class='docid'>ID:&nbsp;" + $.futon.escape(row.id) + "</span></a></td>")
.find("strong").text(key).end()
.appendTo(tr);
} else {
@@ -712,6 +775,11 @@
db.query(currentMapCode, currentReduceCode, page.viewLanguage, options);
} else {
var viewParts = viewName.split('/');
+
+ if ($.futon.storage.get("stale")) {
+ options.stale = "ok";
+ }
+
db.view(viewParts[1] + "/" + viewParts[3], options);
}
}
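For context on the hunk above: when the new "stale" setting is on, view requests carry stale=ok, so CouchDB answers from the last-built index instead of updating it first. A minimal sketch of the resulting call (database and view names here are hypothetical):

    var options = {limit: 10};
    if ($.futon.storage.get("stale")) {
      options.stale = "ok";  // e.g. GET /testdb/_design/blog/_view/recent?stale=ok&limit=10
    }
    $.couch.db("testdb").view("blog/recent", options);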
@@ -774,7 +842,8 @@
$(this).html($("<pre></pre>").html($.futon.formatJSON(page.doc, {html: true})))
.makeEditable({allowEmpty: false,
createInput: function(value) {
- return $("<textarea rows='8' cols='80' spellcheck='false'></textarea>").enableTabInsertion();
+ var rows = value.split("\n").length;
+ return $("<textarea rows='" + rows + "' cols='80' spellcheck='false'></textarea>").enableTabInsertion();
},
prepareInput: function(input) {
$(input).makeResizable({vertical: true});
@@ -798,7 +867,7 @@
return true;
} catch (err) {
var msg = err.message;
- if (msg == "parseJSON" || msg == "JSON.parse") {
+ if (msg == "parseJSON" || msg == "JSON.parse") {
msg = "There is a syntax error in the document.";
}
$("<div class='error'></div>").text(msg).appendTo(this);
@@ -1042,10 +1111,11 @@
row.find("td").makeEditable({acceptOnBlur: false, allowEmpty: true,
createInput: function(value) {
+ value = doc[row.data("name")];
var elem = $(this);
if (elem.find("dl").length > 0 ||
elem.find("code").is(".array, .object") ||
- elem.find("code.string").text().length > 60) {
+ typeof(value) == "string" && (value.length > 60 || value.match(/\n/))) {
return $("<textarea rows='1' cols='40' spellcheck='false'></textarea>");
}
return $("<input type='text' spellcheck='false'>");
@@ -1117,7 +1187,40 @@
html: true,
escapeStrings: false
});
- return $(html);
+ var n = $(html);
+ if (n.text().length > 140) {
+ // This code reduces a long string into a summarized string with a link to expand it.
+ // Someone, somewhere, is doing something nasty with the event after it leaves these handlers.
+ // At this time I can't track down the offender; it might actually be a jQuery propagation issue.
+ var fulltext = n.text();
+ var mintext = n.text().slice(0, 140);
+ var e = $('<a href="#expand">...</a>');
+ var m = $('<a href="#min">X</a>');
+ var expand = function (evt) {
+ n.empty();
+ n.text(fulltext);
+ n.append(m);
+ evt.stopPropagation();
+ evt.stopImmediatePropagation();
+ evt.preventDefault();
+ }
+ var minimize = function (evt) {
+ n.empty();
+ n.text(mintext);
+ // For some reason the old element's handler won't fire after it is removed and re-added.
+ e = $('<a href="#expand">...</a>');
+ e.click(expand);
+ n.append(e);
+ evt.stopPropagation();
+ evt.stopImmediatePropagation();
+ evt.preventDefault();
+ }
+ e.click(expand);
+ n.click(minimize);
+ n.text(mintext);
+ n.append(e);
+ }
+ return n;
}
}
var elem = render(value);
@@ -1179,7 +1282,7 @@
for (var i=0; i < parts.length; i++) {
encoded.push(encodeURIComponent(parts[i]));
};
- return encoded.join('/');
+ return encoded.join('%2f');
}
})(jQuery);
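The new databaseSecurity dialog above round-trips the per-database _security object through getDbProperty/setDbProperty. A hedged sketch of the object it builds (names and roles are hypothetical; an empty array means no restriction for that slot):

    var secObj = {
      admins:  { names: ["bob"],   roles: ["boss"]  },
      readers: { names: ["alice"], roles: ["staff"] }
    };
    $.couch.db("testdb").setDbProperty("_security", secObj, {
      success: function() { /* dialog is dismissed via callback({}) */ }
    });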
diff --git a/share/www/script/futon.format.js b/share/www/script/futon.format.js
index 5feee817..8d9b7f5c 100644
--- a/share/www/script/futon.format.js
+++ b/share/www/script/futon.format.js
@@ -13,6 +13,14 @@
(function($) {
$.futon = $.futon || {};
$.extend($.futon, {
+ escape: function(string) {
+ return string.replace(/&/g, "&amp;")
+ .replace(/</g, "&lt;")
+ .replace(/>/g, "&gt;")
+ .replace(/"/, "&quot;")
+ .replace(/'/, "&#39;")
+ ;
+ },
// JSON pretty printing
formatJSON: function(val, options) {
@@ -24,12 +32,6 @@
}, options || {});
var itemsep = options.linesep.length ? "," + options.linesep : ", ";
- function escape(string) {
- return string.replace(/&/g, "&amp;")
- .replace(/</g, "&lt;")
- .replace(/>/g, "&gt;");
- }
-
function format(val, depth) {
var tab = [];
for (var i = 0; i < options.indent * depth; i++) tab.push("");
@@ -44,7 +46,11 @@
if (type == "string" && !options.escapeStrings) {
retval = indentLines(retval.replace(/\r\n/g, "\n"), tab.substr(options.indent));
} else {
- retval = escape(JSON.stringify(val));
+ if (options.html) {
+ retval = $.futon.escape(JSON.stringify(val));
+ } else {
+ retval = JSON.stringify(val);
+ }
}
if (options.html) {
retval = "<code class='" + type + "'>" + retval + "</code>";
@@ -88,7 +94,7 @@
if (options.quoteKeys) {
keyDisplay = keyDisplay.substr(1, keyDisplay.length - 2);
}
- keyDisplay = "<code class='key'>" + escape(keyDisplay) + "</code>";
+ keyDisplay = "<code class='key'>" + $.futon.escape(keyDisplay) + "</code>";
if (options.quoteKeys) {
keyDisplay = '"' + keyDisplay + '"';
}
@@ -114,7 +120,7 @@
function indentLines(text, tab) {
var lines = text.split("\n");
for (var i in lines) {
- lines[i] = (i > 0 ? tab : "") + escape(lines[i]);
+ lines[i] = (i > 0 ? tab : "") + $.futon.escape(lines[i]);
}
return lines.join("<br>");
}
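With the quote replacements made global (see the fix above), $.futon.escape escapes every occurrence rather than only the first. An illustrative call:

    $.futon.escape('<b>"a" & "b"</b>');
    // => '&lt;b&gt;&quot;a&quot; &amp; &quot;b&quot;&lt;/b&gt;'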
diff --git a/share/www/script/futon.js b/share/www/script/futon.js
index 33c72318..c4647ed1 100644
--- a/share/www/script/futon.js
+++ b/share/www/script/futon.js
@@ -10,13 +10,25 @@
// License for the specific language governing permissions and limitations under
// the License.
+// $$ inspired by @wycats: http://yehudakatz.com/2009/04/20/evented-programming-with-jquery/
+function $$(node) {
+ var data = $(node).data("$$");
+ if (data) {
+ return data;
+ } else {
+ data = {};
+ $(node).data("$$", data);
+ return data;
+ }
+};
+
(function($) {
function Session() {
- function doLogin(username, password, callback) {
+ function doLogin(name, password, callback) {
$.couch.login({
- username : username,
+ name : name,
password : password,
success : function() {
$.futon.session.sidebar();
@@ -24,36 +36,36 @@
},
error : function(code, error, reason) {
$.futon.session.sidebar();
- callback({username : "Error logging in: "+reason});
+ callback({name : "Error logging in: "+reason});
}
});
};
- function doSignup(username, password, callback, runLogin) {
+ function doSignup(name, password, callback, runLogin) {
$.couch.signup({
- username : username
+ name : name
}, password, {
success : function() {
if (runLogin) {
- doLogin(username, password, callback);
+ doLogin(name, password, callback);
} else {
callback();
}
},
error : function(status, error, reason) {
$.futon.session.sidebar();
- if (error = "conflict") {
- callback({username : "Name '"+username+"' is taken"});
+ if (error == "conflict") {
+ callback({name : "Name '"+name+"' is taken"});
} else {
- callback({username : "Signup error: "+reason});
+ callback({name : "Signup error: "+reason});
}
}
});
};
function validateUsernameAndPassword(data, callback) {
- if (!data.username || data.username.length == 0) {
- callback({username: "Please enter a username."});
+ if (!data.name || data.name.length == 0) {
+ callback({name: "Please enter a name."});
return false;
};
if (!data.password || data.password.length == 0) {
@@ -69,11 +81,21 @@
if (!validateUsernameAndPassword(data, callback)) return;
$.couch.config({
success : function() {
- callback();
- doLogin(data.username, data.password, callback);
- doSignup(data.username, null, callback, false);
+ doLogin(data.name, data.password, function(errors) {
+ if(!$.isEmptyObject(errors)) {
+ callback(errors);
+ return;
+ }
+ doSignup(data.name, null, function(errors) {
+ if (errors && errors.name && errors.name.indexOf && errors.name.indexOf("taken") == -1) {
+ callback(errors);
+ } else {
+ callback();
+ }
+ }, false);
+ });
}
- }, "admins", data.username, data.password);
+ }, "admins", data.name, data.password);
}
});
return false;
@@ -83,7 +105,7 @@
$.showDialog("dialog/_login.html", {
submit: function(data, callback) {
if (!validateUsernameAndPassword(data, callback)) return;
- doLogin(data.username, data.password, callback);
+ doLogin(data.name, data.password, callback);
}
});
return false;
@@ -101,7 +123,7 @@
$.showDialog("dialog/_signup.html", {
submit: function(data, callback) {
if (!validateUsernameAndPassword(data, callback)) return;
- doSignup(data.username, data.password, callback, true);
+ doSignup(data.name, data.password, callback, true);
}
});
return false;
@@ -118,9 +140,11 @@
// get users db info?
$("#userCtx span").hide();
$.couch.session({
- success : function(userCtx) {
+ success : function(r) {
+ var userCtx = r.userCtx;
+ $$("#userCtx").userCtx = userCtx;
if (userCtx.name) {
- $("#userCtx .username").text(userCtx.name).attr({href : "/_utils/document.html?users/org.couchdb.user%3A"+userCtx.name});
+ $("#userCtx .name").text(userCtx.name).attr({href : $.couch.urlPrefix + "/_utils/document.html?"+encodeURIComponent(r.info.authentication_db)+"/org.couchdb.user%3A"+encodeURIComponent(userCtx.name)});
if (userCtx.roles.indexOf("_admin") != -1) {
$("#userCtx .loggedinadmin").show();
} else {
@@ -159,7 +183,8 @@
}
this.addDatabase = function(name) {
- var recentDbs = $.futon.storage.get("recent", "").split(",");
+ var current = $.futon.storage.get("recent", "");
+ var recentDbs = current ? current.split(",") : [];
if ($.inArray(name, recentDbs) == -1) {
recentDbs.unshift(name);
if (recentDbs.length > 10) recentDbs.length = 10;
@@ -170,7 +195,8 @@
this.removeDatabase = function(name) {
// remove database from recent databases list
- var recentDbs = $.futon.storage.get("recent").split(",");
+ var current = $.futon.storage.get("recent", "");
+ var recentDbs = current ? current.split(",") : [];
var recentIdx = $.inArray(name, recentDbs);
if (recentIdx >= 0) {
recentDbs.splice(recentIdx, 1);
@@ -189,9 +215,10 @@
recentDbs.sort();
$.each(recentDbs, function(idx, name) {
if (name) {
+ name = encodeURIComponent(name);
$("#dbs").append("<li>" +
"<button class='remove' title='Remove from list' value='" + name + "'></button>" +
- "<a href='database.html?" + encodeURIComponent(name) + "' title='" + name + "'>" + name +
+ "<a href='database.html?" + name + "' title='" + name + "'>" + name +
"</a></li>");
}
});
@@ -308,11 +335,22 @@
return callback(decl);
}
+ function windowName() {
+ try {
+ return JSON.parse(window.name || "{}");
+ } catch (e) {
+ return {};
+ }
+ }
+
+ // add a prefix to cookie names so settings stay separate between ports
+ var cookiePrefix = location.port + "_";
+
var handlers = {
"cookie": {
get: function(name) {
- var nameEq = name + "=";
+ var nameEq = cookiePrefix + name + "=";
var parts = document.cookie.split(';');
for (var i = 0; i < parts.length; i++) {
var part = parts[i].replace(/^\s+/, "");
@@ -324,27 +362,28 @@
set: function(name, value) {
var date = new Date();
date.setTime(date.getTime() + 14*24*60*60*1000); // two weeks
- document.cookie = name + "=" + escape(value) + "; expires=" +
- date.toGMTString();
+ document.cookie = cookiePrefix + name + "=" + escape(value) +
+ "; expires=" + date.toGMTString();
},
del: function(name) {
var date = new Date();
date.setTime(date.getTime() - 24*60*60*1000); // yesterday
- document.cookie = name + "=; expires=" + date.toGMTString();
+ document.cookie = cookiePrefix + name + "=" +
+ "; expires=" + date.toGMTString();
}
},
"window": {
get: function(name) {
- return JSON.parse(window.name || "{}")[name];
+ return windowName()[name];
},
set: function(name, value) {
- var obj = JSON.parse(window.name || "{}");
+ var obj = windowName();
obj[name] = value || null;
window.name = JSON.stringify(obj);
},
del: function(name) {
- var obj = JSON.parse(window.name || "{}");
+ var obj = windowName();
delete obj[name];
window.name = JSON.stringify(obj);
}
@@ -354,6 +393,7 @@
}
+ $.couch.urlPrefix = "..";
$.futon = $.futon || {};
$.extend($.futon, {
navigation: new Navigation(),
@@ -361,31 +401,21 @@
storage: new Storage()
});
- $.fn.addPlaceholder = function(text) {
- return this.each(function() {
+ $.fn.addPlaceholder = function() {
+ if (this[0] && "placeholder" in document.createElement("input")) {
+ return this; // found native placeholder support
+ }
+ return this.live('focusin', function() {
var input = $(this);
- if ($.browser.safari) {
- input.attr("placeholder", text);
- return;
+ if (input.val() === input.attr("placeholder")) {
+ input.removeClass("placeholder").val("");
}
- input.blur(function() {
- if ($.trim(input.val()) == "") {
- input.addClass("placeholder").val(text);
- } else {
- input.removeClass("placeholder");
- }
- }).triggerHandler("blur")
- input.focus(function() {
- if (input.is(".placeholder")) {
- input.val("").removeClass("placeholder");
- }
- });
- $(this.form).submit(function() {
- if (input.is(".placeholder")) {
- input.val("");
- }
- });
- });
+ }).live("focusout", function() {
+ var input = $(this);
+ if (input.val() === "") {
+ input.val(input.attr("placeholder")).addClass("placeholder");
+ }
+ }).trigger("focusout");
}
$.fn.enableTabInsertion = function(chars) {
@@ -421,6 +451,8 @@
// doing this as early as possible prevents flickering
$(document.body).addClass("fullwidth");
}
+ $("input[placeholder]").addPlaceholder();
+
$.get("_sidebar.html", function(resp) {
$("#wrap").append(resp)
.find("#sidebar-toggle").click(function(e) {
@@ -442,7 +474,6 @@
$("#version").text(info.version);
}
});
-
});
});
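Two of the storage changes above are easy to miss: cookie names now carry a port prefix so Futon instances on different ports keep separate settings, and window.name is parsed defensively. A small sketch of the guard (the window.name value is hypothetical):

    // Cookie written by Futon on port 5984:  5984_recent=db1%2Cdb2
    // The same setting on port 5985:         5985_recent=other
    window.name = "not json";          // e.g. left over from another site in this tab
    var settings;
    try {
      settings = JSON.parse(window.name || "{}");
    } catch (e) {
      settings = {};                   // windowName() falls back instead of throwing
    }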
diff --git a/share/www/script/jquery.couch.js b/share/www/script/jquery.couch.js
index 7e8a0236..8d86b679 100644
--- a/share/www/script/jquery.couch.js
+++ b/share/www/script/jquery.couch.js
@@ -28,7 +28,7 @@
return;
}
var user_prefix = "org.couchdb.user:";
- user_doc._id = user_doc._id || user_prefix + user_doc.username;
+ user_doc._id = user_doc._id || user_prefix + user_doc.name;
if (new_password) {
// handle the password crypto
user_doc.salt = $.couch.newUUID();
@@ -36,17 +36,18 @@
}
user_doc.type = "user";
if (!user_doc.roles) {
- user_doc.roles = []
+ user_doc.roles = [];
}
return user_doc;
};
- uuidCache = [];
+ var uuidCache = [];
$.extend($.couch, {
+ urlPrefix: '',
activeTasks: function(options) {
ajax(
- {url: "/_active_tasks"},
+ {url: this.urlPrefix + "/_active_tasks"},
options,
"Active task status could not be retrieved"
);
@@ -54,14 +55,14 @@
allDbs: function(options) {
ajax(
- {url: "/_all_dbs"},
+ {url: this.urlPrefix + "/_all_dbs"},
options,
"An error occurred retrieving the list of all databases"
);
},
config: function(options, section, option, value) {
- var req = {url: "/_config/"};
+ var req = {url: this.urlPrefix + "/_config/"};
if (section) {
req.url += encodeURIComponent(section) + "/";
if (option) {
@@ -85,7 +86,7 @@
session: function(options) {
options = options || {};
$.ajax({
- type: "GET", url: "/_session",
+ type: "GET", url: this.urlPrefix + "/_session",
complete: function(req) {
var resp = $.httpData(req, "json");
if (req.status == 200) {
@@ -102,7 +103,7 @@
userDb : function(callback) {
$.couch.session({
success : function(resp) {
- var userDb = $.couch.db(resp.info.user_db);
+ var userDb = $.couch.db(resp.info.authentication_db);
callback(userDb);
}
});
@@ -114,14 +115,14 @@
user_doc = prepareUserDoc(user_doc, password);
$.couch.userDb(function(db) {
db.saveDoc(user_doc, options);
- })
+ });
},
login: function(options) {
options = options || {};
$.ajax({
- type: "POST", url: "/_session", dataType: "json",
- data: {username: options.username, password: options.password},
+ type: "POST", url: this.urlPrefix + "/_session", dataType: "json",
+ data: {name: options.name, password: options.password},
complete: function(req) {
var resp = $.httpData(req, "json");
if (req.status == 200) {
@@ -137,7 +138,7 @@
logout: function(options) {
options = options || {};
$.ajax({
- type: "DELETE", url: "/_session", dataType: "json",
+ type: "DELETE", url: this.urlPrefix + "/_session", dataType: "json",
username : "_", password : "_",
complete: function(req) {
var resp = $.httpData(req, "json");
@@ -152,10 +153,28 @@
});
},
- db: function(name) {
+ db: function(name, db_opts) {
+ db_opts = db_opts || {};
+ var rawDocs = {};
+ function maybeApplyVersion(doc) {
+ if (doc._id && doc._rev && rawDocs[doc._id] && rawDocs[doc._id].rev == doc._rev) {
+ // todo: can we use commonjs require here?
+ if (typeof Base64 == "undefined") {
+ alert("please include /_utils/script/base64.js in the page for base64 support");
+ return false;
+ } else {
+ doc._attachments = doc._attachments || {};
+ doc._attachments["rev-"+doc._rev.split("-")[0]] = {
+ content_type :"application/json",
+ data : Base64.encode(rawDocs[doc._id].raw)
+ };
+ return true;
+ }
+ }
+ };
return {
name: name,
- uri: "/" + encodeURIComponent(name) + "/",
+ uri: this.urlPrefix + "/" + encodeURIComponent(name) + "/",
compact: function(options) {
$.extend(options, {successStatus: 202});
@@ -211,9 +230,79 @@
"Database information could not be retrieved"
);
},
+ changes: function(since, options) {
+ options = options || {};
+ // set up the promise object within a closure for this handler
+ var timeout = 100, db = this, active = true,
+ listeners = [],
+ promise = {
+ onChange : function(fun) {
+ listeners.push(fun);
+ },
+ stop : function() {
+ active = false;
+ }
+ };
+ // call each listener when there is a change
+ function triggerListeners(resp) {
+ $.each(listeners, function() {
+ this(resp);
+ });
+ };
+ // when there is a change, call any listeners, then check for another change
+ options.success = function(resp) {
+ timeout = 100;
+ if (active) {
+ since = resp.last_seq;
+ triggerListeners(resp);
+ getChangesSince();
+ };
+ };
+ options.error = function() {
+ if (active) {
+ setTimeout(getChangesSince, timeout);
+ timeout = timeout * 2;
+ }
+ };
+ // actually make the changes request
+ function getChangesSince() {
+ var opts = $.extend({heartbeat : 10 * 1000}, options, {
+ feed : "longpoll",
+ since : since
+ });
+ ajax(
+ {url: db.uri + "_changes"+encodeOptions(opts)},
+ options,
+ "Error connecting to "+db.uri+"/_changes."
+ );
+ }
+ // start the first request
+ if (since) {
+ getChangesSince();
+ } else {
+ db.info({
+ success : function(info) {
+ since = info.update_seq;
+ getChangesSince();
+ }
+ });
+ }
+ return promise;
+ },
allDocs: function(options) {
- ajax(
- {url: this.uri + "_all_docs" + encodeOptions(options)},
+ var type = "GET";
+ var data = null;
+ if (options["keys"]) {
+ type = "POST";
+ var keys = options["keys"];
+ delete options["keys"];
+ data = toJSON({ "keys": keys });
+ }
+ ajax({
+ type: type,
+ data: data,
+ url: this.uri + "_all_docs" + encodeOptions(options)
+ },
options,
"An error occurred retrieving a list of all documents"
);
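A hedged usage sketch of the two additions above: the long-poll changes feed returns a promise-like handle, and allDocs switches to a POST with a JSON body when a keys array is supplied (database and document ids are hypothetical):

    var feed = $.couch.db("testdb").changes(null, {include_docs: true});
    feed.onChange(function(resp) {
      $.each(resp.results, function() { console.log("changed:", this.id); });
    });
    // later: feed.stop();

    $.couch.db("testdb").allDocs({
      keys: ["doc1", "doc2"],   // triggers POST /testdb/_all_docs with {"keys": [...]}
      success: function(resp) { console.log(resp.rows.length + " rows"); }
    });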
@@ -246,10 +335,32 @@
}
});
} else {
- alert("please provide an eachApp function for allApps()");
+ alert("Please provide an eachApp function for allApps()");
}
},
openDoc: function(docId, options, ajaxOptions) {
+ options = options || {};
+ if (db_opts.attachPrevRev || options.attachPrevRev) {
+ $.extend(options, {
+ beforeSuccess : function(req, doc) {
+ rawDocs[doc._id] = {
+ rev : doc._rev,
+ raw : req.responseText
+ };
+ }
+ });
+ } else {
+ $.extend(options, {
+ beforeSuccess : function(req, doc) {
+ if (doc["jquery.couch.attachPrevRev"]) {
+ rawDocs[doc._id] = {
+ rev : doc._rev,
+ raw : req.responseText
+ };
+ }
+ }
+ });
+ }
ajax({url: this.uri + encodeDocId(docId) + encodeOptions(options)},
options,
"The document could not be retrieved",
@@ -258,6 +369,8 @@
},
saveDoc: function(doc, options) {
options = options || {};
+ var db = this;
+ var beforeSend = fullCommit(options);
if (doc._id === undefined) {
var method = "POST";
var uri = this.uri;
@@ -265,16 +378,28 @@
var method = "PUT";
var uri = this.uri + encodeDocId(doc._id);
}
+ var versioned = maybeApplyVersion(doc);
$.ajax({
type: method, url: uri + encodeOptions(options),
contentType: "application/json",
dataType: "json", data: toJSON(doc),
+ beforeSend : beforeSend,
complete: function(req) {
var resp = $.httpData(req, "json");
- if (req.status == 201) {
+ if (req.status == 200 || req.status == 201 || req.status == 202) {
doc._id = resp.id;
doc._rev = resp.rev;
- if (options.success) options.success(resp);
+ if (versioned) {
+ db.openDoc(doc._id, {
+ attachPrevRev : true,
+ success : function(d) {
+ doc._attachments = d._attachments;
+ if (options.success) options.success(resp);
+ }
+ });
+ } else {
+ if (options.success) options.success(resp);
+ }
} else if (options.error) {
options.error(req.status, resp.error, resp.reason);
} else {
@@ -284,10 +409,12 @@
});
},
bulkSave: function(docs, options) {
- $.extend(options, {successStatus: 201});
+ var beforeSend = fullCommit(options);
+ $.extend(options, {successStatus: 201, beforeSend : beforeSend});
ajax({
type: "POST",
- url: this.uri + "_bulk_docs" + encodeOptions(options)
+ url: this.uri + "_bulk_docs" + encodeOptions(options),
+ contentType: "application/json", data: toJSON(docs)
},
options,
"The documents could not be saved"
@@ -304,13 +431,27 @@
"The document could not be deleted"
);
},
- copyDoc: function(doc, options, ajaxOptions) {
+ bulkRemove: function(docs, options){
+ docs.docs = $.each(
+ docs.docs, function(i, doc){
+ doc._deleted = true;
+ }
+ );
+ $.extend(options, {successStatus: 201});
+ ajax({
+ type: "POST",
+ url: this.uri + "_bulk_docs" + encodeOptions(options),
+ data: toJSON(docs)
+ },
+ options,
+ "The documents could not be deleted"
+ );
+ },
+ copyDoc: function(docId, options, ajaxOptions) {
ajaxOptions = $.extend(ajaxOptions, {
complete: function(req) {
var resp = $.httpData(req, "json");
if (req.status == 201) {
- doc._id = resp.id;
- doc._rev = resp.rev;
if (options.success) options.success(resp);
} else if (options.error) {
options.error(req.status, resp.error, resp.reason);
@@ -321,9 +462,7 @@
});
ajax({
type: "COPY",
- url: this.uri +
- encodeDocId(doc._id) +
- encodeOptions({rev: doc._rev})
+ url: this.uri + encodeDocId(docId)
},
options,
"The document could not be copied",
@@ -350,6 +489,26 @@
"An error occurred querying the database"
);
},
+ list: function(list, view, options) {
+ var list = list.split('/');
+ var options = options || {};
+ var type = 'GET';
+ var data = null;
+ if (options['keys']) {
+ type = 'POST';
+ var keys = options['keys'];
+ delete options['keys'];
+ data = toJSON({'keys': keys });
+ }
+ ajax({
+ type: type,
+ data: data,
+ url: this.uri + '_design/' + list[0] +
+ '/_list/' + list[1] + '/' + view + encodeOptions(options)
+ },
+ options, 'An error occurred accessing the list'
+ );
+ },
view: function(name, options) {
var name = name.split('/');
var options = options || {};
@@ -369,6 +528,25 @@
},
options, "An error occurred accessing the view"
);
+ },
+ getDbProperty: function(propName, options, ajaxOptions) {
+ ajax({url: this.uri + propName + encodeOptions(options)},
+ options,
+ "The property could not be retrieved",
+ ajaxOptions
+ );
+ },
+
+ setDbProperty: function(propName, propValue, options, ajaxOptions) {
+ ajax({
+ type: "PUT",
+ url: this.uri + propName + encodeOptions(options),
+ data : JSON.stringify(propValue)
+ },
+ options,
+ "The property could not be updated",
+ ajaxOptions
+ );
}
};
},
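A minimal sketch of the new property accessors, using _revs_limit (a standard per-database property) as the example:

    var db = $.couch.db("testdb");
    db.getDbProperty("_revs_limit", {
      success: function(limit) { console.log("revs limit is", limit); }
    });
    db.setDbProperty("_revs_limit", 1500, {
      success: function() { console.log("revs limit updated"); }
    });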
@@ -377,19 +555,23 @@
info: function(options) {
ajax(
- {url: "/"},
+ {url: this.urlPrefix + "/"},
options,
"Server information could not be retrieved"
);
},
- replicate: function(source, target, options) {
+ replicate: function(source, target, ajaxOptions, repOpts) {
+ repOpts = $.extend({source: source, target: target}, repOpts);
+ if (repOpts.continuous) {
+ ajaxOptions.successStatus = 202;
+ }
ajax({
- type: "POST", url: "/_replicate",
- data: JSON.stringify({source: source, target: target}),
+ type: "POST", url: this.urlPrefix + "/_replicate",
+ data: JSON.stringify(repOpts),
contentType: "application/json"
},
- options,
+ ajaxOptions,
"Replication failed"
);
},
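The reworked replicate() takes replication options as a fourth argument and expects HTTP 202 for continuous replications. A hedged sketch (source and target names are hypothetical):

    $.couch.replicate("testdb", "http://example.com:5984/testdb-mirror", {
      success: function(resp) { console.log("replication started:", resp); }
    }, {
      continuous: true,     // server answers 202 Accepted and keeps replicating
      create_target: true   // standard _replicate option, included for illustration
    });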
@@ -399,9 +581,9 @@
cacheNum = 1;
}
if (!uuidCache.length) {
- ajax({url: "/_uuids", data: {count: cacheNum}, async: false}, {
+ ajax({url: this.urlPrefix + "/_uuids", data: {count: cacheNum}, async: false}, {
success: function(resp) {
- uuidCache = resp.uuids
+ uuidCache = resp.uuids;
}
},
"Failed to retrieve UUID batch."
@@ -409,21 +591,40 @@
}
return uuidCache.shift();
}
-
});
function ajax(obj, options, errorMessage, ajaxOptions) {
options = $.extend({successStatus: 200}, options);
+ ajaxOptions = $.extend({contentType: "application/json"}, ajaxOptions);
errorMessage = errorMessage || "Unknown error";
-
$.ajax($.extend($.extend({
- type: "GET", dataType: "json",
+ type: "GET", dataType: "json", cache : !$.browser.msie,
+ beforeSend: function(xhr){
+ if(ajaxOptions && ajaxOptions.headers){
+ for (var header in ajaxOptions.headers){
+ xhr.setRequestHeader(header, ajaxOptions.headers[header]);
+ }
+ }
+ },
complete: function(req) {
- var resp = $.httpData(req, "json");
+ try {
+ var resp = $.httpData(req, "json");
+ } catch(e) {
+ if (options.error) {
+ options.error(req.status, req, e);
+ } else {
+ alert(errorMessage + ": " + e);
+ }
+ return;
+ }
+ if (options.ajaxStart) {
+ options.ajaxStart(resp);
+ }
if (req.status == options.successStatus) {
+ if (options.beforeSuccess) options.beforeSuccess(req, resp);
if (options.success) options.success(resp);
} else if (options.error) {
- options.error(req.status, resp.error, resp.reason);
+ options.error(req.status, resp && resp.error || errorMessage, resp && resp.reason || "no response");
} else {
alert(errorMessage + ": " + resp.reason);
}
@@ -431,13 +632,24 @@
}, obj), ajaxOptions));
}
+ function fullCommit(options) {
+ var options = options || {};
+ if (typeof options.ensure_full_commit !== "undefined") {
+ var commit = options.ensure_full_commit;
+ delete options.ensure_full_commit;
+ return function(xhr) {
+ xhr.setRequestHeader("X-Couch-Full-Commit", commit.toString());
+ };
+ }
+ };
+
// Convert a options object to an url query string.
// ex: {key:'value',key2:'value2'} becomes '?key="value"&key2="value2"'
function encodeOptions(options) {
var buf = [];
if (typeof(options) === "object" && options !== null) {
for (var name in options) {
- if ($.inArray(name, ["error", "success"]) >= 0)
+ if ($.inArray(name, ["error", "success", "beforeSuccess", "ajaxStart"]) >= 0)
continue;
var value = options[name];
if ($.inArray(name, ["key", "startkey", "endkey"]) >= 0) {
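Tying the two helpers above together: ensure_full_commit is peeled out of the options and sent as an X-Couch-Full-Commit header, while encodeOptions continues to JSON-encode key/startkey/endkey into the query string. A hedged sketch:

    // encodeOptions({startkey: "a", endkey: "b", limit: 5})
    //   => '?startkey=%22a%22&endkey=%22b%22&limit=5'
    $.couch.db("testdb").saveDoc(doc, {
      ensure_full_commit: true,   // becomes the header X-Couch-Full-Commit: true
      success: function(resp) { console.log("committed as", resp.rev); }
    });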
diff --git a/share/www/script/jquery.dialog.js b/share/www/script/jquery.dialog.js
index 41dd08e6..02c0c497 100644
--- a/share/www/script/jquery.dialog.js
+++ b/share/www/script/jquery.dialog.js
@@ -79,7 +79,7 @@
data[this.name] = this.value; // file inputs need special handling
});
options.submit(data, function callback(errors) {
- if (errors == null || errors == {}) {
+ if ($.isEmptyObject(errors)) {
dismiss();
} else {
for (var name in errors) {
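The one-line fix above matters because the old test could never succeed: JavaScript objects compare by reference, so even a freshly created empty errors object failed errors == {} and the dialog was never dismissed. $.isEmptyObject inspects the object's own keys instead:

    ({} == {});                     // false - two distinct object references
    $.isEmptyObject({});            // true  -> dismiss()
    $.isEmptyObject({name: "bad"}); // false -> render the field errors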
diff --git a/share/www/script/jquery.form.js b/share/www/script/jquery.form.js
index e35ef0ef..dde39427 100644
--- a/share/www/script/jquery.form.js
+++ b/share/www/script/jquery.form.js
@@ -1,44 +1,42 @@
/*
* jQuery Form Plugin
- * version: 2.18 (06-JAN-2009)
- * @requires jQuery v1.2.2 or later
+ * version: 2.36 (07-NOV-2009)
+ * @requires jQuery v1.2.6 or later
*
* Examples and documentation at: http://malsup.com/jquery/form/
* Dual licensed under the MIT and GPL licenses:
* http://www.opensource.org/licenses/mit-license.php
* http://www.gnu.org/licenses/gpl.html
- *
- * Revision: $Id$
*/
;(function($) {
/*
- Usage Note:
- -----------
- Do not use both ajaxSubmit and ajaxForm on the same form. These
- functions are intended to be exclusive. Use ajaxSubmit if you want
- to bind your own submit handler to the form. For example,
-
- $(document).ready(function() {
- $('#myForm').bind('submit', function() {
- $(this).ajaxSubmit({
- target: '#output'
- });
- return false; // <-- important!
- });
- });
-
- Use ajaxForm when you want the plugin to manage all the event binding
- for you. For example,
-
- $(document).ready(function() {
- $('#myForm').ajaxForm({
- target: '#output'
- });
- });
-
- When using ajaxForm, the ajaxSubmit function will be invoked for you
- at the appropriate time.
+ Usage Note:
+ -----------
+ Do not use both ajaxSubmit and ajaxForm on the same form. These
+ functions are intended to be exclusive. Use ajaxSubmit if you want
+ to bind your own submit handler to the form. For example,
+
+ $(document).ready(function() {
+ $('#myForm').bind('submit', function() {
+ $(this).ajaxSubmit({
+ target: '#output'
+ });
+ return false; // <-- important!
+ });
+ });
+
+ Use ajaxForm when you want the plugin to manage all the event binding
+ for you. For example,
+
+ $(document).ready(function() {
+ $('#myForm').ajaxForm({
+ target: '#output'
+ });
+ });
+
+ When using ajaxForm, the ajaxSubmit function will be invoked for you
+ at the appropriate time.
*/
/**
@@ -46,292 +44,319 @@
* an HTML form using AJAX.
*/
$.fn.ajaxSubmit = function(options) {
- // fast fail if nothing selected (http://dev.jquery.com/ticket/2752)
- if (!this.length) {
- log('ajaxSubmit: skipping submit process - no element selected');
- return this;
- }
-
- if (typeof options == 'function')
- options = { success: options };
-
- options = $.extend({
- url: this.attr('action') || window.location.toString(),
- type: this.attr('method') || 'GET'
- }, options || {});
-
- // hook for manipulating the form data before it is extracted;
- // convenient for use with rich editors like tinyMCE or FCKEditor
- var veto = {};
- this.trigger('form-pre-serialize', [this, options, veto]);
- if (veto.veto) {
- log('ajaxSubmit: submit vetoed via form-pre-serialize trigger');
- return this;
- }
-
- // provide opportunity to alter form data before it is serialized
- if (options.beforeSerialize && options.beforeSerialize(this, options) === false) {
- log('ajaxSubmit: submit aborted via beforeSerialize callback');
- return this;
- }
-
- var a = this.formToArray(options.semantic);
- if (options.data) {
- options.extraData = options.data;
- for (var n in options.data) {
- if(options.data[n] instanceof Array) {
- for (var k in options.data[n])
- a.push( { name: n, value: options.data[n][k] } )
- }
- else
- a.push( { name: n, value: options.data[n] } );
- }
- }
-
- // give pre-submit callback an opportunity to abort the submit
- if (options.beforeSubmit && options.beforeSubmit(a, this, options) === false) {
- log('ajaxSubmit: submit aborted via beforeSubmit callback');
- return this;
- }
-
- // fire vetoable 'validate' event
- this.trigger('form-submit-validate', [a, this, options, veto]);
- if (veto.veto) {
- log('ajaxSubmit: submit vetoed via form-submit-validate trigger');
- return this;
- }
-
- var q = $.param(a);
-
- if (options.type.toUpperCase() == 'GET') {
- options.url += (options.url.indexOf('?') >= 0 ? '&' : '?') + q;
- options.data = null; // data is null for 'get'
- }
- else
- options.data = q; // data is the query string for 'post'
-
- var $form = this, callbacks = [];
- if (options.resetForm) callbacks.push(function() { $form.resetForm(); });
- if (options.clearForm) callbacks.push(function() { $form.clearForm(); });
-
- // perform a load on the target only if dataType is not provided
- if (!options.dataType && options.target) {
- var oldSuccess = options.success || function(){};
- callbacks.push(function(data) {
- $(options.target).html(data).each(oldSuccess, arguments);
- });
- }
- else if (options.success)
- callbacks.push(options.success);
-
- options.success = function(data, status) {
- for (var i=0, max=callbacks.length; i < max; i++)
- callbacks[i].apply(options, [data, status, $form]);
- };
-
- // are there files to upload?
- var files = $('input:file', this).fieldValue();
- var found = false;
- for (var j=0; j < files.length; j++)
- if (files[j])
- found = true;
-
- // options.iframe allows user to force iframe mode
- if (options.iframe || found) {
- // hack to fix Safari hang (thanks to Tim Molendijk for this)
- // see: http://groups.google.com/group/jquery-dev/browse_thread/thread/36395b7ab510dd5d
- if ($.browser.safari && options.closeKeepAlive)
- $.get(options.closeKeepAlive, fileUpload);
- else
- fileUpload();
- }
+ // fast fail if nothing selected (http://dev.jquery.com/ticket/2752)
+ if (!this.length) {
+ log('ajaxSubmit: skipping submit process - no element selected');
+ return this;
+ }
+
+ if (typeof options == 'function')
+ options = { success: options };
+
+ var url = $.trim(this.attr('action'));
+ if (url) {
+ // clean url (don't include hash value)
+ url = (url.match(/^([^#]+)/)||[])[1];
+ }
+ url = url || window.location.href || '';
+
+ options = $.extend({
+ url: url,
+ type: this.attr('method') || 'GET',
+ iframeSrc: /^https/i.test(window.location.href || '') ? 'javascript:false' : 'about:blank'
+ }, options || {});
+
+ // hook for manipulating the form data before it is extracted;
+ // convenient for use with rich editors like tinyMCE or FCKEditor
+ var veto = {};
+ this.trigger('form-pre-serialize', [this, options, veto]);
+ if (veto.veto) {
+ log('ajaxSubmit: submit vetoed via form-pre-serialize trigger');
+ return this;
+ }
+
+ // provide opportunity to alter form data before it is serialized
+ if (options.beforeSerialize && options.beforeSerialize(this, options) === false) {
+ log('ajaxSubmit: submit aborted via beforeSerialize callback');
+ return this;
+ }
+
+ var a = this.formToArray(options.semantic);
+ if (options.data) {
+ options.extraData = options.data;
+ for (var n in options.data) {
+ if(options.data[n] instanceof Array) {
+ for (var k in options.data[n])
+ a.push( { name: n, value: options.data[n][k] } );
+ }
+ else
+ a.push( { name: n, value: options.data[n] } );
+ }
+ }
+
+ // give pre-submit callback an opportunity to abort the submit
+ if (options.beforeSubmit && options.beforeSubmit(a, this, options) === false) {
+ log('ajaxSubmit: submit aborted via beforeSubmit callback');
+ return this;
+ }
+
+ // fire vetoable 'validate' event
+ this.trigger('form-submit-validate', [a, this, options, veto]);
+ if (veto.veto) {
+ log('ajaxSubmit: submit vetoed via form-submit-validate trigger');
+ return this;
+ }
+
+ var q = $.param(a);
+
+ if (options.type.toUpperCase() == 'GET') {
+ options.url += (options.url.indexOf('?') >= 0 ? '&' : '?') + q;
+ options.data = null; // data is null for 'get'
+ }
+ else
+ options.data = q; // data is the query string for 'post'
+
+ var $form = this, callbacks = [];
+ if (options.resetForm) callbacks.push(function() { $form.resetForm(); });
+ if (options.clearForm) callbacks.push(function() { $form.clearForm(); });
+
+ // perform a load on the target only if dataType is not provided
+ if (!options.dataType && options.target) {
+ var oldSuccess = options.success || function(){};
+ callbacks.push(function(data) {
+ $(options.target).html(data).each(oldSuccess, arguments);
+ });
+ }
+ else if (options.success)
+ callbacks.push(options.success);
+
+ options.success = function(data, status) {
+ for (var i=0, max=callbacks.length; i < max; i++)
+ callbacks[i].apply(options, [data, status, $form]);
+ };
+
+ // are there files to upload?
+ var files = $('input:file', this).fieldValue();
+ var found = false;
+ for (var j=0; j < files.length; j++)
+ if (files[j])
+ found = true;
+
+ var multipart = false;
+// var mp = 'multipart/form-data';
+// multipart = ($form.attr('enctype') == mp || $form.attr('encoding') == mp);
+
+ // options.iframe allows user to force iframe mode
+ // 06-NOV-09: now defaulting to iframe mode if file input is detected
+ if ((files.length && options.iframe !== false) || options.iframe || found || multipart) {
+ // hack to fix Safari hang (thanks to Tim Molendijk for this)
+ // see: http://groups.google.com/group/jquery-dev/browse_thread/thread/36395b7ab510dd5d
+ if (options.closeKeepAlive)
+ $.get(options.closeKeepAlive, fileUpload);
+ else
+ fileUpload();
+ }
else
- $.ajax(options);
-
- // fire 'notify' event
- this.trigger('form-submit-notify', [this, options]);
- return this;
-
-
- // private function for handling file uploads (hat tip to YAHOO!)
- function fileUpload() {
- var form = $form[0];
-
- if ($(':input[name=submit]', form).length) {
- alert('Error: Form elements must not be named "submit".');
- return;
- }
-
- var opts = $.extend({}, $.ajaxSettings, options);
- var s = jQuery.extend(true, {}, $.extend(true, {}, $.ajaxSettings), opts);
-
- var id = 'jqFormIO' + (new Date().getTime());
- var $io = $('<iframe id="' + id + '" name="' + id + '" />');
- var io = $io[0];
-
- if ($.browser.msie || $.browser.opera)
- io.src = 'javascript:false;document.write("");';
- $io.css({ position: 'absolute', top: '-1000px', left: '-1000px' });
-
- var xhr = { // mock object
- aborted: 0,
- responseText: null,
- responseXML: null,
- status: 0,
- statusText: 'n/a',
- getAllResponseHeaders: function() {},
- getResponseHeader: function() {},
- setRequestHeader: function() {},
- abort: function() {
- this.aborted = 1;
- $io.attr('src','about:blank'); // abort op in progress
- }
- };
-
- var g = opts.global;
- // trigger ajax global events so that activity/block indicators work like normal
- if (g && ! $.active++) $.event.trigger("ajaxStart");
- if (g) $.event.trigger("ajaxSend", [xhr, opts]);
+ $.ajax(options);
+
+ // fire 'notify' event
+ this.trigger('form-submit-notify', [this, options]);
+ return this;
+
+
+ // private function for handling file uploads (hat tip to YAHOO!)
+ function fileUpload() {
+ var form = $form[0];
+
+ if ($(':input[name=submit]', form).length) {
+ alert('Error: Form elements must not be named "submit".');
+ return;
+ }
+
+ var opts = $.extend({}, $.ajaxSettings, options);
+ var s = $.extend(true, {}, $.extend(true, {}, $.ajaxSettings), opts);
+
+ var id = 'jqFormIO' + (new Date().getTime());
+ var $io = $('<iframe id="' + id + '" name="' + id + '" src="'+ opts.iframeSrc +'" />');
+ var io = $io[0];
+
+ $io.css({ position: 'absolute', top: '-1000px', left: '-1000px' });
+
+ var xhr = { // mock object
+ aborted: 0,
+ responseText: null,
+ responseXML: null,
+ status: 0,
+ statusText: 'n/a',
+ getAllResponseHeaders: function() {},
+ getResponseHeader: function() {},
+ setRequestHeader: function() {},
+ abort: function() {
+ this.aborted = 1;
+ $io.attr('src', opts.iframeSrc); // abort op in progress
+ }
+ };
+
+ var g = opts.global;
+ // trigger ajax global events so that activity/block indicators work like normal
+ if (g && ! $.active++) $.event.trigger("ajaxStart");
+ if (g) $.event.trigger("ajaxSend", [xhr, opts]);
if (s.beforeSend && s.beforeSend(xhr, s) === false) {
- s.global && jQuery.active--;
+ s.global && $.active--;
+ return;
+ }
+ if (xhr.aborted)
return;
- }
- if (xhr.aborted)
- return;
-
- var cbInvoked = 0;
- var timedOut = 0;
-
- // add submitting element to data if we know it
- var sub = form.clk;
- if (sub) {
- var n = sub.name;
- if (n && !sub.disabled) {
- options.extraData = options.extraData || {};
- options.extraData[n] = sub.value;
- if (sub.type == "image") {
- options.extraData[name+'.x'] = form.clk_x;
- options.extraData[name+'.y'] = form.clk_y;
- }
- }
- }
-
- // take a breath so that pending repaints get some cpu time before the upload starts
- setTimeout(function() {
- // make sure form attrs are set
- var t = $form.attr('target'), a = $form.attr('action');
- $form.attr({
- target: id,
- method: 'POST',
- action: opts.url
- });
-
- // ie borks in some cases when setting encoding
- if (! options.skipEncodingOverride) {
- $form.attr({
- encoding: 'multipart/form-data',
- enctype: 'multipart/form-data'
- });
- }
-
- // support timout
- if (opts.timeout)
- setTimeout(function() { timedOut = true; cb(); }, opts.timeout);
-
- // add "extra" data to form if provided in options
- var extraInputs = [];
- try {
- if (options.extraData)
- for (var n in options.extraData)
- extraInputs.push(
- $('<input type="hidden" name="'+n+'" value="'+options.extraData[n]+'" />')
- .appendTo(form)[0]);
-
- // add iframe to doc and submit the form
- $io.appendTo('body');
- io.attachEvent ? io.attachEvent('onload', cb) : io.addEventListener('load', cb, false);
- form.submit();
- }
- finally {
- // reset attrs and remove "extra" input elements
- $form.attr('action', a);
- t ? $form.attr('target', t) : $form.removeAttr('target');
- $(extraInputs).remove();
- }
- }, 10);
-
- function cb() {
- if (cbInvoked++) return;
-
- io.detachEvent ? io.detachEvent('onload', cb) : io.removeEventListener('load', cb, false);
-
- var operaHack = 0;
- var ok = true;
- try {
- if (timedOut) throw 'timeout';
- // extract the server response from the iframe
- var data, doc;
-
- doc = io.contentWindow ? io.contentWindow.document : io.contentDocument ? io.contentDocument : io.document;
-
- if (doc.body == null && !operaHack && $.browser.opera) {
- // In Opera 9.2.x the iframe DOM is not always traversable when
- // the onload callback fires so we give Opera 100ms to right itself
- operaHack = 1;
- cbInvoked--;
- setTimeout(cb, 100);
- return;
- }
-
- xhr.responseText = doc.body ? doc.body.innerHTML : null;
- xhr.responseXML = doc.XMLDocument ? doc.XMLDocument : doc;
- xhr.getResponseHeader = function(header){
- var headers = {'content-type': opts.dataType};
- return headers[header];
- };
-
- if (opts.dataType == 'json' || opts.dataType == 'script') {
- var ta = doc.getElementsByTagName('textarea')[0];
- xhr.responseText = ta ? ta.value : xhr.responseText;
- }
- else if (opts.dataType == 'xml' && !xhr.responseXML && xhr.responseText != null) {
- xhr.responseXML = toXml(xhr.responseText);
- }
- data = $.httpData(xhr, opts.dataType);
- }
- catch(e){
- ok = false;
- $.handleError(opts, xhr, 'error', e);
- }
-
- // ordering of these callbacks/triggers is odd, but that's how $.ajax does it
- if (ok) {
- opts.success(data, 'success');
- if (g) $.event.trigger("ajaxSuccess", [xhr, opts]);
- }
- if (g) $.event.trigger("ajaxComplete", [xhr, opts]);
- if (g && ! --$.active) $.event.trigger("ajaxStop");
- if (opts.complete) opts.complete(xhr, ok ? 'success' : 'error');
-
- // clean up
- setTimeout(function() {
- $io.remove();
- xhr.responseXML = null;
- }, 100);
- };
-
- function toXml(s, doc) {
- if (window.ActiveXObject) {
- doc = new ActiveXObject('Microsoft.XMLDOM');
- doc.async = 'false';
- doc.loadXML(s);
- }
- else
- doc = (new DOMParser()).parseFromString(s, 'text/xml');
- return (doc && doc.documentElement && doc.documentElement.tagName != 'parsererror') ? doc : null;
- };
- };
+
+ var cbInvoked = 0;
+ var timedOut = 0;
+
+ // add submitting element to data if we know it
+ var sub = form.clk;
+ if (sub) {
+ var n = sub.name;
+ if (n && !sub.disabled) {
+ options.extraData = options.extraData || {};
+ options.extraData[n] = sub.value;
+ if (sub.type == "image") {
+ options.extraData[name+'.x'] = form.clk_x;
+ options.extraData[name+'.y'] = form.clk_y;
+ }
+ }
+ }
+
+ // take a breath so that pending repaints get some cpu time before the upload starts
+ setTimeout(function() {
+ // make sure form attrs are set
+ var t = $form.attr('target'), a = $form.attr('action');
+
+ // update form attrs in IE friendly way
+ form.setAttribute('target',id);
+ if (form.getAttribute('method') != 'POST')
+ form.setAttribute('method', 'POST');
+ if (form.getAttribute('action') != opts.url)
+ form.setAttribute('action', opts.url);
+
+ // ie borks in some cases when setting encoding
+ if (! options.skipEncodingOverride) {
+ $form.attr({
+ encoding: 'multipart/form-data',
+ enctype: 'multipart/form-data'
+ });
+ }
+
+ // support timeout
+ if (opts.timeout)
+ setTimeout(function() { timedOut = true; cb(); }, opts.timeout);
+
+ // add "extra" data to form if provided in options
+ var extraInputs = [];
+ try {
+ if (options.extraData)
+ for (var n in options.extraData)
+ extraInputs.push(
+ $('<input type="hidden" name="'+n+'" value="'+options.extraData[n]+'" />')
+ .appendTo(form)[0]);
+
+ // add iframe to doc and submit the form
+ $io.appendTo('body');
+ io.attachEvent ? io.attachEvent('onload', cb) : io.addEventListener('load', cb, false);
+ form.submit();
+ }
+ finally {
+ // reset attrs and remove "extra" input elements
+ form.setAttribute('action',a);
+ t ? form.setAttribute('target', t) : $form.removeAttr('target');
+ $(extraInputs).remove();
+ }
+ }, 10);
+
+ var domCheckCount = 50;
+
+ function cb() {
+ if (cbInvoked++) return;
+
+ io.detachEvent ? io.detachEvent('onload', cb) : io.removeEventListener('load', cb, false);
+
+ var ok = true;
+ try {
+ if (timedOut) throw 'timeout';
+ // extract the server response from the iframe
+ var data, doc;
+
+ doc = io.contentWindow ? io.contentWindow.document : io.contentDocument ? io.contentDocument : io.document;
+
+ var isXml = opts.dataType == 'xml' || doc.XMLDocument || $.isXMLDoc(doc);
+ log('isXml='+isXml);
+ if (!isXml && (doc.body == null || doc.body.innerHTML == '')) {
+ if (--domCheckCount) {
+ // in some browsers (Opera) the iframe DOM is not always traversable when
+ // the onload callback fires, so we loop a bit to accommodate
+ cbInvoked = 0;
+ setTimeout(cb, 100);
+ return;
+ }
+ log('Could not access iframe DOM after 50 tries.');
+ return;
+ }
+
+ xhr.responseText = doc.body ? doc.body.innerHTML : null;
+ xhr.responseXML = doc.XMLDocument ? doc.XMLDocument : doc;
+ xhr.getResponseHeader = function(header){
+ var headers = {'content-type': opts.dataType};
+ return headers[header];
+ };
+
+ if (opts.dataType == 'json' || opts.dataType == 'script') {
+ // see if user embedded response in textarea
+ var ta = doc.getElementsByTagName('textarea')[0];
+ if (ta)
+ xhr.responseText = ta.value;
+ else {
+ // account for browsers injecting pre around json response
+ var pre = doc.getElementsByTagName('pre')[0];
+ if (pre)
+ xhr.responseText = pre.innerHTML;
+ }
+ }
+ else if (opts.dataType == 'xml' && !xhr.responseXML && xhr.responseText != null) {
+ xhr.responseXML = toXml(xhr.responseText);
+ }
+ data = $.httpData(xhr, opts.dataType);
+ }
+ catch(e){
+ ok = false;
+ $.handleError(opts, xhr, 'error', e);
+ }
+
+ // ordering of these callbacks/triggers is odd, but that's how $.ajax does it
+ if (ok) {
+ opts.success(data, 'success');
+ if (g) $.event.trigger("ajaxSuccess", [xhr, opts]);
+ }
+ if (g) $.event.trigger("ajaxComplete", [xhr, opts]);
+ if (g && ! --$.active) $.event.trigger("ajaxStop");
+ if (opts.complete) opts.complete(xhr, ok ? 'success' : 'error');
+
+ // clean up
+ setTimeout(function() {
+ $io.remove();
+ xhr.responseXML = null;
+ }, 100);
+ };
+
+ function toXml(s, doc) {
+ if (window.ActiveXObject) {
+ doc = new ActiveXObject('Microsoft.XMLDOM');
+ doc.async = 'false';
+ doc.loadXML(s);
+ }
+ else
+ doc = (new DOMParser()).parseFromString(s, 'text/xml');
+ return (doc && doc.documentElement && doc.documentElement.tagName != 'parsererror') ? doc : null;
+ };
+ };
};
/**
@@ -340,9 +365,9 @@ $.fn.ajaxSubmit = function(options) {
* The advantages of using this method instead of ajaxSubmit() are:
*
* 1: This method will include coordinates for <input type="image" /> elements (if the element
- * is used to submit the form).
+ * is used to submit the form).
* 2. This method will include the submit element's name/value data (for the element that was
- * used to submit the form).
+ * used to submit the form).
* 3. This method binds the submit() method to the form for you.
*
* The options argument for ajaxForm works exactly as it does for ajaxSubmit. ajaxForm merely
@@ -350,40 +375,42 @@ $.fn.ajaxSubmit = function(options) {
* the form itself.
*/
$.fn.ajaxForm = function(options) {
- return this.ajaxFormUnbind().bind('submit.form-plugin',function() {
- $(this).ajaxSubmit(options);
- return false;
- }).each(function() {
- // store options in hash
- $(":submit,input:image", this).bind('click.form-plugin',function(e) {
- var form = this.form;
- form.clk = this;
- if (this.type == 'image') {
- if (e.offsetX != undefined) {
- form.clk_x = e.offsetX;
- form.clk_y = e.offsetY;
- } else if (typeof $.fn.offset == 'function') { // try to use dimensions plugin
- var offset = $(this).offset();
- form.clk_x = e.pageX - offset.left;
- form.clk_y = e.pageY - offset.top;
- } else {
- form.clk_x = e.pageX - this.offsetLeft;
- form.clk_y = e.pageY - this.offsetTop;
- }
- }
- // clear form vars
- setTimeout(function() { form.clk = form.clk_x = form.clk_y = null; }, 10);
- });
- });
+ return this.ajaxFormUnbind().bind('submit.form-plugin', function() {
+ $(this).ajaxSubmit(options);
+ return false;
+ }).bind('click.form-plugin', function(e) {
+ var target = e.target;
+ var $el = $(target);
+ if (!($el.is(":submit,input:image"))) {
+ // is this a child element of the submit el? (ex: a span within a button)
+ var t = $el.closest(':submit');
+ if (t.length == 0)
+ return;
+ target = t[0];
+ }
+ var form = this;
+ form.clk = target;
+ if (target.type == 'image') {
+ if (e.offsetX != undefined) {
+ form.clk_x = e.offsetX;
+ form.clk_y = e.offsetY;
+ } else if (typeof $.fn.offset == 'function') { // try to use dimensions plugin
+ var offset = $el.offset();
+ form.clk_x = e.pageX - offset.left;
+ form.clk_y = e.pageY - offset.top;
+ } else {
+ form.clk_x = e.pageX - target.offsetLeft;
+ form.clk_y = e.pageY - target.offsetTop;
+ }
+ }
+ // clear form vars
+ setTimeout(function() { form.clk = form.clk_x = form.clk_y = null; }, 100);
+ });
};
// ajaxFormUnbind unbinds the event handlers that were bound by ajaxForm
$.fn.ajaxFormUnbind = function() {
- this.unbind('submit.form-plugin');
- return this.each(function() {
- $(":submit,input:image", this).unbind('click.form-plugin');
- });
-
+ return this.unbind('submit.form-plugin click.form-plugin');
};
/**
@@ -398,44 +425,44 @@ $.fn.ajaxFormUnbind = function() {
* ajaxSubmit() and ajaxForm() methods.
*/
$.fn.formToArray = function(semantic) {
- var a = [];
- if (this.length == 0) return a;
-
- var form = this[0];
- var els = semantic ? form.getElementsByTagName('*') : form.elements;
- if (!els) return a;
- for(var i=0, max=els.length; i < max; i++) {
- var el = els[i];
- var n = el.name;
- if (!n) continue;
-
- if (semantic && form.clk && el.type == "image") {
- // handle image inputs on the fly when semantic == true
- if(!el.disabled && form.clk == el)
- a.push({name: n+'.x', value: form.clk_x}, {name: n+'.y', value: form.clk_y});
- continue;
- }
-
- var v = $.fieldValue(el, true);
- if (v && v.constructor == Array) {
- for(var j=0, jmax=v.length; j < jmax; j++)
- a.push({name: n, value: v[j]});
- }
- else if (v !== null && typeof v != 'undefined')
- a.push({name: n, value: v});
- }
-
- if (!semantic && form.clk) {
- // input type=='image' are not found in elements array! handle them here
- var inputs = form.getElementsByTagName("input");
- for(var i=0, max=inputs.length; i < max; i++) {
- var input = inputs[i];
- var n = input.name;
- if(n && !input.disabled && input.type == "image" && form.clk == input)
- a.push({name: n+'.x', value: form.clk_x}, {name: n+'.y', value: form.clk_y});
- }
- }
- return a;
+ var a = [];
+ if (this.length == 0) return a;
+
+ var form = this[0];
+ var els = semantic ? form.getElementsByTagName('*') : form.elements;
+ if (!els) return a;
+ for(var i=0, max=els.length; i < max; i++) {
+ var el = els[i];
+ var n = el.name;
+ if (!n) continue;
+
+ if (semantic && form.clk && el.type == "image") {
+ // handle image inputs on the fly when semantic == true
+ if(!el.disabled && form.clk == el) {
+ a.push({name: n, value: $(el).val()});
+ a.push({name: n+'.x', value: form.clk_x}, {name: n+'.y', value: form.clk_y});
+ }
+ continue;
+ }
+
+ var v = $.fieldValue(el, true);
+ if (v && v.constructor == Array) {
+ for(var j=0, jmax=v.length; j < jmax; j++)
+ a.push({name: n, value: v[j]});
+ }
+ else if (v !== null && typeof v != 'undefined')
+ a.push({name: n, value: v});
+ }
+
+ if (!semantic && form.clk) {
+ // inputs of type 'image' are not found in the elements array! handle it here
+ var $input = $(form.clk), input = $input[0], n = input.name;
+ if (n && !input.disabled && input.type == 'image') {
+ a.push({name: n, value: $input.val()});
+ a.push({name: n+'.x', value: form.clk_x}, {name: n+'.y', value: form.clk_y});
+ }
+ }
+ return a;
};
/**
@@ -443,8 +470,8 @@ $.fn.formToArray = function(semantic) {
* in the format: name1=value1&amp;name2=value2
*/
$.fn.formSerialize = function(semantic) {
- //hand off to jQuery.param for proper encoding
- return $.param(this.formToArray(semantic));
+ //hand off to jQuery.param for proper encoding
+ return $.param(this.formToArray(semantic));
};
/**
@@ -452,32 +479,32 @@ $.fn.formSerialize = function(semantic) {
* This method will return a string in the format: name1=value1&amp;name2=value2
*/
$.fn.fieldSerialize = function(successful) {
- var a = [];
- this.each(function() {
- var n = this.name;
- if (!n) return;
- var v = $.fieldValue(this, successful);
- if (v && v.constructor == Array) {
- for (var i=0,max=v.length; i < max; i++)
- a.push({name: n, value: v[i]});
- }
- else if (v !== null && typeof v != 'undefined')
- a.push({name: this.name, value: v});
- });
- //hand off to jQuery.param for proper encoding
- return $.param(a);
+ var a = [];
+ this.each(function() {
+ var n = this.name;
+ if (!n) return;
+ var v = $.fieldValue(this, successful);
+ if (v && v.constructor == Array) {
+ for (var i=0,max=v.length; i < max; i++)
+ a.push({name: n, value: v[i]});
+ }
+ else if (v !== null && typeof v != 'undefined')
+ a.push({name: this.name, value: v});
+ });
+ //hand off to jQuery.param for proper encoding
+ return $.param(a);
};
/**
* Returns the value(s) of the element in the matched set. For example, consider the following form:
*
* <form><fieldset>
- * <input name="A" type="text" />
- * <input name="A" type="text" />
- * <input name="B" type="checkbox" value="B1" />
- * <input name="B" type="checkbox" value="B2"/>
- * <input name="C" type="radio" value="C1" />
- * <input name="C" type="radio" value="C2" />
+ * <input name="A" type="text" />
+ * <input name="A" type="text" />
+ * <input name="B" type="checkbox" value="B1" />
+ * <input name="B" type="checkbox" value="B2"/>
+ * <input name="C" type="radio" value="C1" />
+ * <input name="C" type="radio" value="C2" />
* </fieldset></form>
*
* var v = $(':text').fieldValue();
@@ -504,50 +531,51 @@ $.fn.fieldSerialize = function(successful) {
* for each element is returned.
*
* Note: This method *always* returns an array. If no valid value can be determined the
- * array will be empty, otherwise it will contain one or more values.
+ * array will be empty, otherwise it will contain one or more values.
*/
$.fn.fieldValue = function(successful) {
- for (var val=[], i=0, max=this.length; i < max; i++) {
- var el = this[i];
- var v = $.fieldValue(el, successful);
- if (v === null || typeof v == 'undefined' || (v.constructor == Array && !v.length))
- continue;
- v.constructor == Array ? $.merge(val, v) : val.push(v);
- }
- return val;
+ for (var val=[], i=0, max=this.length; i < max; i++) {
+ var el = this[i];
+ var v = $.fieldValue(el, successful);
+ if (v === null || typeof v == 'undefined' || (v.constructor == Array && !v.length))
+ continue;
+ v.constructor == Array ? $.merge(val, v) : val.push(v);
+ }
+ return val;
};
/**
* Returns the value of the field element.
*/
$.fieldValue = function(el, successful) {
- var n = el.name, t = el.type, tag = el.tagName.toLowerCase();
- if (typeof successful == 'undefined') successful = true;
-
- if (successful && (!n || el.disabled || t == 'reset' || t == 'button' ||
- (t == 'checkbox' || t == 'radio') && !el.checked ||
- (t == 'submit' || t == 'image') && el.form && el.form.clk != el ||
- tag == 'select' && el.selectedIndex == -1))
- return null;
-
- if (tag == 'select') {
- var index = el.selectedIndex;
- if (index < 0) return null;
- var a = [], ops = el.options;
- var one = (t == 'select-one');
- var max = (one ? index+1 : ops.length);
- for(var i=(one ? index : 0); i < max; i++) {
- var op = ops[i];
- if (op.selected) {
- // extra pain for IE...
- var v = $.browser.msie && !(op.attributes['value'].specified) ? op.text : op.value;
- if (one) return v;
- a.push(v);
- }
- }
- return a;
- }
- return el.value;
+ var n = el.name, t = el.type, tag = el.tagName.toLowerCase();
+ if (typeof successful == 'undefined') successful = true;
+
+ if (successful && (!n || el.disabled || t == 'reset' || t == 'button' ||
+ (t == 'checkbox' || t == 'radio') && !el.checked ||
+ (t == 'submit' || t == 'image') && el.form && el.form.clk != el ||
+ tag == 'select' && el.selectedIndex == -1))
+ return null;
+
+ if (tag == 'select') {
+ var index = el.selectedIndex;
+ if (index < 0) return null;
+ var a = [], ops = el.options;
+ var one = (t == 'select-one');
+ var max = (one ? index+1 : ops.length);
+ for(var i=(one ? index : 0); i < max; i++) {
+ var op = ops[i];
+ if (op.selected) {
+ var v = op.value;
+ if (!v) // extra pain for IE...
+ v = (op.attributes && op.attributes['value'] && !(op.attributes['value'].specified)) ? op.text : op.value;
+ if (one) return v;
+ a.push(v);
+ }
+ }
+ return a;
+ }
+ return el.value;
};
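// Usage sketch (assumes a <select id="colors"> exists): returns null for an
// unsuccessful control, an array for a multi-select, a string otherwise:
//   var v = $.fieldValue(document.getElementById('colors'));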
/**
@@ -559,46 +587,46 @@ $.fieldValue = function(el, successful) {
 * - button elements will *not* be affected
*/
$.fn.clearForm = function() {
- return this.each(function() {
- $('input,select,textarea', this).clearFields();
- });
+ return this.each(function() {
+ $('input,select,textarea', this).clearFields();
+ });
};
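// Usage sketch (id "myForm" assumed): $('#myForm').clearForm(); empties
// text fields, unchecks checkboxes/radios, and deselects every option.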
/**
* Clears the selected form elements.
*/
$.fn.clearFields = $.fn.clearInputs = function() {
- return this.each(function() {
- var t = this.type, tag = this.tagName.toLowerCase();
- if (t == 'text' || t == 'password' || tag == 'textarea')
- this.value = '';
- else if (t == 'checkbox' || t == 'radio')
- this.checked = false;
- else if (tag == 'select')
- this.selectedIndex = -1;
- });
+ return this.each(function() {
+ var t = this.type, tag = this.tagName.toLowerCase();
+ if (t == 'text' || t == 'password' || tag == 'textarea')
+ this.value = '';
+ else if (t == 'checkbox' || t == 'radio')
+ this.checked = false;
+ else if (tag == 'select')
+ this.selectedIndex = -1;
+ });
};
/**
* Resets the form data. Causes all form elements to be reset to their original value.
*/
$.fn.resetForm = function() {
- return this.each(function() {
- // guard against an input with the name of 'reset'
- // note that IE reports the reset function as an 'object'
- if (typeof this.reset == 'function' || (typeof this.reset == 'object' && !this.reset.nodeType))
- this.reset();
- });
+ return this.each(function() {
+ // guard against an input with the name of 'reset'
+ // note that IE reports the reset function as an 'object'
+ if (typeof this.reset == 'function' || (typeof this.reset == 'object' && !this.reset.nodeType))
+ this.reset();
+ });
};
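// Usage sketch: $('#myForm').resetForm(); unlike clearForm(), this restores
// each control to the value it had when the page loaded.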
/**
* Enables or disables any matching elements.
*/
$.fn.enable = function(b) {
- if (b == undefined) b = true;
- return this.each(function() {
- this.disabled = !b
- });
+ if (b == undefined) b = true;
+ return this.each(function() {
+ this.disabled = !b;
+ });
};
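// Usage sketch: $('#myForm :input').enable(false); // disables every field
//               $('#myForm :input').enable();      // re-enables them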
/**
@@ -606,27 +634,27 @@ $.fn.enable = function(b) {
* selects/deselects and matching option elements.
*/
$.fn.selected = function(select) {
- if (select == undefined) select = true;
- return this.each(function() {
- var t = this.type;
- if (t == 'checkbox' || t == 'radio')
- this.checked = select;
- else if (this.tagName.toLowerCase() == 'option') {
- var $sel = $(this).parent('select');
- if (select && $sel[0] && $sel[0].type == 'select-one') {
- // deselect all other options
- $sel.find('option').selected(false);
- }
- this.selected = select;
- }
- });
+ if (select == undefined) select = true;
+ return this.each(function() {
+ var t = this.type;
+ if (t == 'checkbox' || t == 'radio')
+ this.checked = select;
+ else if (this.tagName.toLowerCase() == 'option') {
+ var $sel = $(this).parent('select');
+ if (select && $sel[0] && $sel[0].type == 'select-one') {
+ // deselect all other options
+ $sel.find('option').selected(false);
+ }
+ this.selected = select;
+ }
+ });
};
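// Usage sketch (hypothetical id): $('#opt-red').selected(); checks the
// box/radio, or selects the option and deselects its siblings when the
// parent is a single select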
// helper fn for console logging
// set $.fn.ajaxSubmit.debug to true to enable debug logging
function log() {
- if ($.fn.ajaxSubmit.debug && window.console && window.console.log)
- window.console.log('[jquery.form] ' + Array.prototype.join.call(arguments,''));
+ if ($.fn.ajaxSubmit.debug && window.console && window.console.log)
+ window.console.log('[jquery.form] ' + Array.prototype.join.call(arguments,''));
};
-})(jQuery);
\ No newline at end of file
+})(jQuery);
diff --git a/share/www/script/jquery.js b/share/www/script/jquery.js
index 92635743..fff67764 100644
--- a/share/www/script/jquery.js
+++ b/share/www/script/jquery.js
@@ -1,975 +1,1462 @@
/*!
- * jQuery JavaScript Library v1.3.2
+ * jQuery JavaScript Library v1.4.2
* http://jquery.com/
*
- * Copyright (c) 2009 John Resig
- * Dual licensed under the MIT and GPL licenses.
- * http://docs.jquery.com/License
+ * Copyright 2010, John Resig
+ * Dual licensed under the MIT or GPL Version 2 licenses.
+ * http://jquery.org/license
*
- * Date: 2009-02-19 17:34:21 -0500 (Thu, 19 Feb 2009)
- * Revision: 6246
+ * Includes Sizzle.js
+ * http://sizzlejs.com/
+ * Copyright 2010, The Dojo Foundation
+ * Released under the MIT, BSD, and GPL Licenses.
+ *
+ * Date: Sat Feb 13 22:33:48 2010 -0500
*/
-(function(){
+(function( window, undefined ) {
+
+// Define a local copy of jQuery
+var jQuery = function( selector, context ) {
+ // The jQuery object is actually just the init constructor 'enhanced'
+ return new jQuery.fn.init( selector, context );
+ },
-var
- // Will speed up references to window, and allows munging its name.
- window = this,
- // Will speed up references to undefined, and allows munging its name.
- undefined,
// Map over jQuery in case of overwrite
_jQuery = window.jQuery,
+
// Map over the $ in case of overwrite
_$ = window.$,
- jQuery = window.jQuery = window.$ = function( selector, context ) {
- // The jQuery object is actually just the init constructor 'enhanced'
- return new jQuery.fn.init( selector, context );
- },
+ // Use the correct document according to the window argument (sandbox)
+ document = window.document,
+
+ // A central reference to the root jQuery(document)
+ rootjQuery,
// A simple way to check for HTML strings or ID strings
// (both of which we optimize for)
- quickExpr = /^[^<]*(<(.|\s)+>)[^>]*$|^#([\w-]+)$/,
+ quickExpr = /^[^<]*(<[\w\W]+>)[^>]*$|^#([\w-]+)$/,
+
// Is it a simple selector
- isSimple = /^.[^:#\[\.,]*$/;
+ isSimple = /^.[^:#\[\.,]*$/,
+
+ // Check if a string has a non-whitespace character in it
+ rnotwhite = /\S/,
+
+ // Used for trimming whitespace
+ rtrim = /^(\s|\u00A0)+|(\s|\u00A0)+$/g,
+
+ // Match a standalone tag
+ rsingleTag = /^<(\w+)\s*\/?>(?:<\/\1>)?$/,
+
+ // Keep a UserAgent string for use with jQuery.browser
+ userAgent = navigator.userAgent,
+
+ // For matching the engine and version of the browser
+ browserMatch,
+
+ // Has the ready event already been bound?
+ readyBound = false,
+
+ // The functions to execute on DOM ready
+ readyList = [],
+
+ // The ready event handler
+ DOMContentLoaded,
+
+ // Save a reference to some core methods
+ toString = Object.prototype.toString,
+ hasOwnProperty = Object.prototype.hasOwnProperty,
+ push = Array.prototype.push,
+ slice = Array.prototype.slice,
+ indexOf = Array.prototype.indexOf;
jQuery.fn = jQuery.prototype = {
init: function( selector, context ) {
- // Make sure that a selection was provided
- selector = selector || document;
+ var match, elem, ret, doc;
+
+ // Handle $(""), $(null), or $(undefined)
+ if ( !selector ) {
+ return this;
+ }
// Handle $(DOMElement)
if ( selector.nodeType ) {
- this[0] = selector;
+ this.context = this[0] = selector;
+ this.length = 1;
+ return this;
+ }
+
+ // The body element only exists once, optimize finding it
+ if ( selector === "body" && !context ) {
+ this.context = document;
+ this[0] = document.body;
+ this.selector = "body";
this.length = 1;
- this.context = selector;
return this;
}
+
// Handle HTML strings
if ( typeof selector === "string" ) {
 // Are we dealing with an HTML string or an ID?
- var match = quickExpr.exec( selector );
+ match = quickExpr.exec( selector );
// Verify a match, and that no context was specified for #id
if ( match && (match[1] || !context) ) {
// HANDLE: $(html) -> $(array)
- if ( match[1] )
- selector = jQuery.clean( [ match[1] ], context );
+ if ( match[1] ) {
+ doc = (context ? context.ownerDocument || context : document);
+
+ // If a single string is passed in and it's a single tag
+ // just do a createElement and skip the rest
+ ret = rsingleTag.exec( selector );
+
+ if ( ret ) {
+ if ( jQuery.isPlainObject( context ) ) {
+ selector = [ document.createElement( ret[1] ) ];
+ jQuery.fn.attr.call( selector, context, true );
+ } else {
+ selector = [ doc.createElement( ret[1] ) ];
+ }
+
+ } else {
+ ret = buildFragment( [ match[1] ], [ doc ] );
+ selector = (ret.cacheable ? ret.fragment.cloneNode(true) : ret.fragment).childNodes;
+ }
+
+ return jQuery.merge( this, selector );
+
// HANDLE: $("#id")
- else {
- var elem = document.getElementById( match[3] );
-
- // Handle the case where IE and Opera return items
- // by name instead of ID
- if ( elem && elem.id != match[3] )
- return jQuery().find( selector );
-
- // Otherwise, we inject the element directly into the jQuery object
- var ret = jQuery( elem || [] );
- ret.context = document;
- ret.selector = selector;
- return ret;
+ } else {
+ elem = document.getElementById( match[2] );
+
+ if ( elem ) {
+ // Handle the case where IE and Opera return items
+ // by name instead of ID
+ if ( elem.id !== match[2] ) {
+ return rootjQuery.find( selector );
+ }
+
+ // Otherwise, we inject the element directly into the jQuery object
+ this.length = 1;
+ this[0] = elem;
+ }
+
+ this.context = document;
+ this.selector = selector;
+ return this;
}
- // HANDLE: $(expr, [context])
- // (which is just equivalent to: $(content).find(expr)
- } else
+ // HANDLE: $("TAG")
+ } else if ( !context && /^\w+$/.test( selector ) ) {
+ this.selector = selector;
+ this.context = document;
+ selector = document.getElementsByTagName( selector );
+ return jQuery.merge( this, selector );
+
+ // HANDLE: $(expr, $(...))
+ } else if ( !context || context.jquery ) {
+ return (context || rootjQuery).find( selector );
+
+ // HANDLE: $(expr, context)
+ // (which is just equivalent to: $(context).find(expr))
+ } else {
return jQuery( context ).find( selector );
+ }
// HANDLE: $(function)
// Shortcut for document ready
- } else if ( jQuery.isFunction( selector ) )
- return jQuery( document ).ready( selector );
+ } else if ( jQuery.isFunction( selector ) ) {
+ return rootjQuery.ready( selector );
+ }
- // Make sure that old selector state is passed along
- if ( selector.selector && selector.context ) {
+ if (selector.selector !== undefined) {
this.selector = selector.selector;
this.context = selector.context;
}
- return this.setArray(jQuery.isArray( selector ) ?
- selector :
- jQuery.makeArray(selector));
+ return jQuery.makeArray( selector, this );
},
// Start with an empty selector
selector: "",
// The current version of jQuery being used
- jquery: "1.3.2",
+ jquery: "1.4.2",
+
+ // The default length of a jQuery object is 0
+ length: 0,
// The number of elements contained in the matched element set
size: function() {
return this.length;
},
+ toArray: function() {
+ return slice.call( this, 0 );
+ },
+
// Get the Nth element in the matched element set OR
// Get the whole matched element set as a clean array
get: function( num ) {
- return num === undefined ?
+ return num == null ?
// Return a 'clean' array
- Array.prototype.slice.call( this ) :
+ this.toArray() :
// Return just the object
- this[ num ];
+ ( num < 0 ? this.slice(num)[ 0 ] : this[ num ] );
},
// Take an array of elements and push it onto the stack
// (returning the new matched element set)
pushStack: function( elems, name, selector ) {
// Build a new jQuery matched element set
- var ret = jQuery( elems );
+ var ret = jQuery();
+
+ if ( jQuery.isArray( elems ) ) {
+ push.apply( ret, elems );
+
+ } else {
+ jQuery.merge( ret, elems );
+ }
// Add the old object onto the stack (as a reference)
ret.prevObject = this;
ret.context = this.context;
- if ( name === "find" )
+ if ( name === "find" ) {
ret.selector = this.selector + (this.selector ? " " : "") + selector;
- else if ( name )
+ } else if ( name ) {
ret.selector = this.selector + "." + name + "(" + selector + ")";
+ }
// Return the newly-formed element set
return ret;
},
- // Force the current matched set of elements to become
- // the specified array of elements (destroying the stack in the process)
- // You should use pushStack() in order to do this, but maintain the stack
- setArray: function( elems ) {
- // Resetting the length to 0, then using the native Array push
- // is a super-fast way to populate an object with array-like properties
- this.length = 0;
- Array.prototype.push.apply( this, elems );
-
- return this;
- },
-
// Execute a callback for every element in the matched set.
// (You can seed the arguments with an array of args, but this is
// only used internally.)
each: function( callback, args ) {
return jQuery.each( this, callback, args );
},
+
+ ready: function( fn ) {
+ // Attach the listeners
+ jQuery.bindReady();
- // Determine the position of an element within
- // the matched set of elements
- index: function( elem ) {
- // Locate the position of the desired element
- return jQuery.inArray(
- // If it receives a jQuery object, the first element is used
- elem && elem.jquery ? elem[0] : elem
- , this );
- },
+ // If the DOM is already ready
+ if ( jQuery.isReady ) {
+ // Execute the function immediately
+ fn.call( document, jQuery );
- attr: function( name, value, type ) {
- var options = name;
+ // Otherwise, remember the function for later
+ } else if ( readyList ) {
+ // Add the function to the wait list
+ readyList.push( fn );
+ }
- // Look for the case where we're accessing a style value
- if ( typeof name === "string" )
- if ( value === undefined )
- return this[0] && jQuery[ type || "attr" ]( this[0], name );
+ return this;
+ },
+
+ eq: function( i ) {
+ return i === -1 ?
+ this.slice( i ) :
+ this.slice( i, +i + 1 );
+ },
- else {
- options = {};
- options[ name ] = value;
- }
+ first: function() {
+ return this.eq( 0 );
+ },
- // Check to see if we're setting style values
- return this.each(function(i){
- // Set all the styles
- for ( name in options )
- jQuery.attr(
- type ?
- this.style :
- this,
- name, jQuery.prop( this, options[ name ], type, i, name )
- );
- });
+ last: function() {
+ return this.eq( -1 );
},
- css: function( key, value ) {
- // ignore negative width and height values
- if ( (key == 'width' || key == 'height') && parseFloat(value) < 0 )
- value = undefined;
- return this.attr( key, value, "curCSS" );
+ slice: function() {
+ return this.pushStack( slice.apply( this, arguments ),
+ "slice", slice.call(arguments).join(",") );
},
- text: function( text ) {
- if ( typeof text !== "object" && text != null )
- return this.empty().append( (this[0] && this[0].ownerDocument || document).createTextNode( text ) );
+ map: function( callback ) {
+ return this.pushStack( jQuery.map(this, function( elem, i ) {
+ return callback.call( elem, i, elem );
+ }));
+ },
+
+ end: function() {
+ return this.prevObject || jQuery(null);
+ },
- var ret = "";
+ // For internal use only.
+ // Behaves like an Array's method, not like a jQuery method.
+ push: push,
+ sort: [].sort,
+ splice: [].splice
+};
- jQuery.each( text || this, function(){
- jQuery.each( this.childNodes, function(){
- if ( this.nodeType != 8 )
- ret += this.nodeType != 1 ?
- this.nodeValue :
- jQuery.fn.text( [ this ] );
- });
- });
+// Give the init function the jQuery prototype for later instantiation
+jQuery.fn.init.prototype = jQuery.fn;
- return ret;
- },
+jQuery.extend = jQuery.fn.extend = function() {
+ // copy reference to target object
+ var target = arguments[0] || {}, i = 1, length = arguments.length, deep = false, options, name, src, copy;
- wrapAll: function( html ) {
- if ( this[0] ) {
- // The elements to wrap the target around
- var wrap = jQuery( html, this[0].ownerDocument ).clone();
+ // Handle a deep copy situation
+ if ( typeof target === "boolean" ) {
+ deep = target;
+ target = arguments[1] || {};
+ // skip the boolean and the target
+ i = 2;
+ }
- if ( this[0].parentNode )
- wrap.insertBefore( this[0] );
+ // Handle case when target is a string or something (possible in deep copy)
+ if ( typeof target !== "object" && !jQuery.isFunction(target) ) {
+ target = {};
+ }
- wrap.map(function(){
- var elem = this;
+ // extend jQuery itself if only one argument is passed
+ if ( length === i ) {
+ target = this;
+ --i;
+ }
- while ( elem.firstChild )
- elem = elem.firstChild;
+ for ( ; i < length; i++ ) {
+ // Only deal with non-null/undefined values
+ if ( (options = arguments[ i ]) != null ) {
+ // Extend the base object
+ for ( name in options ) {
+ src = target[ name ];
+ copy = options[ name ];
- return elem;
- }).append(this);
+ // Prevent never-ending loop
+ if ( target === copy ) {
+ continue;
+ }
+
+ // Recurse if we're merging object literal values or arrays
+ if ( deep && copy && ( jQuery.isPlainObject(copy) || jQuery.isArray(copy) ) ) {
+ var clone = src && ( jQuery.isPlainObject(src) || jQuery.isArray(src) ) ? src
+ : jQuery.isArray(copy) ? [] : {};
+
+ // Never move original objects, clone them
+ target[ name ] = jQuery.extend( deep, clone, copy );
+
+ // Don't bring in undefined values
+ } else if ( copy !== undefined ) {
+ target[ name ] = copy;
+ }
+ }
}
+ }
- return this;
- },
+ // Return the modified object
+ return target;
+};
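// Usage sketch: a leading boolean requests a deep (recursive) copy:
//   var merged = jQuery.extend(true, {}, { a: { b: 1 } }, { a: { c: 2 } });
//   // merged.a is { b: 1, c: 2 }; nested objects are cloned, never shared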
- wrapInner: function( html ) {
- return this.each(function(){
- jQuery( this ).contents().wrapAll( html );
- });
+jQuery.extend({
+ noConflict: function( deep ) {
+ window.$ = _$;
+
+ if ( deep ) {
+ window.jQuery = _jQuery;
+ }
+
+ return jQuery;
},
+
+ // Is the DOM ready to be used? Set to true once it occurs.
+ isReady: false,
+
+ // Handle when the DOM is ready
+ ready: function() {
+ // Make sure that the DOM is not already loaded
+ if ( !jQuery.isReady ) {
+ // Make sure body exists, at least, in case IE gets a little overzealous (ticket #5443).
+ if ( !document.body ) {
+ return setTimeout( jQuery.ready, 13 );
+ }
- wrap: function( html ) {
- return this.each(function(){
- jQuery( this ).wrapAll( html );
- });
+ // Remember that the DOM is ready
+ jQuery.isReady = true;
+
+ // If there are functions bound, execute them
+ if ( readyList ) {
+ // Execute all of them
+ var fn, i = 0;
+ while ( (fn = readyList[ i++ ]) ) {
+ fn.call( document, jQuery );
+ }
+
+ // Reset the list of functions
+ readyList = null;
+ }
+
+ // Trigger any bound ready events
+ if ( jQuery.fn.triggerHandler ) {
+ jQuery( document ).triggerHandler( "ready" );
+ }
+ }
},
+
+ bindReady: function() {
+ if ( readyBound ) {
+ return;
+ }
- append: function() {
- return this.domManip(arguments, true, function(elem){
- if (this.nodeType == 1)
- this.appendChild( elem );
- });
+ readyBound = true;
+
+ // Catch cases where $(document).ready() is called after the
+ // browser event has already occurred.
+ if ( document.readyState === "complete" ) {
+ return jQuery.ready();
+ }
+
+ // Mozilla, Opera and webkit nightlies currently support this event
+ if ( document.addEventListener ) {
+ // Use the handy event callback
+ document.addEventListener( "DOMContentLoaded", DOMContentLoaded, false );
+
+ // A fallback to window.onload, that will always work
+ window.addEventListener( "load", jQuery.ready, false );
+
+ // If IE event model is used
+ } else if ( document.attachEvent ) {
+ // ensure firing before onload,
+ // maybe late but safe also for iframes
+ document.attachEvent("onreadystatechange", DOMContentLoaded);
+
+ // A fallback to window.onload, that will always work
+ window.attachEvent( "onload", jQuery.ready );
+
+ // If IE and not a frame
+ // continually check to see if the document is ready
+ var toplevel = false;
+
+ try {
+ toplevel = window.frameElement == null;
+ } catch(e) {}
+
+ if ( document.documentElement.doScroll && toplevel ) {
+ doScrollCheck();
+ }
+ }
},
- prepend: function() {
- return this.domManip(arguments, true, function(elem){
- if (this.nodeType == 1)
- this.insertBefore( elem, this.firstChild );
- });
+ // See test/unit/core.js for details concerning isFunction.
+ // Since version 1.3, DOM methods and functions like alert
+ // aren't supported. They return false on IE (#2968).
+ isFunction: function( obj ) {
+ return toString.call(obj) === "[object Function]";
},
- before: function() {
- return this.domManip(arguments, false, function(elem){
- this.parentNode.insertBefore( elem, this );
- });
+ isArray: function( obj ) {
+ return toString.call(obj) === "[object Array]";
},
- after: function() {
- return this.domManip(arguments, false, function(elem){
- this.parentNode.insertBefore( elem, this.nextSibling );
- });
+ isPlainObject: function( obj ) {
+ // Must be an Object.
+ // Because of IE, we also have to check the presence of the constructor property.
+ // Make sure that DOM nodes and window objects don't pass through, as well
+ if ( !obj || toString.call(obj) !== "[object Object]" || obj.nodeType || obj.setInterval ) {
+ return false;
+ }
+
+ // Not own constructor property must be Object
+ if ( obj.constructor
+ && !hasOwnProperty.call(obj, "constructor")
+ && !hasOwnProperty.call(obj.constructor.prototype, "isPrototypeOf") ) {
+ return false;
+ }
+
+ // Own properties are enumerated first, so to speed up,
+ // if the last one is own, then all properties are own.
+
+ var key;
+ for ( key in obj ) {}
+
+ return key === undefined || hasOwnProperty.call( obj, key );
},
- end: function() {
- return this.prevObject || jQuery( [] );
+ isEmptyObject: function( obj ) {
+ for ( var name in obj ) {
+ return false;
+ }
+ return true;
+ },
+
+ error: function( msg ) {
+ throw msg;
},
+
+ parseJSON: function( data ) {
+ if ( typeof data !== "string" || !data ) {
+ return null;
+ }
- // For internal use only.
- // Behaves like an Array's method, not like a jQuery method.
- push: [].push,
- sort: [].sort,
- splice: [].splice,
+ // Make sure leading/trailing whitespace is removed (IE can't handle it)
+ data = jQuery.trim( data );
+
+ // Make sure the incoming data is actual JSON
+ // Logic borrowed from http://json.org/json2.js
+ if ( /^[\],:{}\s]*$/.test(data.replace(/\\(?:["\\\/bfnrt]|u[0-9a-fA-F]{4})/g, "@")
+ .replace(/"[^"\\\n\r]*"|true|false|null|-?\d+(?:\.\d*)?(?:[eE][+\-]?\d+)?/g, "]")
+ .replace(/(?:^|:|,)(?:\s*\[)+/g, "")) ) {
+
+ // Try to use the native JSON parser first
+ return window.JSON && window.JSON.parse ?
+ window.JSON.parse( data ) :
+ (new Function("return " + data))();
- find: function( selector ) {
- if ( this.length === 1 ) {
- var ret = this.pushStack( [], "find", selector );
- ret.length = 0;
- jQuery.find( selector, this[0], ret );
- return ret;
} else {
- return this.pushStack( jQuery.unique(jQuery.map(this, function(elem){
- return jQuery.find( selector, elem );
- })), "find", selector );
+ jQuery.error( "Invalid JSON: " + data );
}
},
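// Usage sketch: jQuery.parseJSON('{"n": 1}').n === 1; input that fails the
// sanity regex above is never evaluated and is reported via jQuery.error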
- clone: function( events ) {
- // Do the clone
- var ret = this.map(function(){
- if ( !jQuery.support.noCloneEvent && !jQuery.isXMLDoc(this) ) {
- // IE copies events bound via attachEvent when
- // using cloneNode. Calling detachEvent on the
- // clone will also remove the events from the orignal
- // In order to get around this, we use innerHTML.
- // Unfortunately, this means some modifications to
- // attributes in IE that are actually only stored
- // as properties will not be copied (such as the
- // the name attribute on an input).
- var html = this.outerHTML;
- if ( !html ) {
- var div = this.ownerDocument.createElement("div");
- div.appendChild( this.cloneNode(true) );
- html = div.innerHTML;
- }
+ noop: function() {},
- return jQuery.clean([html.replace(/ jQuery\d+="(?:\d+|null)"/g, "").replace(/^\s*/, "")])[0];
- } else
- return this.cloneNode(true);
- });
+ // Evaluates a script in a global context
+ globalEval: function( data ) {
+ if ( data && rnotwhite.test(data) ) {
+ // Inspired by code by Andrea Giammarchi
+ // http://webreflection.blogspot.com/2007/08/global-scope-evaluation-and-dom.html
+ var head = document.getElementsByTagName("head")[0] || document.documentElement,
+ script = document.createElement("script");
- // Copy the events from the original to the clone
- if ( events === true ) {
- var orig = this.find("*").andSelf(), i = 0;
+ script.type = "text/javascript";
- ret.find("*").andSelf().each(function(){
- if ( this.nodeName !== orig[i].nodeName )
- return;
+ if ( jQuery.support.scriptEval ) {
+ script.appendChild( document.createTextNode( data ) );
+ } else {
+ script.text = data;
+ }
+
+ // Use insertBefore instead of appendChild to circumvent an IE6 bug.
+ // This arises when a base node is used (#2709).
+ head.insertBefore( script, head.firstChild );
+ head.removeChild( script );
+ }
+ },
+
+ nodeName: function( elem, name ) {
+ return elem.nodeName && elem.nodeName.toUpperCase() === name.toUpperCase();
+ },
- var events = jQuery.data( orig[i], "events" );
+ // args is for internal usage only
+ each: function( object, callback, args ) {
+ var name, i = 0,
+ length = object.length,
+ isObj = length === undefined || jQuery.isFunction(object);
- for ( var type in events ) {
- for ( var handler in events[ type ] ) {
- jQuery.event.add( this, type, events[ type ][ handler ], events[ type ][ handler ].data );
+ if ( args ) {
+ if ( isObj ) {
+ for ( name in object ) {
+ if ( callback.apply( object[ name ], args ) === false ) {
+ break;
}
}
+ } else {
+ for ( ; i < length; ) {
+ if ( callback.apply( object[ i++ ], args ) === false ) {
+ break;
+ }
+ }
+ }
- i++;
- });
+ // A special, fast, case for the most common use of each
+ } else {
+ if ( isObj ) {
+ for ( name in object ) {
+ if ( callback.call( object[ name ], name, object[ name ] ) === false ) {
+ break;
+ }
+ }
+ } else {
+ for ( var value = object[0];
+ i < length && callback.call( value, i, value ) !== false; value = object[++i] ) {}
+ }
}
- // Return the cloned set
- return ret;
+ return object;
},
- filter: function( selector ) {
- return this.pushStack(
- jQuery.isFunction( selector ) &&
- jQuery.grep(this, function(elem, i){
- return selector.call( elem, i );
- }) ||
-
- jQuery.multiFilter( selector, jQuery.grep(this, function(elem){
- return elem.nodeType === 1;
- }) ), "filter", selector );
- },
-
- closest: function( selector ) {
- var pos = jQuery.expr.match.POS.test( selector ) ? jQuery(selector) : null,
- closer = 0;
-
- return this.map(function(){
- var cur = this;
- while ( cur && cur.ownerDocument ) {
- if ( pos ? pos.index(cur) > -1 : jQuery(cur).is(selector) ) {
- jQuery.data(cur, "closest", closer);
- return cur;
- }
- cur = cur.parentNode;
- closer++;
- }
- });
+ trim: function( text ) {
+ return (text || "").replace( rtrim, "" );
},
- not: function( selector ) {
- if ( typeof selector === "string" )
- // test special case where just one selector is passed in
- if ( isSimple.test( selector ) )
- return this.pushStack( jQuery.multiFilter( selector, this, true ), "not", selector );
- else
- selector = jQuery.multiFilter( selector, this );
-
- var isArrayLike = selector.length && selector[selector.length - 1] !== undefined && !selector.nodeType;
- return this.filter(function() {
- return isArrayLike ? jQuery.inArray( this, selector ) < 0 : this != selector;
- });
- },
+ // results is for internal usage only
+ makeArray: function( array, results ) {
+ var ret = results || [];
- add: function( selector ) {
- return this.pushStack( jQuery.unique( jQuery.merge(
- this.get(),
- typeof selector === "string" ?
- jQuery( selector ) :
- jQuery.makeArray( selector )
- )));
- },
+ if ( array != null ) {
+ // The window, strings (and functions) also have 'length'
+ // The extra typeof function check is to prevent crashes
+ // in Safari 2 (See: #3039)
+ if ( array.length == null || typeof array === "string" || jQuery.isFunction(array) || (typeof array !== "function" && array.setInterval) ) {
+ push.call( ret, array );
+ } else {
+ jQuery.merge( ret, array );
+ }
+ }
- is: function( selector ) {
- return !!selector && jQuery.multiFilter( selector, this ).length > 0;
+ return ret;
},
- hasClass: function( selector ) {
- return !!selector && this.is( "." + selector );
+ inArray: function( elem, array ) {
+ if ( array.indexOf ) {
+ return array.indexOf( elem );
+ }
+
+ for ( var i = 0, length = array.length; i < length; i++ ) {
+ if ( array[ i ] === elem ) {
+ return i;
+ }
+ }
+
+ return -1;
},
- val: function( value ) {
- if ( value === undefined ) {
- var elem = this[0];
+ merge: function( first, second ) {
+ var i = first.length, j = 0;
- if ( elem ) {
- if( jQuery.nodeName( elem, 'option' ) )
- return (elem.attributes.value || {}).specified ? elem.value : elem.text;
-
- // We need to handle select boxes special
- if ( jQuery.nodeName( elem, "select" ) ) {
- var index = elem.selectedIndex,
- values = [],
- options = elem.options,
- one = elem.type == "select-one";
+ if ( typeof second.length === "number" ) {
+ for ( var l = second.length; j < l; j++ ) {
+ first[ i++ ] = second[ j ];
+ }
+
+ } else {
+ while ( second[j] !== undefined ) {
+ first[ i++ ] = second[ j++ ];
+ }
+ }
- // Nothing was selected
- if ( index < 0 )
- return null;
+ first.length = i;
- // Loop through all the selected options
- for ( var i = one ? index : 0, max = one ? index + 1 : options.length; i < max; i++ ) {
- var option = options[ i ];
+ return first;
+ },
- if ( option.selected ) {
- // Get the specifc value for the option
- value = jQuery(option).val();
+ grep: function( elems, callback, inv ) {
+ var ret = [];
- // We don't need an array for one selects
- if ( one )
- return value;
+ // Go through the array, only saving the items
+ // that pass the validator function
+ for ( var i = 0, length = elems.length; i < length; i++ ) {
+ if ( !inv !== !callback( elems[ i ], i ) ) {
+ ret.push( elems[ i ] );
+ }
+ }
- // Multi-Selects return an array
- values.push( value );
- }
- }
+ return ret;
+ },
- return values;
- }
+ // arg is for internal usage only
+ map: function( elems, callback, arg ) {
+ var ret = [], value;
- // Everything else, we just grab the value
- return (elem.value || "").replace(/\r/g, "");
+ // Go through the array, translating each of the items to their
+ // new value (or values).
+ for ( var i = 0, length = elems.length; i < length; i++ ) {
+ value = callback( elems[ i ], i, arg );
+ if ( value != null ) {
+ ret[ ret.length ] = value;
}
-
- return undefined;
}
- if ( typeof value === "number" )
- value += '';
+ return ret.concat.apply( [], ret );
+ },
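// Usage sketch: jQuery.map([1, 2, 3], function(n) { return n * 2; })
// yields [2, 4, 6]; null/undefined results are dropped and any returned
// arrays are flattened by the final concat.apply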
- return this.each(function(){
- if ( this.nodeType != 1 )
- return;
+ // A global GUID counter for objects
+ guid: 1,
- if ( jQuery.isArray(value) && /radio|checkbox/.test( this.type ) )
- this.checked = (jQuery.inArray(this.value, value) >= 0 ||
- jQuery.inArray(this.name, value) >= 0);
+ proxy: function( fn, proxy, thisObject ) {
+ if ( arguments.length === 2 ) {
+ if ( typeof proxy === "string" ) {
+ thisObject = fn;
+ fn = thisObject[ proxy ];
+ proxy = undefined;
- else if ( jQuery.nodeName( this, "select" ) ) {
- var values = jQuery.makeArray(value);
+ } else if ( proxy && !jQuery.isFunction( proxy ) ) {
+ thisObject = proxy;
+ proxy = undefined;
+ }
+ }
- jQuery( "option", this ).each(function(){
- this.selected = (jQuery.inArray( this.value, values ) >= 0 ||
- jQuery.inArray( this.text, values ) >= 0);
- });
+ if ( !proxy && fn ) {
+ proxy = function() {
+ return fn.apply( thisObject || this, arguments );
+ };
+ }
- if ( !values.length )
- this.selectedIndex = -1;
+ // Set the guid of unique handler to the same of original handler, so it can be removed
+ if ( fn ) {
+ proxy.guid = fn.guid = fn.guid || proxy.guid || jQuery.guid++;
+ }
- } else
- this.value = value;
- });
+ // So proxy can be declared as an argument
+ return proxy;
},
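// Usage sketch (hypothetical obj): var f = jQuery.proxy(obj, "show");
// f() invokes obj.show() with this === obj, and the shared guid lets
// jQuery later unbind the proxy by its original handler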
- html: function( value ) {
- return value === undefined ?
- (this[0] ?
- this[0].innerHTML.replace(/ jQuery\d+="(?:\d+|null)"/g, "") :
- null) :
- this.empty().append( value );
- },
+ // Use of jQuery.browser is frowned upon.
+ // More details: http://docs.jquery.com/Utilities/jQuery.browser
+ uaMatch: function( ua ) {
+ ua = ua.toLowerCase();
- replaceWith: function( value ) {
- return this.after( value ).remove();
- },
+ var match = /(webkit)[ \/]([\w.]+)/.exec( ua ) ||
+ /(opera)(?:.*version)?[ \/]([\w.]+)/.exec( ua ) ||
+ /(msie) ([\w.]+)/.exec( ua ) ||
+ !/compatible/.test( ua ) && /(mozilla)(?:.*? rv:([\w.]+))?/.exec( ua ) ||
+ [];
- eq: function( i ) {
- return this.slice( i, +i + 1 );
+ return { browser: match[1] || "", version: match[2] || "0" };
},
- slice: function() {
- return this.pushStack( Array.prototype.slice.apply( this, arguments ),
- "slice", Array.prototype.slice.call(arguments).join(",") );
- },
+ browser: {}
+});
- map: function( callback ) {
- return this.pushStack( jQuery.map(this, function(elem, i){
- return callback.call( elem, i, elem );
- }));
- },
+browserMatch = jQuery.uaMatch( userAgent );
+if ( browserMatch.browser ) {
+ jQuery.browser[ browserMatch.browser ] = true;
+ jQuery.browser.version = browserMatch.version;
+}
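// Sketch: jQuery.uaMatch("Mozilla/4.0 (compatible; MSIE 8.0)") returns
// { browser: "msie", version: "8.0" }, which drives the flags set above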
- andSelf: function() {
- return this.add( this.prevObject );
- },
+// Deprecated, use jQuery.browser.webkit instead
+if ( jQuery.browser.webkit ) {
+ jQuery.browser.safari = true;
+}
- domManip: function( args, table, callback ) {
- if ( this[0] ) {
- var fragment = (this[0].ownerDocument || this[0]).createDocumentFragment(),
- scripts = jQuery.clean( args, (this[0].ownerDocument || this[0]), fragment ),
- first = fragment.firstChild;
+if ( indexOf ) {
+ jQuery.inArray = function( elem, array ) {
+ return indexOf.call( array, elem );
+ };
+}
- if ( first )
- for ( var i = 0, l = this.length; i < l; i++ )
- callback.call( root(this[i], first), this.length > 1 || i > 0 ?
- fragment.cloneNode(true) : fragment );
-
- if ( scripts )
- jQuery.each( scripts, evalScript );
- }
+// All jQuery objects should point back to these
+rootjQuery = jQuery(document);
- return this;
-
- function root( elem, cur ) {
- return table && jQuery.nodeName(elem, "table") && jQuery.nodeName(cur, "tr") ?
- (elem.getElementsByTagName("tbody")[0] ||
- elem.appendChild(elem.ownerDocument.createElement("tbody"))) :
- elem;
+// Cleanup functions for the document ready method
+if ( document.addEventListener ) {
+ DOMContentLoaded = function() {
+ document.removeEventListener( "DOMContentLoaded", DOMContentLoaded, false );
+ jQuery.ready();
+ };
+
+} else if ( document.attachEvent ) {
+ DOMContentLoaded = function() {
+ // Make sure body exists, at least, in case IE gets a little overzealous (ticket #5443).
+ if ( document.readyState === "complete" ) {
+ document.detachEvent( "onreadystatechange", DOMContentLoaded );
+ jQuery.ready();
}
+ };
+}
+
+// The DOM ready check for Internet Explorer
+function doScrollCheck() {
+ if ( jQuery.isReady ) {
+ return;
}
-};
-// Give the init function the jQuery prototype for later instantiation
-jQuery.fn.init.prototype = jQuery.fn;
+ try {
+ // If IE is used, use the trick by Diego Perini
+ // http://javascript.nwbox.com/IEContentLoaded/
+ document.documentElement.doScroll("left");
+ } catch( error ) {
+ setTimeout( doScrollCheck, 1 );
+ return;
+ }
+
+ // and execute any waiting functions
+ jQuery.ready();
+}
function evalScript( i, elem ) {
- if ( elem.src )
+ if ( elem.src ) {
jQuery.ajax({
url: elem.src,
async: false,
dataType: "script"
});
-
- else
+ } else {
jQuery.globalEval( elem.text || elem.textContent || elem.innerHTML || "" );
+ }
- if ( elem.parentNode )
+ if ( elem.parentNode ) {
elem.parentNode.removeChild( elem );
+ }
}
-function now(){
- return +new Date;
+// Multifunctional method to get and set values on a collection
+// The value(s) can optionally be executed if it's a function
+function access( elems, key, value, exec, fn, pass ) {
+ var length = elems.length;
+
+ // Setting many attributes
+ if ( typeof key === "object" ) {
+ for ( var k in key ) {
+ access( elems, k, key[k], exec, fn, value );
+ }
+ return elems;
+ }
+
+ // Setting one attribute
+ if ( value !== undefined ) {
+ // Optionally, function values get executed if exec is true
+ exec = !pass && exec && jQuery.isFunction(value);
+
+ for ( var i = 0; i < length; i++ ) {
+ fn( elems[i], key, exec ? value.call( elems[i], i, fn( elems[i], key ) ) : value, pass );
+ }
+
+ return elems;
+ }
+
+ // Getting an attribute
+ return length ? fn( elems[0], key ) : undefined;
}
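// Sketch of the three paths above: an object key recurses once per entry,
// key + value sets every element, key alone reads from the first element:
//   access( elems, "title", "hi", true, fn );      // set
//   access( elems, "title", undefined, true, fn ); // get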
-jQuery.extend = jQuery.fn.extend = function() {
- // copy reference to target object
- var target = arguments[0] || {}, i = 1, length = arguments.length, deep = false, options;
+function now() {
+ return (new Date).getTime();
+}
+(function() {
- // Handle a deep copy situation
- if ( typeof target === "boolean" ) {
- deep = target;
- target = arguments[1] || {};
- // skip the boolean and the target
- i = 2;
+ jQuery.support = {};
+
+ var root = document.documentElement,
+ script = document.createElement("script"),
+ div = document.createElement("div"),
+ id = "script" + now();
+
+ div.style.display = "none";
+ div.innerHTML = " <link/><table></table><a href='/a' style='color:red;float:left;opacity:.55;'>a</a><input type='checkbox'/>";
+
+ var all = div.getElementsByTagName("*"),
+ a = div.getElementsByTagName("a")[0];
+
+ // Can't get basic test support
+ if ( !all || !all.length || !a ) {
+ return;
}
- // Handle case when target is a string or something (possible in deep copy)
- if ( typeof target !== "object" && !jQuery.isFunction(target) )
- target = {};
+ jQuery.support = {
+ // IE strips leading whitespace when .innerHTML is used
+ leadingWhitespace: div.firstChild.nodeType === 3,
- // extend jQuery itself if only one argument is passed
- if ( length == i ) {
- target = this;
- --i;
+ // Make sure that tbody elements aren't automatically inserted
+ // IE will insert them into empty tables
+ tbody: !div.getElementsByTagName("tbody").length,
+
+ // Make sure that link elements get serialized correctly by innerHTML
+ // This requires a wrapper element in IE
+ htmlSerialize: !!div.getElementsByTagName("link").length,
+
+ // Get the style information from getAttribute
+ // (IE uses .cssText instead)
+ style: /red/.test( a.getAttribute("style") ),
+
+ // Make sure that URLs aren't manipulated
+ // (IE normalizes it by default)
+ hrefNormalized: a.getAttribute("href") === "/a",
+
+ // Make sure that element opacity exists
+ // (IE uses filter instead)
+ // Use a regex to work around a WebKit issue. See #5145
+ opacity: /^0.55$/.test( a.style.opacity ),
+
+ // Verify style float existence
+ // (IE uses styleFloat instead of cssFloat)
+ cssFloat: !!a.style.cssFloat,
+
+ // Make sure that if no value is specified for a checkbox
+ // that it defaults to "on".
+ // (WebKit defaults to "" instead)
+ checkOn: div.getElementsByTagName("input")[0].value === "on",
+
+ // Make sure that a selected-by-default option has a working selected property.
+ // (WebKit defaults to false instead of true, IE too, if it's in an optgroup)
+ optSelected: document.createElement("select").appendChild( document.createElement("option") ).selected,
+
+ parentNode: div.removeChild( div.appendChild( document.createElement("div") ) ).parentNode === null,
+
+ // Will be defined later
+ deleteExpando: true,
+ checkClone: false,
+ scriptEval: false,
+ noCloneEvent: true,
+ boxModel: null
+ };
+
+ script.type = "text/javascript";
+ try {
+ script.appendChild( document.createTextNode( "window." + id + "=1;" ) );
+ } catch(e) {}
+
+ root.insertBefore( script, root.firstChild );
+
+ // Make sure that the execution of code works by injecting a script
+ // tag with appendChild/createTextNode
+ // (IE doesn't support this, fails, and uses .text instead)
+ if ( window[ id ] ) {
+ jQuery.support.scriptEval = true;
+ delete window[ id ];
}
- for ( ; i < length; i++ )
- // Only deal with non-null/undefined values
- if ( (options = arguments[ i ]) != null )
- // Extend the base object
- for ( var name in options ) {
- var src = target[ name ], copy = options[ name ];
+ // Test to see if it's possible to delete an expando from an element
+ // Fails in Internet Explorer
+ try {
+ delete script.test;
+
+ } catch(e) {
+ jQuery.support.deleteExpando = false;
+ }
- // Prevent never-ending loop
- if ( target === copy )
- continue;
+ root.removeChild( script );
- // Recurse if we're merging object values
- if ( deep && copy && typeof copy === "object" && !copy.nodeType )
- target[ name ] = jQuery.extend( deep,
- // Never move original objects, clone them
- src || ( copy.length != null ? [ ] : { } )
- , copy );
+ if ( div.attachEvent && div.fireEvent ) {
+ div.attachEvent("onclick", function click() {
+ // Cloning a node shouldn't copy over any
+ // bound event handlers (IE does this)
+ jQuery.support.noCloneEvent = false;
+ div.detachEvent("onclick", click);
+ });
+ div.cloneNode(true).fireEvent("onclick");
+ }
- // Don't bring in undefined values
- else if ( copy !== undefined )
- target[ name ] = copy;
+ div = document.createElement("div");
+ div.innerHTML = "<input type='radio' name='radiotest' checked='checked'/>";
- }
+ var fragment = document.createDocumentFragment();
+ fragment.appendChild( div.firstChild );
- // Return the modified object
- return target;
-};
+ // WebKit doesn't clone checked state correctly in fragments
+ jQuery.support.checkClone = fragment.cloneNode(true).cloneNode(true).lastChild.checked;
-// exclude the following css properties to add px
-var exclude = /z-?index|font-?weight|opacity|zoom|line-?height/i,
- // cache defaultView
- defaultView = document.defaultView || {},
- toString = Object.prototype.toString;
+ // Figure out if the W3C box model works as expected
+ // document.body must exist before we can do this
+ jQuery(function() {
+ var div = document.createElement("div");
+ div.style.width = div.style.paddingLeft = "1px";
-jQuery.extend({
- noConflict: function( deep ) {
- window.$ = _$;
+ document.body.appendChild( div );
+ jQuery.boxModel = jQuery.support.boxModel = div.offsetWidth === 2;
+ document.body.removeChild( div ).style.display = 'none';
- if ( deep )
- window.jQuery = _jQuery;
+ div = null;
+ });
- return jQuery;
- },
+ // Technique from Juriy Zaytsev
+ // http://thinkweb2.com/projects/prototype/detecting-event-support-without-browser-sniffing/
+ var eventSupported = function( eventName ) {
+ var el = document.createElement("div");
+ eventName = "on" + eventName;
- // See test/unit/core.js for details concerning isFunction.
- // Since version 1.3, DOM methods and functions like alert
- // aren't supported. They return false on IE (#2968).
- isFunction: function( obj ) {
- return toString.call(obj) === "[object Function]";
- },
+ var isSupported = (eventName in el);
+ if ( !isSupported ) {
+ el.setAttribute(eventName, "return;");
+ isSupported = typeof el[eventName] === "function";
+ }
+ el = null;
- isArray: function( obj ) {
- return toString.call(obj) === "[object Array]";
- },
+ return isSupported;
+ };
+
+ jQuery.support.submitBubbles = eventSupported("submit");
+ jQuery.support.changeBubbles = eventSupported("change");
- // check if an element is in a (or is an) XML document
- isXMLDoc: function( elem ) {
- return elem.nodeType === 9 && elem.documentElement.nodeName !== "HTML" ||
- !!elem.ownerDocument && jQuery.isXMLDoc( elem.ownerDocument );
+ // release memory in IE
+ root = script = div = all = a = null;
+})();
+
+jQuery.props = {
+ "for": "htmlFor",
+ "class": "className",
+ readonly: "readOnly",
+ maxlength: "maxLength",
+ cellspacing: "cellSpacing",
+ rowspan: "rowSpan",
+ colspan: "colSpan",
+ tabindex: "tabIndex",
+ usemap: "useMap",
+ frameborder: "frameBorder"
+};
+var expando = "jQuery" + now(), uuid = 0, windowData = {};
+
+jQuery.extend({
+ cache: {},
+
+ expando:expando,
+
+ // The following elements throw uncatchable exceptions if you
+ // attempt to add expando properties to them.
+ noData: {
+ "embed": true,
+ "object": true,
+ "applet": true
},
- // Evalulates a script in a global context
- globalEval: function( data ) {
- if ( data && /\S/.test(data) ) {
- // Inspired by code by Andrea Giammarchi
- // http://webreflection.blogspot.com/2007/08/global-scope-evaluation-and-dom.html
- var head = document.getElementsByTagName("head")[0] || document.documentElement,
- script = document.createElement("script");
+ data: function( elem, name, data ) {
+ if ( elem.nodeName && jQuery.noData[elem.nodeName.toLowerCase()] ) {
+ return;
+ }
- script.type = "text/javascript";
- if ( jQuery.support.scriptEval )
- script.appendChild( document.createTextNode( data ) );
- else
- script.text = data;
+ elem = elem == window ?
+ windowData :
+ elem;
- // Use insertBefore instead of appendChild to circumvent an IE6 bug.
- // This arises when a base node is used (#2709).
- head.insertBefore( script, head.firstChild );
- head.removeChild( script );
+ var id = elem[ expando ], cache = jQuery.cache, thisCache;
+
+ if ( !id && typeof name === "string" && data === undefined ) {
+ return null;
}
- },
- nodeName: function( elem, name ) {
- return elem.nodeName && elem.nodeName.toUpperCase() == name.toUpperCase();
- },
+ // Compute a unique ID for the element
+ if ( !id ) {
+ id = ++uuid;
+ }
- // args is for internal usage only
- each: function( object, callback, args ) {
- var name, i = 0, length = object.length;
+ // Avoid generating a new cache unless none exists and we
+ // want to manipulate it.
+ if ( typeof name === "object" ) {
+ elem[ expando ] = id;
+ thisCache = cache[ id ] = jQuery.extend(true, {}, name);
- if ( args ) {
- if ( length === undefined ) {
- for ( name in object )
- if ( callback.apply( object[ name ], args ) === false )
- break;
- } else
- for ( ; i < length; )
- if ( callback.apply( object[ i++ ], args ) === false )
- break;
+ } else if ( !cache[ id ] ) {
+ elem[ expando ] = id;
+ cache[ id ] = {};
+ }
- // A special, fast, case for the most common use of each
- } else {
- if ( length === undefined ) {
- for ( name in object )
- if ( callback.call( object[ name ], name, object[ name ] ) === false )
- break;
- } else
- for ( var value = object[0];
- i < length && callback.call( value, i, value ) !== false; value = object[++i] ){}
+ thisCache = cache[ id ];
+
+ // Prevent overriding the named cache with undefined values
+ if ( data !== undefined ) {
+ thisCache[ name ] = data;
}
- return object;
+ return typeof name === "string" ? thisCache[ name ] : thisCache;
},
- prop: function( elem, value, type, i, name ) {
- // Handle executable functions
- if ( jQuery.isFunction( value ) )
- value = value.call( elem, i );
+ removeData: function( elem, name ) {
+ if ( elem.nodeName && jQuery.noData[elem.nodeName.toLowerCase()] ) {
+ return;
+ }
+
+ elem = elem == window ?
+ windowData :
+ elem;
- // Handle passing in a number to a CSS property
- return typeof value === "number" && type == "curCSS" && !exclude.test( name ) ?
- value + "px" :
- value;
- },
+ var id = elem[ expando ], cache = jQuery.cache, thisCache = cache[ id ];
+
+ // If we want to remove a specific section of the element's data
+ if ( name ) {
+ if ( thisCache ) {
+ // Remove the section of cache data
+ delete thisCache[ name ];
+
+ // If we've removed all the data, remove the element's cache
+ if ( jQuery.isEmptyObject(thisCache) ) {
+ jQuery.removeData( elem );
+ }
+ }
+
+ // Otherwise, we want to remove all of the element's data
+ } else {
+ if ( jQuery.support.deleteExpando ) {
+ delete elem[ jQuery.expando ];
+
+ } else if ( elem.removeAttribute ) {
+ elem.removeAttribute( jQuery.expando );
+ }
- className: {
- // internal only, use addClass("class")
- add: function( elem, classNames ) {
- jQuery.each((classNames || "").split(/\s+/), function(i, className){
- if ( elem.nodeType == 1 && !jQuery.className.has( elem.className, className ) )
- elem.className += (elem.className ? " " : "") + className;
+ // Completely remove the data cache
+ delete cache[ id ];
+ }
+ }
+});
+
+jQuery.fn.extend({
+ data: function( key, value ) {
+ if ( typeof key === "undefined" && this.length ) {
+ return jQuery.data( this[0] );
+
+ } else if ( typeof key === "object" ) {
+ return this.each(function() {
+ jQuery.data( this, key );
});
- },
+ }
- // internal only, use removeClass("class")
- remove: function( elem, classNames ) {
- if (elem.nodeType == 1)
- elem.className = classNames !== undefined ?
- jQuery.grep(elem.className.split(/\s+/), function(className){
- return !jQuery.className.has( classNames, className );
- }).join(" ") :
- "";
- },
+ var parts = key.split(".");
+ parts[1] = parts[1] ? "." + parts[1] : "";
+
+ if ( value === undefined ) {
+ var data = this.triggerHandler("getData" + parts[1] + "!", [parts[0]]);
- // internal only, use hasClass("class")
- has: function( elem, className ) {
- return elem && jQuery.inArray( className, (elem.className || elem).toString().split(/\s+/) ) > -1;
+ if ( data === undefined && this.length ) {
+ data = jQuery.data( this[0], key );
+ }
+ return data === undefined && parts[1] ?
+ this.data( parts[0] ) :
+ data;
+ } else {
+ return this.trigger("setData" + parts[1] + "!", [parts[0], value]).each(function() {
+ jQuery.data( this, key, value );
+ });
}
},
- // A method for quickly swapping in/out CSS properties to get correct calculations
- swap: function( elem, options, callback ) {
- var old = {};
- // Remember the old values, and insert the new ones
- for ( var name in options ) {
- old[ name ] = elem.style[ name ];
- elem.style[ name ] = options[ name ];
+ removeData: function( key ) {
+ return this.each(function() {
+ jQuery.removeData( this, key );
+ });
+ }
+});
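// Usage sketch: var $el = jQuery("#box"); $el.data("answer", 42);
// $el.data("answer") === 42; a namespaced key such as "answer.ns" also
// fires the setData/getData triggerHandler hooks shown above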
+jQuery.extend({
+ queue: function( elem, type, data ) {
+ if ( !elem ) {
+ return;
}
- callback.call( elem );
+ type = (type || "fx") + "queue";
+ var q = jQuery.data( elem, type );
- // Revert the old values
- for ( var name in options )
- elem.style[ name ] = old[ name ];
+ // Speed up dequeue by getting out quickly if this is just a lookup
+ if ( !data ) {
+ return q || [];
+ }
+
+ if ( !q || jQuery.isArray(data) ) {
+ q = jQuery.data( elem, type, jQuery.makeArray(data) );
+
+ } else {
+ q.push( data );
+ }
+
+ return q;
},
- css: function( elem, name, force, extra ) {
- if ( name == "width" || name == "height" ) {
- var val, props = { position: "absolute", visibility: "hidden", display:"block" }, which = name == "width" ? [ "Left", "Right" ] : [ "Top", "Bottom" ];
+ dequeue: function( elem, type ) {
+ type = type || "fx";
- function getWH() {
- val = name == "width" ? elem.offsetWidth : elem.offsetHeight;
+ var queue = jQuery.queue( elem, type ), fn = queue.shift();
- if ( extra === "border" )
- return;
+ // If the fx queue is dequeued, always remove the progress sentinel
+ if ( fn === "inprogress" ) {
+ fn = queue.shift();
+ }
- jQuery.each( which, function() {
- if ( !extra )
- val -= parseFloat(jQuery.curCSS( elem, "padding" + this, true)) || 0;
- if ( extra === "margin" )
- val += parseFloat(jQuery.curCSS( elem, "margin" + this, true)) || 0;
- else
- val -= parseFloat(jQuery.curCSS( elem, "border" + this + "Width", true)) || 0;
- });
+ if ( fn ) {
+ // Add a progress sentinel to prevent the fx queue from being
+ // automatically dequeued
+ if ( type === "fx" ) {
+ queue.unshift("inprogress");
}
- if ( elem.offsetWidth !== 0 )
- getWH();
- else
- jQuery.swap( elem, props, getWH );
+ fn.call(elem, function() {
+ jQuery.dequeue(elem, type);
+ });
+ }
+ }
+});
- return Math.max(0, Math.round(val));
+jQuery.fn.extend({
+ queue: function( type, data ) {
+ if ( typeof type !== "string" ) {
+ data = type;
+ type = "fx";
}
- return jQuery.curCSS( elem, name, force );
- },
+ if ( data === undefined ) {
+ return jQuery.queue( this[0], type );
+ }
+ return this.each(function( i, elem ) {
+ var queue = jQuery.queue( this, type, data );
- curCSS: function( elem, name, force ) {
- var ret, style = elem.style;
+ if ( type === "fx" && queue[0] !== "inprogress" ) {
+ jQuery.dequeue( this, type );
+ }
+ });
+ },
+ dequeue: function( type ) {
+ return this.each(function() {
+ jQuery.dequeue( this, type );
+ });
+ },
- // We need to handle opacity special in IE
- if ( name == "opacity" && !jQuery.support.opacity ) {
- ret = jQuery.attr( style, "opacity" );
+ // Based on the plugin by Clint Helfers, with permission.
+ // http://blindsignals.com/index.php/2009/07/jquery-delay/
+ delay: function( time, type ) {
+ time = jQuery.fx ? jQuery.fx.speeds[time] || time : time;
+ type = type || "fx";
+
+ return this.queue( type, function() {
+ var elem = this;
+ setTimeout(function() {
+ jQuery.dequeue( elem, type );
+ }, time );
+ });
+ },
- return ret == "" ?
- "1" :
- ret;
- }
+ clearQueue: function( type ) {
+ return this.queue( type || "fx", [] );
+ }
+});
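// Usage sketch: $("#box").slideUp(300).delay(800).fadeIn(400);
// the delay() call queues an 800ms pause on the "fx" queue between effects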
+var rclass = /[\n\t]/g,
+ rspace = /\s+/,
+ rreturn = /\r/g,
+ rspecialurl = /href|src|style/,
+ rtype = /(button|input)/i,
+ rfocusable = /(button|input|object|select|textarea)/i,
+ rclickable = /^(a|area)$/i,
+ rradiocheck = /radio|checkbox/;
- // Make sure we're using the right name for getting the float value
- if ( name.match( /float/i ) )
- name = styleFloat;
+jQuery.fn.extend({
+ attr: function( name, value ) {
+ return access( this, name, value, true, jQuery.attr );
+ },
- if ( !force && style && style[ name ] )
- ret = style[ name ];
+ removeAttr: function( name, fn ) {
+ return this.each(function(){
+ jQuery.attr( this, name, "" );
+ if ( this.nodeType === 1 ) {
+ this.removeAttribute( name );
+ }
+ });
+ },
- else if ( defaultView.getComputedStyle ) {
+ addClass: function( value ) {
+ if ( jQuery.isFunction(value) ) {
+ return this.each(function(i) {
+ var self = jQuery(this);
+ self.addClass( value.call(this, i, self.attr("class")) );
+ });
+ }
- // Only "float" is needed here
- if ( name.match( /float/i ) )
- name = "float";
+ if ( value && typeof value === "string" ) {
+ var classNames = (value || "").split( rspace );
- name = name.replace( /([A-Z])/g, "-$1" ).toLowerCase();
+ for ( var i = 0, l = this.length; i < l; i++ ) {
+ var elem = this[i];
- var computedStyle = defaultView.getComputedStyle( elem, null );
+ if ( elem.nodeType === 1 ) {
+ if ( !elem.className ) {
+ elem.className = value;
- if ( computedStyle )
- ret = computedStyle.getPropertyValue( name );
+ } else {
+ var className = " " + elem.className + " ", setClass = elem.className;
+ for ( var c = 0, cl = classNames.length; c < cl; c++ ) {
+ if ( className.indexOf( " " + classNames[c] + " " ) < 0 ) {
+ setClass += " " + classNames[c];
+ }
+ }
+ elem.className = jQuery.trim( setClass );
+ }
+ }
+ }
+ }
- // We should always get a number back from opacity
- if ( name == "opacity" && ret == "" )
- ret = "1";
+ return this;
+ },
- } else if ( elem.currentStyle ) {
- var camelCase = name.replace(/\-(\w)/g, function(all, letter){
- return letter.toUpperCase();
+ removeClass: function( value ) {
+ if ( jQuery.isFunction(value) ) {
+ return this.each(function(i) {
+ var self = jQuery(this);
+ self.removeClass( value.call(this, i, self.attr("class")) );
});
+ }
- ret = elem.currentStyle[ name ] || elem.currentStyle[ camelCase ];
+ if ( (value && typeof value === "string") || value === undefined ) {
+ var classNames = (value || "").split(rspace);
- // From the awesome hack by Dean Edwards
- // http://erik.eae.net/archives/2007/07/27/18.54.15/#comment-102291
+ for ( var i = 0, l = this.length; i < l; i++ ) {
+ var elem = this[i];
- // If we're not dealing with a regular pixel number
- // but a number that has a weird ending, we need to convert it to pixels
- if ( !/^\d+(px)?$/i.test( ret ) && /^\d/.test( ret ) ) {
- // Remember the original values
- var left = style.left, rsLeft = elem.runtimeStyle.left;
-
- // Put in the new values to get a computed value out
- elem.runtimeStyle.left = elem.currentStyle.left;
- style.left = ret || 0;
- ret = style.pixelLeft + "px";
+ if ( elem.nodeType === 1 && elem.className ) {
+ if ( value ) {
+ var className = (" " + elem.className + " ").replace(rclass, " ");
+ for ( var c = 0, cl = classNames.length; c < cl; c++ ) {
+ className = className.replace(" " + classNames[c] + " ", " ");
+ }
+ elem.className = jQuery.trim( className );
- // Revert the changed values
- style.left = left;
- elem.runtimeStyle.left = rsLeft;
+ } else {
+ elem.className = "";
+ }
+ }
}
}
- return ret;
+ return this;
},
- clean: function( elems, context, fragment ) {
- context = context || document;
-
- // !context.createElement fails in IE with an error but returns typeof 'object'
- if ( typeof context.createElement === "undefined" )
- context = context.ownerDocument || context[0] && context[0].ownerDocument || document;
+ toggleClass: function( value, stateVal ) {
+ var type = typeof value, isBool = typeof stateVal === "boolean";
- // If a single string is passed in and it's a single tag
- // just do a createElement and skip the rest
- if ( !fragment && elems.length === 1 && typeof elems[0] === "string" ) {
- var match = /^<(\w+)\s*\/?>$/.exec(elems[0]);
- if ( match )
- return [ context.createElement( match[1] ) ];
+ if ( jQuery.isFunction( value ) ) {
+ return this.each(function(i) {
+ var self = jQuery(this);
+ self.toggleClass( value.call(this, i, self.attr("class"), stateVal), stateVal );
+ });
}
- var ret = [], scripts = [], div = context.createElement("div");
+ return this.each(function() {
+ if ( type === "string" ) {
+ // toggle individual class names
+ var className, i = 0, self = jQuery(this),
+ state = stateVal,
+ classNames = value.split( rspace );
+
+ while ( (className = classNames[ i++ ]) ) {
+					// check each className given, space separated list
+ state = isBool ? state : !self.hasClass( className );
+ self[ state ? "addClass" : "removeClass" ]( className );
+ }
- jQuery.each(elems, function(i, elem){
- if ( typeof elem === "number" )
- elem += '';
+ } else if ( type === "undefined" || type === "boolean" ) {
+ if ( this.className ) {
+ // store className if set
+ jQuery.data( this, "__className__", this.className );
+ }
- if ( !elem )
- return;
+ // toggle whole className
+ this.className = this.className || value === false ? "" : jQuery.data( this, "__className__" ) || "";
+ }
+ });
+ },
- // Convert html string into DOM nodes
- if ( typeof elem === "string" ) {
- // Fix "XHTML"-style tags in all browsers
- elem = elem.replace(/(<(\w+)[^>]*?)\/>/g, function(all, front, tag){
- return tag.match(/^(abbr|br|col|img|input|link|meta|param|hr|area|embed)$/i) ?
- all :
- front + "></" + tag + ">";
- });
+ hasClass: function( selector ) {
+ var className = " " + selector + " ";
+ for ( var i = 0, l = this.length; i < l; i++ ) {
+ if ( (" " + this[i].className + " ").replace(rclass, " ").indexOf( className ) > -1 ) {
+ return true;
+ }
+ }
- // Trim whitespace, otherwise indexOf won't work as expected
- var tags = elem.replace(/^\s+/, "").substring(0, 10).toLowerCase();
+ return false;
+ },
- var wrap =
- // option or optgroup
- !tags.indexOf("<opt") &&
- [ 1, "<select multiple='multiple'>", "</select>" ] ||
+ val: function( value ) {
+ if ( value === undefined ) {
+ var elem = this[0];
- !tags.indexOf("<leg") &&
- [ 1, "<fieldset>", "</fieldset>" ] ||
+ if ( elem ) {
+ if ( jQuery.nodeName( elem, "option" ) ) {
+ return (elem.attributes.value || {}).specified ? elem.value : elem.text;
+ }
- tags.match(/^<(thead|tbody|tfoot|colg|cap)/) &&
- [ 1, "<table>", "</table>" ] ||
+				// We need to handle select boxes specially
+ if ( jQuery.nodeName( elem, "select" ) ) {
+ var index = elem.selectedIndex,
+ values = [],
+ options = elem.options,
+ one = elem.type === "select-one";
- !tags.indexOf("<tr") &&
- [ 2, "<table><tbody>", "</tbody></table>" ] ||
+ // Nothing was selected
+ if ( index < 0 ) {
+ return null;
+ }
- // <thead> matched above
- (!tags.indexOf("<td") || !tags.indexOf("<th")) &&
- [ 3, "<table><tbody><tr>", "</tr></tbody></table>" ] ||
+ // Loop through all the selected options
+ for ( var i = one ? index : 0, max = one ? index + 1 : options.length; i < max; i++ ) {
+ var option = options[ i ];
- !tags.indexOf("<col") &&
- [ 2, "<table><tbody></tbody><colgroup>", "</colgroup></table>" ] ||
+ if ( option.selected ) {
+							// Get the specific value for the option
+ value = jQuery(option).val();
- // IE can't serialize <link> and <script> tags normally
- !jQuery.support.htmlSerialize &&
- [ 1, "div<div>", "</div>" ] ||
+							// We don't need an array for single selects
+ if ( one ) {
+ return value;
+ }
- [ 0, "", "" ];
+ // Multi-Selects return an array
+ values.push( value );
+ }
+ }
- // Go to html and back, then peel off extra wrappers
- div.innerHTML = wrap[1] + elem + wrap[2];
+ return values;
+ }
- // Move to the right depth
- while ( wrap[0]-- )
- div = div.lastChild;
+			// Handle the case where in WebKit "" is returned instead of "on" when a value isn't specified
+ if ( rradiocheck.test( elem.type ) && !jQuery.support.checkOn ) {
+ return elem.getAttribute("value") === null ? "on" : elem.value;
+ }
+
- // Remove IE's autoinserted <tbody> from table fragments
- if ( !jQuery.support.tbody ) {
+ // Everything else, we just grab the value
+ return (elem.value || "").replace(rreturn, "");
- // String was a <table>, *may* have spurious <tbody>
- var hasBody = /<tbody/i.test(elem),
- tbody = !tags.indexOf("<table") && !hasBody ?
- div.firstChild && div.firstChild.childNodes :
+ }
- // String was a bare <thead> or <tfoot>
- wrap[1] == "<table>" && !hasBody ?
- div.childNodes :
- [];
+ return undefined;
+ }
- for ( var j = tbody.length - 1; j >= 0 ; --j )
- if ( jQuery.nodeName( tbody[ j ], "tbody" ) && !tbody[ j ].childNodes.length )
- tbody[ j ].parentNode.removeChild( tbody[ j ] );
+ var isFunction = jQuery.isFunction(value);
- }
+ return this.each(function(i) {
+ var self = jQuery(this), val = value;
- // IE completely kills leading whitespace when innerHTML is used
- if ( !jQuery.support.leadingWhitespace && /^\s/.test( elem ) )
- div.insertBefore( context.createTextNode( elem.match(/^\s*/)[0] ), div.firstChild );
-
- elem = jQuery.makeArray( div.childNodes );
+ if ( this.nodeType !== 1 ) {
+ return;
}
- if ( elem.nodeType )
- ret.push( elem );
- else
- ret = jQuery.merge( ret, elem );
+ if ( isFunction ) {
+ val = value.call(this, i, self.val());
+ }
- });
+			// Typecast to a string each time: when value is a function, the
+			// computed val can be different for each element.
+ if ( typeof val === "number" ) {
+ val += "";
+ }
- if ( fragment ) {
- for ( var i = 0; ret[i]; i++ ) {
- if ( jQuery.nodeName( ret[i], "script" ) && (!ret[i].type || ret[i].type.toLowerCase() === "text/javascript") ) {
- scripts.push( ret[i].parentNode ? ret[i].parentNode.removeChild( ret[i] ) : ret[i] );
- } else {
- if ( ret[i].nodeType === 1 )
- ret.splice.apply( ret, [i + 1, 0].concat(jQuery.makeArray(ret[i].getElementsByTagName("script"))) );
- fragment.appendChild( ret[i] );
+ if ( jQuery.isArray(val) && rradiocheck.test( this.type ) ) {
+ this.checked = jQuery.inArray( self.val(), val ) >= 0;
+
+ } else if ( jQuery.nodeName( this, "select" ) ) {
+ var values = jQuery.makeArray(val);
+
+ jQuery( "option", this ).each(function() {
+ this.selected = jQuery.inArray( jQuery(this).val(), values ) >= 0;
+ });
+
+ if ( !values.length ) {
+ this.selectedIndex = -1;
}
+
+ } else {
+ this.value = val;
}
-
- return scripts;
- }
+ });
+ }
+});
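+
+// Usage sketch (illustrative): the class helpers accept space-separated lists,
+// and val() reads or writes form values uniformly across element types.
+//   jQuery("li").addClass("done urgent").toggleClass("urgent");
+//   jQuery("select[multiple]").val(["a", "b"]);  // selects matching options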
- return ret;
+jQuery.extend({
+ attrFn: {
+ val: true,
+ css: true,
+ html: true,
+ text: true,
+ data: true,
+ width: true,
+ height: true,
+ offset: true
},
-
- attr: function( elem, name, value ) {
+
+ attr: function( elem, name, value, pass ) {
// don't set attributes on text and comment nodes
- if (!elem || elem.nodeType == 3 || elem.nodeType == 8)
+ if ( !elem || elem.nodeType === 3 || elem.nodeType === 8 ) {
return undefined;
+ }
- var notxml = !jQuery.isXMLDoc( elem ),
+ if ( pass && name in jQuery.attrFn ) {
+ return jQuery(elem)[name](value);
+ }
+
+ var notxml = elem.nodeType !== 1 || !jQuery.isXMLDoc( elem ),
// Whether we are setting (or getting)
set = value !== undefined;
@@ -977,472 +1464,1209 @@ jQuery.extend({
name = notxml && jQuery.props[ name ] || name;
// Only do all the following if this is a node (faster for style)
- // IE elem.getAttribute passes even for style
- if ( elem.tagName ) {
-
+ if ( elem.nodeType === 1 ) {
// These attributes require special treatment
- var special = /href|src|style/.test( name );
+ var special = rspecialurl.test( name );
- // Safari mis-reports the default selected property of a hidden option
+ // Safari mis-reports the default selected property of an option
// Accessing the parent's selectedIndex property fixes it
- if ( name == "selected" && elem.parentNode )
- elem.parentNode.selectedIndex;
+ if ( name === "selected" && !jQuery.support.optSelected ) {
+ var parent = elem.parentNode;
+ if ( parent ) {
+ parent.selectedIndex;
+
+ // Make sure that it also works with optgroups, see #5701
+ if ( parent.parentNode ) {
+ parent.parentNode.selectedIndex;
+ }
+ }
+ }
// If applicable, access the attribute via the DOM 0 way
if ( name in elem && notxml && !special ) {
- if ( set ){
+ if ( set ) {
// We can't allow the type property to be changed (since it causes problems in IE)
- if ( name == "type" && jQuery.nodeName( elem, "input" ) && elem.parentNode )
- throw "type property can't be changed";
+ if ( name === "type" && rtype.test( elem.nodeName ) && elem.parentNode ) {
+ jQuery.error( "type property can't be changed" );
+ }
elem[ name ] = value;
}
// browsers index elements by id/name on forms, give priority to attributes.
- if( jQuery.nodeName( elem, "form" ) && elem.getAttributeNode(name) )
+ if ( jQuery.nodeName( elem, "form" ) && elem.getAttributeNode(name) ) {
return elem.getAttributeNode( name ).nodeValue;
+ }
// elem.tabIndex doesn't always return the correct value when it hasn't been explicitly set
// http://fluidproject.org/blog/2008/01/09/getting-setting-and-removing-tabindex-values-with-javascript/
- if ( name == "tabIndex" ) {
+ if ( name === "tabIndex" ) {
var attributeNode = elem.getAttributeNode( "tabIndex" );
- return attributeNode && attributeNode.specified
- ? attributeNode.value
- : elem.nodeName.match(/(button|input|object|select|textarea)/i)
- ? 0
- : elem.nodeName.match(/^(a|area)$/i) && elem.href
- ? 0
- : undefined;
+
+ return attributeNode && attributeNode.specified ?
+ attributeNode.value :
+ rfocusable.test( elem.nodeName ) || rclickable.test( elem.nodeName ) && elem.href ?
+ 0 :
+ undefined;
}
return elem[ name ];
}
- if ( !jQuery.support.style && notxml && name == "style" )
- return jQuery.attr( elem.style, "cssText", value );
+ if ( !jQuery.support.style && notxml && name === "style" ) {
+ if ( set ) {
+ elem.style.cssText = "" + value;
+ }
+
+ return elem.style.cssText;
+ }
- if ( set )
+ if ( set ) {
// convert the value to a string (all browsers do this but IE) see #1070
elem.setAttribute( name, "" + value );
+ }
- var attr = !jQuery.support.hrefNormalized && notxml && special
+ var attr = !jQuery.support.hrefNormalized && notxml && special ?
// Some attributes require a special call on IE
- ? elem.getAttribute( name, 2 )
- : elem.getAttribute( name );
+ elem.getAttribute( name, 2 ) :
+ elem.getAttribute( name );
// Non-existent attributes return null, we normalize to undefined
return attr === null ? undefined : attr;
}
// elem is actually elem.style ... set the style
+ // Using attr for specific style information is now deprecated. Use style instead.
+ return jQuery.style( elem, name, value );
+ }
+});
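+
+// Usage sketch (illustrative): names in jQuery.attrFn are routed to the method
+// of the same name when attr() is called with pass set, which is what lets the
+// jQuery("<div/>", {...}) form mix attributes with method calls.
+//   jQuery("<div/>", { "class": "hint", text: "saved", click: function() {} });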
+var rnamespaces = /\.(.*)$/,
+ fcleanup = function( nm ) {
+ return nm.replace(/[^\w\s\.\|`]/g, function( ch ) {
+ return "\\" + ch;
+ });
+ };
- // IE uses filters for opacity
- if ( !jQuery.support.opacity && name == "opacity" ) {
- if ( set ) {
- // IE has trouble with opacity if it does not have layout
- // Force it by setting the zoom level
- elem.zoom = 1;
+/*
+ * A number of helper functions used for managing events.
+ * Many of the ideas behind this code originated from
+ * Dean Edwards' addEvent library.
+ */
+jQuery.event = {
- // Set the alpha filter to set the opacity
- elem.filter = (elem.filter || "").replace( /alpha\([^)]*\)/, "" ) +
- (parseInt( value ) + '' == "NaN" ? "" : "alpha(opacity=" + value * 100 + ")");
- }
+ // Bind an event to an element
+ // Original by Dean Edwards
+ add: function( elem, types, handler, data ) {
+ if ( elem.nodeType === 3 || elem.nodeType === 8 ) {
+ return;
+ }
- return elem.filter && elem.filter.indexOf("opacity=") >= 0 ?
- (parseFloat( elem.filter.match(/opacity=([^)]*)/)[1] ) / 100) + '':
- "";
+ // For whatever reason, IE has trouble passing the window object
+ // around, causing it to be cloned in the process
+ if ( elem.setInterval && ( elem !== window && !elem.frameElement ) ) {
+ elem = window;
}
- name = name.replace(/-([a-z])/ig, function(all, letter){
- return letter.toUpperCase();
- });
+ var handleObjIn, handleObj;
- if ( set )
- elem[ name ] = value;
+ if ( handler.handler ) {
+ handleObjIn = handler;
+ handler = handleObjIn.handler;
+ }
- return elem[ name ];
- },
+ // Make sure that the function being executed has a unique ID
+ if ( !handler.guid ) {
+ handler.guid = jQuery.guid++;
+ }
- trim: function( text ) {
- return (text || "").replace( /^\s+|\s+$/g, "" );
- },
+ // Init the element's event structure
+ var elemData = jQuery.data( elem );
- makeArray: function( array ) {
- var ret = [];
+ // If no elemData is found then we must be trying to bind to one of the
+ // banned noData elements
+ if ( !elemData ) {
+ return;
+ }
- if( array != null ){
- var i = array.length;
- // The window, strings (and functions) also have 'length'
- if( i == null || typeof array === "string" || jQuery.isFunction(array) || array.setInterval )
- ret[0] = array;
- else
- while( i )
- ret[--i] = array[i];
+ var events = elemData.events = elemData.events || {},
+		eventHandle = elemData.handle;
+
+ if ( !eventHandle ) {
+ elemData.handle = eventHandle = function() {
+ // Handle the second event of a trigger and when
+ // an event is called after a page has unloaded
+ return typeof jQuery !== "undefined" && !jQuery.event.triggered ?
+ jQuery.event.handle.apply( eventHandle.elem, arguments ) :
+ undefined;
+ };
}
- return ret;
- },
+ // Add elem as a property of the handle function
+ // This is to prevent a memory leak with non-native events in IE.
+ eventHandle.elem = elem;
- inArray: function( elem, array ) {
- for ( var i = 0, length = array.length; i < length; i++ )
- // Use === because on IE, window == document
- if ( array[ i ] === elem )
- return i;
+ // Handle multiple events separated by a space
+ // jQuery(...).bind("mouseover mouseout", fn);
+ types = types.split(" ");
- return -1;
+ var type, i = 0, namespaces;
+
+ while ( (type = types[ i++ ]) ) {
+ handleObj = handleObjIn ?
+ jQuery.extend({}, handleObjIn) :
+ { handler: handler, data: data };
+
+ // Namespaced event handlers
+ if ( type.indexOf(".") > -1 ) {
+ namespaces = type.split(".");
+ type = namespaces.shift();
+ handleObj.namespace = namespaces.slice(0).sort().join(".");
+
+ } else {
+ namespaces = [];
+ handleObj.namespace = "";
+ }
+
+ handleObj.type = type;
+ handleObj.guid = handler.guid;
+
+ // Get the current list of functions bound to this event
+ var handlers = events[ type ],
+ special = jQuery.event.special[ type ] || {};
+
+ // Init the event handler queue
+ if ( !handlers ) {
+ handlers = events[ type ] = [];
+
+ // Check for a special event handler
+ // Only use addEventListener/attachEvent if the special
+ // events handler returns false
+ if ( !special.setup || special.setup.call( elem, data, namespaces, eventHandle ) === false ) {
+ // Bind the global event handler to the element
+ if ( elem.addEventListener ) {
+ elem.addEventListener( type, eventHandle, false );
+
+ } else if ( elem.attachEvent ) {
+ elem.attachEvent( "on" + type, eventHandle );
+ }
+ }
+ }
+
+ if ( special.add ) {
+ special.add.call( elem, handleObj );
+
+ if ( !handleObj.handler.guid ) {
+ handleObj.handler.guid = handler.guid;
+ }
+ }
+
+ // Add the function to the element's handler list
+ handlers.push( handleObj );
+
+ // Keep track of which events have been used, for global triggering
+ jQuery.event.global[ type ] = true;
+ }
+
+ // Nullify elem to prevent memory leaks in IE
+ elem = null;
},
- merge: function( first, second ) {
- // We have to loop this way because IE & Opera overwrite the length
- // expando of getElementsByTagName
- var i = 0, elem, pos = first.length;
- // Also, we need to make sure that the correct elements are being returned
- // (IE returns comment nodes in a '*' query)
- if ( !jQuery.support.getAll ) {
- while ( (elem = second[ i++ ]) != null )
- if ( elem.nodeType != 8 )
- first[ pos++ ] = elem;
-
- } else
- while ( (elem = second[ i++ ]) != null )
- first[ pos++ ] = elem;
+ global: {},
- return first;
+ // Detach an event or set of events from an element
+ remove: function( elem, types, handler, pos ) {
+ // don't do events on text and comment nodes
+ if ( elem.nodeType === 3 || elem.nodeType === 8 ) {
+ return;
+ }
+
+ var ret, type, fn, i = 0, all, namespaces, namespace, special, eventType, handleObj, origType,
+ elemData = jQuery.data( elem ),
+ events = elemData && elemData.events;
+
+ if ( !elemData || !events ) {
+ return;
+ }
+
+ // types is actually an event object here
+ if ( types && types.type ) {
+ handler = types.handler;
+ types = types.type;
+ }
+
+ // Unbind all events for the element
+ if ( !types || typeof types === "string" && types.charAt(0) === "." ) {
+ types = types || "";
+
+ for ( type in events ) {
+ jQuery.event.remove( elem, type + types );
+ }
+
+ return;
+ }
+
+ // Handle multiple events separated by a space
+ // jQuery(...).unbind("mouseover mouseout", fn);
+ types = types.split(" ");
+
+ while ( (type = types[ i++ ]) ) {
+ origType = type;
+ handleObj = null;
+ all = type.indexOf(".") < 0;
+ namespaces = [];
+
+ if ( !all ) {
+ // Namespaced event handlers
+ namespaces = type.split(".");
+ type = namespaces.shift();
+
+ namespace = new RegExp("(^|\\.)" +
+					jQuery.map( namespaces.slice(0).sort(), fcleanup ).join("\\.(?:.*\\.)?") + "(\\.|$)");
+ }
+
+ eventType = events[ type ];
+
+ if ( !eventType ) {
+ continue;
+ }
+
+ if ( !handler ) {
+ for ( var j = 0; j < eventType.length; j++ ) {
+ handleObj = eventType[ j ];
+
+ if ( all || namespace.test( handleObj.namespace ) ) {
+ jQuery.event.remove( elem, origType, handleObj.handler, j );
+ eventType.splice( j--, 1 );
+ }
+ }
+
+ continue;
+ }
+
+ special = jQuery.event.special[ type ] || {};
+
+ for ( var j = pos || 0; j < eventType.length; j++ ) {
+ handleObj = eventType[ j ];
+
+ if ( handler.guid === handleObj.guid ) {
+ // remove the given handler for the given type
+ if ( all || namespace.test( handleObj.namespace ) ) {
+ if ( pos == null ) {
+ eventType.splice( j--, 1 );
+ }
+
+ if ( special.remove ) {
+ special.remove.call( elem, handleObj );
+ }
+ }
+
+ if ( pos != null ) {
+ break;
+ }
+ }
+ }
+
+ // remove generic event handler if no more handlers exist
+ if ( eventType.length === 0 || pos != null && eventType.length === 1 ) {
+ if ( !special.teardown || special.teardown.call( elem, namespaces ) === false ) {
+ removeEvent( elem, type, elemData.handle );
+ }
+
+ ret = null;
+ delete events[ type ];
+ }
+ }
+
+ // Remove the expando if it's no longer used
+ if ( jQuery.isEmptyObject( events ) ) {
+ var handle = elemData.handle;
+ if ( handle ) {
+ handle.elem = null;
+ }
+
+ delete elemData.events;
+ delete elemData.handle;
+
+ if ( jQuery.isEmptyObject( elemData ) ) {
+ jQuery.removeData( elem );
+ }
+ }
},
- unique: function( array ) {
- var ret = [], done = {};
+ // bubbling is internal
+ trigger: function( event, data, elem /*, bubbling */ ) {
+ // Event object or event type
+ var type = event.type || event,
+ bubbling = arguments[3];
+
+ if ( !bubbling ) {
+ event = typeof event === "object" ?
+ // jQuery.Event object
+ event[expando] ? event :
+ // Object literal
+ jQuery.extend( jQuery.Event(type), event ) :
+ // Just the event type (string)
+ jQuery.Event(type);
+
+ if ( type.indexOf("!") >= 0 ) {
+ event.type = type = type.slice(0, -1);
+ event.exclusive = true;
+ }
+
+ // Handle a global trigger
+ if ( !elem ) {
+ // Don't bubble custom events when global (to avoid too much overhead)
+ event.stopPropagation();
+
+ // Only trigger if we've ever bound an event for it
+ if ( jQuery.event.global[ type ] ) {
+ jQuery.each( jQuery.cache, function() {
+ if ( this.events && this.events[type] ) {
+ jQuery.event.trigger( event, data, this.handle.elem );
+ }
+ });
+ }
+ }
+
+ // Handle triggering a single element
+ // don't do events on text and comment nodes
+ if ( !elem || elem.nodeType === 3 || elem.nodeType === 8 ) {
+ return undefined;
+ }
+
+ // Clean up in case it is reused
+ event.result = undefined;
+ event.target = elem;
+
+ // Clone the incoming data, if any
+ data = jQuery.makeArray( data );
+ data.unshift( event );
+ }
+
+ event.currentTarget = elem;
+
+ // Trigger the event, it is assumed that "handle" is a function
+ var handle = jQuery.data( elem, "handle" );
+ if ( handle ) {
+ handle.apply( elem, data );
+ }
+
+ var parent = elem.parentNode || elem.ownerDocument;
+
+ // Trigger an inline bound script
try {
+ if ( !(elem && elem.nodeName && jQuery.noData[elem.nodeName.toLowerCase()]) ) {
+ if ( elem[ "on" + type ] && elem[ "on" + type ].apply( elem, data ) === false ) {
+ event.result = false;
+ }
+ }
+
+ // prevent IE from throwing an error for some elements with some event types, see #3533
+ } catch (e) {}
+
+ if ( !event.isPropagationStopped() && parent ) {
+ jQuery.event.trigger( event, data, parent, true );
+
+ } else if ( !event.isDefaultPrevented() ) {
+ var target = event.target, old,
+ isClick = jQuery.nodeName(target, "a") && type === "click",
+ special = jQuery.event.special[ type ] || {};
+
+ if ( (!special._default || special._default.call( elem, event ) === false) &&
+ !isClick && !(target && target.nodeName && jQuery.noData[target.nodeName.toLowerCase()]) ) {
+
+ try {
+ if ( target[ type ] ) {
+ // Make sure that we don't accidentally re-trigger the onFOO events
+ old = target[ "on" + type ];
- for ( var i = 0, length = array.length; i < length; i++ ) {
- var id = jQuery.data( array[ i ] );
+ if ( old ) {
+ target[ "on" + type ] = null;
+ }
+
+ jQuery.event.triggered = true;
+ target[ type ]();
+ }
- if ( !done[ id ] ) {
- done[ id ] = true;
- ret.push( array[ i ] );
+ // prevent IE from throwing an error for some elements with some event types, see #3533
+ } catch (e) {}
+
+ if ( old ) {
+ target[ "on" + type ] = old;
}
+
+ jQuery.event.triggered = false;
}
+ }
+ },
+
+ handle: function( event ) {
+ var all, handlers, namespaces, namespace, events;
- } catch( e ) {
- ret = array;
+ event = arguments[0] = jQuery.event.fix( event || window.event );
+ event.currentTarget = this;
+
+ // Namespaced event handlers
+ all = event.type.indexOf(".") < 0 && !event.exclusive;
+
+ if ( !all ) {
+ namespaces = event.type.split(".");
+ event.type = namespaces.shift();
+ namespace = new RegExp("(^|\\.)" + namespaces.slice(0).sort().join("\\.(?:.*\\.)?") + "(\\.|$)");
}
- return ret;
+	events = jQuery.data(this, "events");
+	handlers = events && events[ event.type ];
+
+ if ( events && handlers ) {
+ // Clone the handlers to prevent manipulation
+ handlers = handlers.slice(0);
+
+ for ( var j = 0, l = handlers.length; j < l; j++ ) {
+ var handleObj = handlers[ j ];
+
+ // Filter the functions by class
+ if ( all || namespace.test( handleObj.namespace ) ) {
+ // Pass in a reference to the handler function itself
+ // So that we can later remove it
+ event.handler = handleObj.handler;
+ event.data = handleObj.data;
+ event.handleObj = handleObj;
+
+ var ret = handleObj.handler.apply( this, arguments );
+
+ if ( ret !== undefined ) {
+ event.result = ret;
+ if ( ret === false ) {
+ event.preventDefault();
+ event.stopPropagation();
+ }
+ }
+
+ if ( event.isImmediatePropagationStopped() ) {
+ break;
+ }
+ }
+ }
+ }
+
+ return event.result;
},
- grep: function( elems, callback, inv ) {
- var ret = [];
+ props: "altKey attrChange attrName bubbles button cancelable charCode clientX clientY ctrlKey currentTarget data detail eventPhase fromElement handler keyCode layerX layerY metaKey newValue offsetX offsetY originalTarget pageX pageY prevValue relatedNode relatedTarget screenX screenY shiftKey srcElement target toElement view wheelDelta which".split(" "),
- // Go through the array, only saving the items
- // that pass the validator function
- for ( var i = 0, length = elems.length; i < length; i++ )
- if ( !inv != !callback( elems[ i ], i ) )
- ret.push( elems[ i ] );
+ fix: function( event ) {
+ if ( event[ expando ] ) {
+ return event;
+ }
- return ret;
+ // store a copy of the original event object
+ // and "clone" to set read-only properties
+ var originalEvent = event;
+ event = jQuery.Event( originalEvent );
+
+ for ( var i = this.props.length, prop; i; ) {
+ prop = this.props[ --i ];
+ event[ prop ] = originalEvent[ prop ];
+ }
+
+ // Fix target property, if necessary
+ if ( !event.target ) {
+ event.target = event.srcElement || document; // Fixes #1925 where srcElement might not be defined either
+ }
+
+ // check if target is a textnode (safari)
+ if ( event.target.nodeType === 3 ) {
+ event.target = event.target.parentNode;
+ }
+
+ // Add relatedTarget, if necessary
+ if ( !event.relatedTarget && event.fromElement ) {
+ event.relatedTarget = event.fromElement === event.target ? event.toElement : event.fromElement;
+ }
+
+ // Calculate pageX/Y if missing and clientX/Y available
+ if ( event.pageX == null && event.clientX != null ) {
+ var doc = document.documentElement, body = document.body;
+ event.pageX = event.clientX + (doc && doc.scrollLeft || body && body.scrollLeft || 0) - (doc && doc.clientLeft || body && body.clientLeft || 0);
+ event.pageY = event.clientY + (doc && doc.scrollTop || body && body.scrollTop || 0) - (doc && doc.clientTop || body && body.clientTop || 0);
+ }
+
+ // Add which for key events
+ if ( !event.which && ((event.charCode || event.charCode === 0) ? event.charCode : event.keyCode) ) {
+ event.which = event.charCode || event.keyCode;
+ }
+
+	// Add metaKey to non-Mac browsers (use ctrl for PCs and Meta for Macs)
+ if ( !event.metaKey && event.ctrlKey ) {
+ event.metaKey = event.ctrlKey;
+ }
+
+ // Add which for click: 1 === left; 2 === middle; 3 === right
+ // Note: button is not normalized, so don't use it
+ if ( !event.which && event.button !== undefined ) {
+ event.which = (event.button & 1 ? 1 : ( event.button & 2 ? 3 : ( event.button & 4 ? 2 : 0 ) ));
+ }
+
+ return event;
},
- map: function( elems, callback ) {
- var ret = [];
+ // Deprecated, use jQuery.guid instead
+ guid: 1E8,
- // Go through the array, translating each of the items to their
- // new value (or values).
- for ( var i = 0, length = elems.length; i < length; i++ ) {
- var value = callback( elems[ i ], i );
+ // Deprecated, use jQuery.proxy instead
+ proxy: jQuery.proxy,
- if ( value != null )
- ret[ ret.length ] = value;
+ special: {
+ ready: {
+ // Make sure the ready event is setup
+ setup: jQuery.bindReady,
+ teardown: jQuery.noop
+ },
+
+ live: {
+ add: function( handleObj ) {
+ jQuery.event.add( this, handleObj.origType, jQuery.extend({}, handleObj, {handler: liveHandler}) );
+ },
+
+ remove: function( handleObj ) {
+ var remove = true,
+ type = handleObj.origType.replace(rnamespaces, "");
+
+ jQuery.each( jQuery.data(this, "events").live || [], function() {
+ if ( type === this.origType.replace(rnamespaces, "") ) {
+ remove = false;
+ return false;
+ }
+ });
+
+ if ( remove ) {
+ jQuery.event.remove( this, handleObj.origType, liveHandler );
+ }
+ }
+
+ },
+
+ beforeunload: {
+ setup: function( data, namespaces, eventHandle ) {
+ // We only want to do this special case on windows
+ if ( this.setInterval ) {
+ this.onbeforeunload = eventHandle;
+ }
+
+ return false;
+ },
+ teardown: function( namespaces, eventHandle ) {
+ if ( this.onbeforeunload === eventHandle ) {
+ this.onbeforeunload = null;
+ }
+ }
}
+ }
+};
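+
+// Usage sketch (illustrative; handler names are placeholders): namespaces let
+// one component detach only its own handlers via jQuery.event.add/remove.
+//   jQuery("#save").bind("click.draft", autosave);
+//   jQuery("#save").unbind(".draft");  // removes only the .draft handlers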
- return ret.concat.apply( [], ret );
+var removeEvent = document.removeEventListener ?
+ function( elem, type, handle ) {
+ elem.removeEventListener( type, handle, false );
+ } :
+ function( elem, type, handle ) {
+ elem.detachEvent( "on" + type, handle );
+ };
+
+jQuery.Event = function( src ) {
+ // Allow instantiation without the 'new' keyword
+ if ( !this.preventDefault ) {
+ return new jQuery.Event( src );
}
-});
-// Use of jQuery.browser is deprecated.
-// It's included for backwards compatibility and plugins,
-// although they should work to migrate away.
+ // Event object
+ if ( src && src.type ) {
+ this.originalEvent = src;
+ this.type = src.type;
+ // Event type
+ } else {
+ this.type = src;
+ }
-var userAgent = navigator.userAgent.toLowerCase();
+	// timeStamp is buggy for some events on Firefox (#3843),
+	// so we won't rely on the native value
+ this.timeStamp = now();
-// Figure out what browser is being used
-jQuery.browser = {
- version: (userAgent.match( /.+(?:rv|it|ra|ie)[\/: ]([\d.]+)/ ) || [0,'0'])[1],
- safari: /webkit/.test( userAgent ),
- opera: /opera/.test( userAgent ),
- msie: /msie/.test( userAgent ) && !/opera/.test( userAgent ),
- mozilla: /mozilla/.test( userAgent ) && !/(compatible|webkit)/.test( userAgent )
+ // Mark it as fixed
+ this[ expando ] = true;
};
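+
+// Usage sketch (illustrative): a jQuery.Event can be constructed, triggered,
+// and then inspected for what the handlers did to it.
+//   var e = jQuery.Event("change");
+//   jQuery("#field").trigger(e);
+//   if ( e.isDefaultPrevented() ) { /* a handler called preventDefault() */ }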
-jQuery.each({
- parent: function(elem){return elem.parentNode;},
- parents: function(elem){return jQuery.dir(elem,"parentNode");},
- next: function(elem){return jQuery.nth(elem,2,"nextSibling");},
- prev: function(elem){return jQuery.nth(elem,2,"previousSibling");},
- nextAll: function(elem){return jQuery.dir(elem,"nextSibling");},
- prevAll: function(elem){return jQuery.dir(elem,"previousSibling");},
- siblings: function(elem){return jQuery.sibling(elem.parentNode.firstChild,elem);},
- children: function(elem){return jQuery.sibling(elem.firstChild);},
- contents: function(elem){return jQuery.nodeName(elem,"iframe")?elem.contentDocument||elem.contentWindow.document:jQuery.makeArray(elem.childNodes);}
-}, function(name, fn){
- jQuery.fn[ name ] = function( selector ) {
- var ret = jQuery.map( this, fn );
+function returnFalse() {
+ return false;
+}
+function returnTrue() {
+ return true;
+}
+
+// jQuery.Event is based on DOM3 Events as specified by the ECMAScript Language Binding
+// http://www.w3.org/TR/2003/WD-DOM-Level-3-Events-20030331/ecma-script-binding.html
+jQuery.Event.prototype = {
+ preventDefault: function() {
+ this.isDefaultPrevented = returnTrue;
+
+ var e = this.originalEvent;
+ if ( !e ) {
+ return;
+ }
+
+ // if preventDefault exists run it on the original event
+ if ( e.preventDefault ) {
+ e.preventDefault();
+ }
+ // otherwise set the returnValue property of the original event to false (IE)
+ e.returnValue = false;
+ },
+ stopPropagation: function() {
+ this.isPropagationStopped = returnTrue;
+
+ var e = this.originalEvent;
+ if ( !e ) {
+ return;
+ }
+ // if stopPropagation exists run it on the original event
+ if ( e.stopPropagation ) {
+ e.stopPropagation();
+ }
+ // otherwise set the cancelBubble property of the original event to true (IE)
+ e.cancelBubble = true;
+ },
+ stopImmediatePropagation: function() {
+ this.isImmediatePropagationStopped = returnTrue;
+ this.stopPropagation();
+ },
+ isDefaultPrevented: returnFalse,
+ isPropagationStopped: returnFalse,
+ isImmediatePropagationStopped: returnFalse
+};
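+
+// Illustrative note: returning false from a handler is shorthand for calling
+// both methods below, as implemented in jQuery.event.handle above.
+//   jQuery("a.disabled").bind("click", function( event ) {
+//       event.preventDefault();
+//       event.stopPropagation();
+//   });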
+
+// Checks if an event happened on an element within another element
+// Used in jQuery.event.special.mouseenter and mouseleave handlers
+var withinElement = function( event ) {
+ // Check if mouse(over|out) are still within the same parent element
+ var parent = event.relatedTarget;
+
+ // Firefox sometimes assigns relatedTarget a XUL element
+	// whose parentNode property we cannot access
+ try {
+ // Traverse up the tree
+ while ( parent && parent !== this ) {
+ parent = parent.parentNode;
+ }
+
+ if ( parent !== this ) {
+ // set the correct event type
+ event.type = event.data;
+
+ // handle event if we actually just moused on to a non sub-element
+ jQuery.event.handle.apply( this, arguments );
+ }
+
+	// assume we've left the element, since we most likely moused over a XUL element
+ } catch(e) { }
+},
- if ( selector && typeof selector == "string" )
- ret = jQuery.multiFilter( selector, ret );
+// In case of event delegation, we only need to rename the event.type,
+// liveHandler will take care of the rest.
+delegate = function( event ) {
+ event.type = event.data;
+ jQuery.event.handle.apply( this, arguments );
+};
- return this.pushStack( jQuery.unique( ret ), name, selector );
+// Create mouseenter and mouseleave events
+jQuery.each({
+ mouseenter: "mouseover",
+ mouseleave: "mouseout"
+}, function( orig, fix ) {
+ jQuery.event.special[ orig ] = {
+ setup: function( data ) {
+ jQuery.event.add( this, fix, data && data.selector ? delegate : withinElement, orig );
+ },
+ teardown: function( data ) {
+ jQuery.event.remove( this, fix, data && data.selector ? delegate : withinElement );
+ }
};
});
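+
+// Usage sketch (illustrative): unlike raw mouseover/mouseout, these synthetic
+// events ignore movement between an element and its own descendants.
+//   jQuery("#menu").mouseenter(function() { jQuery(this).addClass("open"); })
+//                  .mouseleave(function() { jQuery(this).removeClass("open"); });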
-jQuery.each({
- appendTo: "append",
- prependTo: "prepend",
- insertBefore: "before",
- insertAfter: "after",
- replaceAll: "replaceWith"
-}, function(name, original){
- jQuery.fn[ name ] = function( selector ) {
- var ret = [], insert = jQuery( selector );
+// submit delegation
+if ( !jQuery.support.submitBubbles ) {
+
+ jQuery.event.special.submit = {
+ setup: function( data, namespaces ) {
+ if ( this.nodeName.toLowerCase() !== "form" ) {
+ jQuery.event.add(this, "click.specialSubmit", function( e ) {
+ var elem = e.target, type = elem.type;
+
+ if ( (type === "submit" || type === "image") && jQuery( elem ).closest("form").length ) {
+ return trigger( "submit", this, arguments );
+ }
+ });
+
+ jQuery.event.add(this, "keypress.specialSubmit", function( e ) {
+ var elem = e.target, type = elem.type;
- for ( var i = 0, l = insert.length; i < l; i++ ) {
- var elems = (i > 0 ? this.clone(true) : this).get();
- jQuery.fn[ original ].apply( jQuery(insert[i]), elems );
- ret = ret.concat( elems );
+ if ( (type === "text" || type === "password") && jQuery( elem ).closest("form").length && e.keyCode === 13 ) {
+ return trigger( "submit", this, arguments );
+ }
+ });
+
+ } else {
+ return false;
+ }
+ },
+
+ teardown: function( namespaces ) {
+ jQuery.event.remove( this, ".specialSubmit" );
+ }
+ };
+
+}
+
+// change delegation; defined here so that bind is available.
+if ( !jQuery.support.changeBubbles ) {
+
+ var formElems = /textarea|input|select/i,
+
+ changeFilters,
+
+ getVal = function( elem ) {
+ var type = elem.type, val = elem.value;
+
+ if ( type === "radio" || type === "checkbox" ) {
+ val = elem.checked;
+
+ } else if ( type === "select-multiple" ) {
+ val = elem.selectedIndex > -1 ?
+ jQuery.map( elem.options, function( elem ) {
+ return elem.selected;
+ }).join("-") :
+ "";
+
+ } else if ( elem.nodeName.toLowerCase() === "select" ) {
+ val = elem.selectedIndex;
}
- return this.pushStack( ret, name, selector );
+ return val;
+ },
+
+ testChange = function testChange( e ) {
+ var elem = e.target, data, val;
+
+ if ( !formElems.test( elem.nodeName ) || elem.readOnly ) {
+ return;
+ }
+
+ data = jQuery.data( elem, "_change_data" );
+ val = getVal(elem);
+
+		// the current data will also be retrieved by beforeactivate
+ if ( e.type !== "focusout" || elem.type !== "radio" ) {
+ jQuery.data( elem, "_change_data", val );
+ }
+
+ if ( data === undefined || val === data ) {
+ return;
+ }
+
+ if ( data != null || val ) {
+ e.type = "change";
+ return jQuery.event.trigger( e, arguments[1], elem );
+ }
+ };
+
+ jQuery.event.special.change = {
+ filters: {
+ focusout: testChange,
+
+ click: function( e ) {
+ var elem = e.target, type = elem.type;
+
+ if ( type === "radio" || type === "checkbox" || elem.nodeName.toLowerCase() === "select" ) {
+ return testChange.call( this, e );
+ }
+ },
+
+ // Change has to be called before submit
+ // Keydown will be called before keypress, which is used in submit-event delegation
+ keydown: function( e ) {
+ var elem = e.target, type = elem.type;
+
+ if ( (e.keyCode === 13 && elem.nodeName.toLowerCase() !== "textarea") ||
+ (e.keyCode === 32 && (type === "checkbox" || type === "radio")) ||
+ type === "select-multiple" ) {
+ return testChange.call( this, e );
+ }
+ },
+
+		// Beforeactivate also fires before the previous element is blurred.
+		// A change event can't be triggered from it, but it can store the
+		// current value, so focus[in] is no longer needed.
+ beforeactivate: function( e ) {
+ var elem = e.target;
+ jQuery.data( elem, "_change_data", getVal(elem) );
+ }
+ },
+
+ setup: function( data, namespaces ) {
+ if ( this.type === "file" ) {
+ return false;
+ }
+
+ for ( var type in changeFilters ) {
+ jQuery.event.add( this, type + ".specialChange", changeFilters[type] );
+ }
+
+ return formElems.test( this.nodeName );
+ },
+
+ teardown: function( namespaces ) {
+ jQuery.event.remove( this, ".specialChange" );
+
+ return formElems.test( this.nodeName );
+ }
+ };
+
+ changeFilters = jQuery.event.special.change.filters;
+}
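+
+// Usage sketch (illustrative): with the special handler above, change events
+// can be delegated even in IE, where change does not bubble natively.
+//   jQuery("form").delegate("select", "change", function() { /* react */ });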
+
+function trigger( type, elem, args ) {
+ args[0].type = type;
+ return jQuery.event.handle.apply( elem, args );
+}
+
+// Create "bubbling" focus and blur events
+if ( document.addEventListener ) {
+ jQuery.each({ focus: "focusin", blur: "focusout" }, function( orig, fix ) {
+ jQuery.event.special[ fix ] = {
+ setup: function() {
+ this.addEventListener( orig, handler, true );
+ },
+ teardown: function() {
+ this.removeEventListener( orig, handler, true );
+ }
+ };
+
+ function handler( e ) {
+ e = jQuery.event.fix( e );
+ e.type = fix;
+ return jQuery.event.handle.call( this, e );
+ }
+ });
+}
+
+jQuery.each(["bind", "one"], function( i, name ) {
+ jQuery.fn[ name ] = function( type, data, fn ) {
+ // Handle object literals
+ if ( typeof type === "object" ) {
+ for ( var key in type ) {
+ this[ name ](key, data, type[key], fn);
+ }
+ return this;
+ }
+
+ if ( jQuery.isFunction( data ) ) {
+ fn = data;
+ data = undefined;
+ }
+
+ var handler = name === "one" ? jQuery.proxy( fn, function( event ) {
+ jQuery( this ).unbind( event, handler );
+ return fn.apply( this, arguments );
+ }) : fn;
+
+ if ( type === "unload" && name !== "one" ) {
+ this.one( type, data, fn );
+
+ } else {
+ for ( var i = 0, l = this.length; i < l; i++ ) {
+ jQuery.event.add( this[i], type, handler, data );
+ }
+ }
+
+ return this;
};
});
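+
+// Usage sketch (illustrative; show/hide/dismiss are placeholder functions):
+// bind() accepts an object literal of handlers, and one() wraps the handler so
+// it unbinds itself after the first call.
+//   jQuery("#panel").bind({ mouseenter: show, mouseleave: hide });
+//   jQuery("#intro").one("click", dismiss);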
-jQuery.each({
- removeAttr: function( name ) {
- jQuery.attr( this, name, "" );
- if (this.nodeType == 1)
- this.removeAttribute( name );
- },
+jQuery.fn.extend({
+ unbind: function( type, fn ) {
+ // Handle object literals
+ if ( typeof type === "object" && !type.preventDefault ) {
+ for ( var key in type ) {
+ this.unbind(key, type[key]);
+ }
- addClass: function( classNames ) {
- jQuery.className.add( this, classNames );
- },
+ } else {
+ for ( var i = 0, l = this.length; i < l; i++ ) {
+ jQuery.event.remove( this[i], type, fn );
+ }
+ }
- removeClass: function( classNames ) {
- jQuery.className.remove( this, classNames );
+ return this;
+ },
+
+ delegate: function( selector, types, data, fn ) {
+ return this.live( types, data, fn, selector );
+ },
+
+ undelegate: function( selector, types, fn ) {
+ if ( arguments.length === 0 ) {
+ return this.unbind( "live" );
+
+ } else {
+ return this.die( types, null, fn, selector );
+ }
+ },
+
+ trigger: function( type, data ) {
+ return this.each(function() {
+ jQuery.event.trigger( type, data, this );
+ });
},
- toggleClass: function( classNames, state ) {
- if( typeof state !== "boolean" )
- state = !jQuery.className.has( this, classNames );
- jQuery.className[ state ? "add" : "remove" ]( this, classNames );
+ triggerHandler: function( type, data ) {
+ if ( this[0] ) {
+ var event = jQuery.Event( type );
+ event.preventDefault();
+ event.stopPropagation();
+ jQuery.event.trigger( event, data, this[0] );
+ return event.result;
+ }
},
- remove: function( selector ) {
- if ( !selector || jQuery.filter( selector, [ this ] ).length ) {
- // Prevent memory leaks
- jQuery( "*", this ).add([this]).each(function(){
- jQuery.event.remove(this);
- jQuery.removeData(this);
- });
- if (this.parentNode)
- this.parentNode.removeChild( this );
+ toggle: function( fn ) {
+ // Save reference to arguments for access in closure
+ var args = arguments, i = 1;
+
+ // link all the functions, so any of them can unbind this click handler
+ while ( i < args.length ) {
+ jQuery.proxy( fn, args[ i++ ] );
}
+
+ return this.click( jQuery.proxy( fn, function( event ) {
+ // Figure out which function to execute
+ var lastToggle = ( jQuery.data( this, "lastToggle" + fn.guid ) || 0 ) % i;
+ jQuery.data( this, "lastToggle" + fn.guid, lastToggle + 1 );
+
+ // Make sure that clicks stop
+ event.preventDefault();
+
+ // and execute the function
+ return args[ lastToggle ].apply( this, arguments ) || false;
+ }));
},
- empty: function() {
- // Remove element nodes and prevent memory leaks
- jQuery(this).children().remove();
+ hover: function( fnOver, fnOut ) {
+ return this.mouseenter( fnOver ).mouseleave( fnOut || fnOver );
+ }
+});
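+
+// Usage sketch (illustrative; "select" is a placeholder handler): delegate()
+// and undelegate() are thin wrappers over live()/die() rooted at this element.
+//   jQuery("#list").delegate("li", "click", select);
+//   jQuery("#list").undelegate("li", "click", select);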
+
+var liveMap = {
+ focus: "focusin",
+ blur: "focusout",
+ mouseenter: "mouseover",
+ mouseleave: "mouseout"
+};
+
+jQuery.each(["live", "die"], function( i, name ) {
+ jQuery.fn[ name ] = function( types, data, fn, origSelector /* Internal Use Only */ ) {
+ var type, i = 0, match, namespaces, preType,
+ selector = origSelector || this.selector,
+ context = origSelector ? this : jQuery( this.context );
+
+ if ( jQuery.isFunction( data ) ) {
+ fn = data;
+ data = undefined;
+ }
+
+ types = (types || "").split(" ");
+
+ while ( (type = types[ i++ ]) != null ) {
+ match = rnamespaces.exec( type );
+ namespaces = "";
+
+ if ( match ) {
+ namespaces = match[0];
+ type = type.replace( rnamespaces, "" );
+ }
+
+ if ( type === "hover" ) {
+ types.push( "mouseenter" + namespaces, "mouseleave" + namespaces );
+ continue;
+ }
+
+ preType = type;
+
+ if ( type === "focus" || type === "blur" ) {
+ types.push( liveMap[ type ] + namespaces );
+ type = type + namespaces;
+
+ } else {
+ type = (liveMap[ type ] || type) + namespaces;
+ }
+
+ if ( name === "live" ) {
+ // bind live handler
+ context.each(function(){
+ jQuery.event.add( this, liveConvert( type, selector ),
+ { data: data, selector: selector, handler: fn, origType: type, origHandler: fn, preType: preType } );
+ });
- // Remove any remaining nodes
- while ( this.firstChild )
- this.removeChild( this.firstChild );
+ } else {
+ // unbind live handler
+ context.unbind( liveConvert( type, selector ), fn );
+ }
+ }
+
+ return this;
}
-}, function(name, fn){
- jQuery.fn[ name ] = function(){
- return this.each( fn, arguments );
+});
+
+function liveHandler( event ) {
+ var stop, elems = [], selectors = [], args = arguments,
+ related, match, handleObj, elem, j, i, l, data,
+ events = jQuery.data( this, "events" );
+
+ // Make sure we avoid non-left-click bubbling in Firefox (#3861)
+ if ( event.liveFired === this || !events || !events.live || event.button && event.type === "click" ) {
+ return;
+ }
+
+ event.liveFired = this;
+
+ var live = events.live.slice(0);
+
+ for ( j = 0; j < live.length; j++ ) {
+ handleObj = live[j];
+
+ if ( handleObj.origType.replace( rnamespaces, "" ) === event.type ) {
+ selectors.push( handleObj.selector );
+
+ } else {
+ live.splice( j--, 1 );
+ }
+ }
+
+ match = jQuery( event.target ).closest( selectors, event.currentTarget );
+
+ for ( i = 0, l = match.length; i < l; i++ ) {
+ for ( j = 0; j < live.length; j++ ) {
+ handleObj = live[j];
+
+ if ( match[i].selector === handleObj.selector ) {
+ elem = match[i].elem;
+ related = null;
+
+ // Those two events require additional checking
+ if ( handleObj.preType === "mouseenter" || handleObj.preType === "mouseleave" ) {
+ related = jQuery( event.relatedTarget ).closest( handleObj.selector )[0];
+ }
+
+ if ( !related || related !== elem ) {
+ elems.push({ elem: elem, handleObj: handleObj });
+ }
+ }
+ }
+ }
+
+ for ( i = 0, l = elems.length; i < l; i++ ) {
+ match = elems[i];
+ event.currentTarget = match.elem;
+ event.data = match.handleObj.data;
+ event.handleObj = match.handleObj;
+
+ if ( match.handleObj.origHandler.apply( match.elem, args ) === false ) {
+ stop = false;
+ break;
+ }
+ }
+
+ return stop;
+}
+
+function liveConvert( type, selector ) {
+ return "live." + (type && type !== "*" ? type + "." : "") + selector.replace(/\./g, "`").replace(/ /g, "&");
+}
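+
+// Illustrative mapping: liveConvert builds the internal event name that live
+// handlers are stored under, rewriting "." and " " in the selector, e.g.
+//   liveConvert("click", "a.nav")  ->  "live.click.a`nav"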
+
+jQuery.each( ("blur focus focusin focusout load resize scroll unload click dblclick " +
+ "mousedown mouseup mousemove mouseover mouseout mouseenter mouseleave " +
+ "change select submit keydown keypress keyup error").split(" "), function( i, name ) {
+
+ // Handle event binding
+ jQuery.fn[ name ] = function( fn ) {
+ return fn ? this.bind( name, fn ) : this.trigger( name );
};
+
+ if ( jQuery.attrFn ) {
+ jQuery.attrFn[ name ] = true;
+ }
});
-// Helper function used by the dimensions and offset modules
-function num(elem, prop) {
- return elem[0] && parseInt( jQuery.curCSS(elem[0], prop, true), 10 ) || 0;
+// Prevent memory leaks in IE
+// Window isn't included so as not to unbind existing unload events
+// More info:
+// - http://isaacschlueter.com/2006/10/msie-memory-leaks/
+if ( window.attachEvent && !window.addEventListener ) {
+ window.attachEvent("onunload", function() {
+ for ( var id in jQuery.cache ) {
+ if ( jQuery.cache[ id ].handle ) {
+ // Try/Catch is to handle iframes being unloaded, see #4280
+ try {
+ jQuery.event.remove( jQuery.cache[ id ].handle.elem );
+ } catch(e) {}
+ }
+ }
+ });
}
-var expando = "jQuery" + now(), uuid = 0, windowData = {};
-
-jQuery.extend({
- cache: {},
-
- data: function( elem, name, data ) {
- elem = elem == window ?
- windowData :
- elem;
-
- var id = elem[ expando ];
-
- // Compute a unique ID for the element
- if ( !id )
- id = elem[ expando ] = ++uuid;
-
- // Only generate the data cache if we're
- // trying to access or manipulate it
- if ( name && !jQuery.cache[ id ] )
- jQuery.cache[ id ] = {};
-
- // Prevent overriding the named cache with undefined values
- if ( data !== undefined )
- jQuery.cache[ id ][ name ] = data;
-
- // Return the named cache data, or the ID for the element
- return name ?
- jQuery.cache[ id ][ name ] :
- id;
- },
-
- removeData: function( elem, name ) {
- elem = elem == window ?
- windowData :
- elem;
-
- var id = elem[ expando ];
-
- // If we want to remove a specific section of the element's data
- if ( name ) {
- if ( jQuery.cache[ id ] ) {
- // Remove the section of cache data
- delete jQuery.cache[ id ][ name ];
-
- // If we've removed all the data, remove the element's cache
- name = "";
-
- for ( name in jQuery.cache[ id ] )
- break;
-
- if ( !name )
- jQuery.removeData( elem );
- }
-
- // Otherwise, we want to remove all of the element's data
- } else {
- // Clean up the element expando
- try {
- delete elem[ expando ];
- } catch(e){
- // IE has trouble directly removing the expando
- // but it's ok with using removeAttribute
- if ( elem.removeAttribute )
- elem.removeAttribute( expando );
- }
-
- // Completely remove the data cache
- delete jQuery.cache[ id ];
- }
- },
- queue: function( elem, type, data ) {
- if ( elem ){
-
- type = (type || "fx") + "queue";
-
- var q = jQuery.data( elem, type );
-
- if ( !q || jQuery.isArray(data) )
- q = jQuery.data( elem, type, jQuery.makeArray(data) );
- else if( data )
- q.push( data );
-
- }
- return q;
- },
-
- dequeue: function( elem, type ){
- var queue = jQuery.queue( elem, type ),
- fn = queue.shift();
-
- if( !type || type === "fx" )
- fn = queue[0];
-
- if( fn !== undefined )
- fn.call(elem);
- }
-});
-
-jQuery.fn.extend({
- data: function( key, value ){
- var parts = key.split(".");
- parts[1] = parts[1] ? "." + parts[1] : "";
-
- if ( value === undefined ) {
- var data = this.triggerHandler("getData" + parts[1] + "!", [parts[0]]);
-
- if ( data === undefined && this.length )
- data = jQuery.data( this[0], key );
-
- return data === undefined && parts[1] ?
- this.data( parts[0] ) :
- data;
- } else
- return this.trigger("setData" + parts[1] + "!", [parts[0], value]).each(function(){
- jQuery.data( this, key, value );
- });
- },
-
- removeData: function( key ){
- return this.each(function(){
- jQuery.removeData( this, key );
- });
- },
- queue: function(type, data){
- if ( typeof type !== "string" ) {
- data = type;
- type = "fx";
- }
-
- if ( data === undefined )
- return jQuery.queue( this[0], type );
-
- return this.each(function(){
- var queue = jQuery.queue( this, type, data );
-
- if( type == "fx" && queue.length == 1 )
- queue[0].call(this);
- });
- },
- dequeue: function(type){
- return this.each(function(){
- jQuery.dequeue( this, type );
- });
- }
-});/*!
- * Sizzle CSS Selector Engine - v0.9.3
+/*!
+ * Sizzle CSS Selector Engine - v1.0
* Copyright 2009, The Dojo Foundation
* Released under the MIT, BSD, and GPL Licenses.
* More information: http://sizzlejs.com/
*/
(function(){
-var chunker = /((?:\((?:\([^()]+\)|[^()]+)+\)|\[(?:\[[^[\]]*\]|['"][^'"]*['"]|[^[\]'"]+)+\]|\\.|[^ >+~,(\[\\]+)+|[>+~])(\s*,\s*)?/g,
+var chunker = /((?:\((?:\([^()]+\)|[^()]+)+\)|\[(?:\[[^[\]]*\]|['"][^'"]*['"]|[^[\]'"]+)+\]|\\.|[^ >+~,(\[\\]+)+|[>+~])(\s*,\s*)?((?:.|\r|\n)*)/g,
done = 0,
- toString = Object.prototype.toString;
+ toString = Object.prototype.toString,
+ hasDuplicate = false,
+ baseHasDuplicate = true;
+
+// Here we check if the JavaScript engine is using some sort of
+// optimization where it does not always call our comparison
+// function. If that is the case, discard the hasDuplicate value.
+// Thus far that includes Google Chrome.
+[0, 0].sort(function(){
+ baseHasDuplicate = false;
+ return 0;
+});
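+
+// Illustrative consequence: if the engine never calls the comparator above,
+// baseHasDuplicate stays true, so uniqueSort below assumes duplicates may
+// exist and always performs the splice scan.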
var Sizzle = function(selector, context, results, seed) {
results = results || [];
- context = context || document;
+ var origContext = context = context || document;
- if ( context.nodeType !== 1 && context.nodeType !== 9 )
+ if ( context.nodeType !== 1 && context.nodeType !== 9 ) {
return [];
+ }
if ( !selector || typeof selector !== "string" ) {
return results;
}
- var parts = [], m, set, checkSet, check, mode, extra, prune = true;
+ var parts = [], m, set, checkSet, extra, prune = true, contextXML = isXML(context),
+ soFar = selector;
// Reset the position of the chunker regexp (start from head)
- chunker.lastIndex = 0;
-
- while ( (m = chunker.exec(selector)) !== null ) {
+ while ( (chunker.exec(""), m = chunker.exec(soFar)) !== null ) {
+ soFar = m[3];
+
parts.push( m[1] );
if ( m[2] ) {
- extra = RegExp.rightContext;
+ extra = m[3];
break;
}
}
@@ -1458,38 +2682,51 @@ var Sizzle = function(selector, context, results, seed) {
while ( parts.length ) {
selector = parts.shift();
- if ( Expr.relative[ selector ] )
+ if ( Expr.relative[ selector ] ) {
selector += parts.shift();
-
+ }
+
set = posProcess( selector, set );
}
}
} else {
- var ret = seed ?
- { expr: parts.pop(), set: makeArray(seed) } :
- Sizzle.find( parts.pop(), parts.length === 1 && context.parentNode ? context.parentNode : context, isXML(context) );
- set = Sizzle.filter( ret.expr, ret.set );
-
- if ( parts.length > 0 ) {
- checkSet = makeArray(set);
- } else {
- prune = false;
+ // Take a shortcut and set the context if the root selector is an ID
+		// (but not when the innermost selector is also an ID, since that path is faster on its own)
+ if ( !seed && parts.length > 1 && context.nodeType === 9 && !contextXML &&
+ Expr.match.ID.test(parts[0]) && !Expr.match.ID.test(parts[parts.length - 1]) ) {
+ var ret = Sizzle.find( parts.shift(), context, contextXML );
+ context = ret.expr ? Sizzle.filter( ret.expr, ret.set )[0] : ret.set[0];
}
- while ( parts.length ) {
- var cur = parts.pop(), pop = cur;
+ if ( context ) {
+ var ret = seed ?
+ { expr: parts.pop(), set: makeArray(seed) } :
+ Sizzle.find( parts.pop(), parts.length === 1 && (parts[0] === "~" || parts[0] === "+") && context.parentNode ? context.parentNode : context, contextXML );
+ set = ret.expr ? Sizzle.filter( ret.expr, ret.set ) : ret.set;
- if ( !Expr.relative[ cur ] ) {
- cur = "";
+ if ( parts.length > 0 ) {
+ checkSet = makeArray(set);
} else {
- pop = parts.pop();
+ prune = false;
}
- if ( pop == null ) {
- pop = context;
- }
+ while ( parts.length ) {
+ var cur = parts.pop(), pop = cur;
+
+ if ( !Expr.relative[ cur ] ) {
+ cur = "";
+ } else {
+ pop = parts.pop();
+ }
- Expr.relative[ cur ]( checkSet, pop, isXML(context) );
+ if ( pop == null ) {
+ pop = context;
+ }
+
+ Expr.relative[ cur ]( checkSet, pop, contextXML );
+ }
+ } else {
+ checkSet = parts = [];
}
}
@@ -1498,13 +2735,13 @@ var Sizzle = function(selector, context, results, seed) {
}
if ( !checkSet ) {
- throw "Syntax error, unrecognized expression: " + (cur || selector);
+ Sizzle.error( cur || selector );
}
if ( toString.call(checkSet) === "[object Array]" ) {
if ( !prune ) {
results.push.apply( results, checkSet );
- } else if ( context.nodeType === 1 ) {
+ } else if ( context && context.nodeType === 1 ) {
for ( var i = 0; checkSet[i] != null; i++ ) {
if ( checkSet[i] && (checkSet[i] === true || checkSet[i].nodeType === 1 && contains(context, checkSet[i])) ) {
results.push( set[i] );
@@ -1522,17 +2759,22 @@ var Sizzle = function(selector, context, results, seed) {
}
if ( extra ) {
- Sizzle( extra, context, results, seed );
+ Sizzle( extra, origContext, results, seed );
+ Sizzle.uniqueSort( results );
+ }
- if ( sortOrder ) {
- hasDuplicate = false;
- results.sort(sortOrder);
+ return results;
+};
- if ( hasDuplicate ) {
- for ( var i = 1; i < results.length; i++ ) {
- if ( results[i] === results[i-1] ) {
- results.splice(i--, 1);
- }
+Sizzle.uniqueSort = function(results){
+ if ( sortOrder ) {
+ hasDuplicate = baseHasDuplicate;
+ results.sort(sortOrder);
+
+ if ( hasDuplicate ) {
+ for ( var i = 1; i < results.length; i++ ) {
+ if ( results[i] === results[i-1] ) {
+ results.splice(i--, 1);
}
}
}
@@ -1555,8 +2797,9 @@ Sizzle.find = function(expr, context, isXML){
for ( var i = 0, l = Expr.order.length; i < l; i++ ) {
var type = Expr.order[i], match;
- if ( (match = Expr.match[ type ].exec( expr )) ) {
- var left = RegExp.leftContext;
+ if ( (match = Expr.leftMatch[ type ].exec( expr )) ) {
+ var left = match[1];
+ match.splice(1,1);
if ( left.substr( left.length - 1 ) !== "\\" ) {
match[1] = (match[1] || "").replace(/\\/g, "");
@@ -1582,11 +2825,17 @@ Sizzle.filter = function(expr, set, inplace, not){
while ( expr && set.length ) {
for ( var type in Expr.filter ) {
- if ( (match = Expr.match[ type ].exec( expr )) != null ) {
- var filter = Expr.filter[ type ], found, item;
+ if ( (match = Expr.leftMatch[ type ].exec( expr )) != null && match[2] ) {
+ var filter = Expr.filter[ type ], found, item, left = match[1];
anyFound = false;
- if ( curLoop == result ) {
+ match.splice(1,1);
+
+ if ( left.substr( left.length - 1 ) === "\\" ) {
+ continue;
+ }
+
+ if ( curLoop === result ) {
result = [];
}
@@ -1637,9 +2886,9 @@ Sizzle.filter = function(expr, set, inplace, not){
}
// Improper expression
- if ( expr == old ) {
+ if ( expr === old ) {
if ( anyFound == null ) {
- throw "Syntax error, unrecognized expression: " + expr;
+ Sizzle.error( expr );
} else {
break;
}
@@ -1651,18 +2900,23 @@ Sizzle.filter = function(expr, set, inplace, not){
return curLoop;
};
+Sizzle.error = function( msg ) {
+ throw "Syntax error, unrecognized expression: " + msg;
+};
+
var Expr = Sizzle.selectors = {
order: [ "ID", "NAME", "TAG" ],
match: {
- ID: /#((?:[\w\u00c0-\uFFFF_-]|\\.)+)/,
- CLASS: /\.((?:[\w\u00c0-\uFFFF_-]|\\.)+)/,
- NAME: /\[name=['"]*((?:[\w\u00c0-\uFFFF_-]|\\.)+)['"]*\]/,
- ATTR: /\[\s*((?:[\w\u00c0-\uFFFF_-]|\\.)+)\s*(?:(\S?=)\s*(['"]*)(.*?)\3|)\s*\]/,
- TAG: /^((?:[\w\u00c0-\uFFFF\*_-]|\\.)+)/,
+ ID: /#((?:[\w\u00c0-\uFFFF-]|\\.)+)/,
+ CLASS: /\.((?:[\w\u00c0-\uFFFF-]|\\.)+)/,
+ NAME: /\[name=['"]*((?:[\w\u00c0-\uFFFF-]|\\.)+)['"]*\]/,
+ ATTR: /\[\s*((?:[\w\u00c0-\uFFFF-]|\\.)+)\s*(?:(\S?=)\s*(['"]*)(.*?)\3|)\s*\]/,
+ TAG: /^((?:[\w\u00c0-\uFFFF\*-]|\\.)+)/,
CHILD: /:(only|nth|last|first)-child(?:\((even|odd|[\dn+-]*)\))?/,
POS: /:(nth|eq|gt|lt|first|last|even|odd)(?:\((\d*)\))?(?=[^-]|$)/,
- PSEUDO: /:((?:[\w\u00c0-\uFFFF_-]|\\.)+)(?:\((['"]*)((?:\([^\)]+\)|[^\2\(\)]*)+)\2\))?/
+ PSEUDO: /:((?:[\w\u00c0-\uFFFF-]|\\.)+)(?:\((['"]?)((?:\([^\)]+\)|[^\(\)]*)+)\2\))?/
},
+ leftMatch: {},
attrMap: {
"class": "className",
"for": "htmlFor"
@@ -1673,20 +2927,20 @@ var Expr = Sizzle.selectors = {
}
},
relative: {
- "+": function(checkSet, part, isXML){
+ "+": function(checkSet, part){
var isPartStr = typeof part === "string",
isTag = isPartStr && !/\W/.test(part),
isPartStrNotTag = isPartStr && !isTag;
- if ( isTag && !isXML ) {
- part = part.toUpperCase();
+ if ( isTag ) {
+ part = part.toLowerCase();
}
for ( var i = 0, l = checkSet.length, elem; i < l; i++ ) {
if ( (elem = checkSet[i]) ) {
while ( (elem = elem.previousSibling) && elem.nodeType !== 1 ) {}
- checkSet[i] = isPartStrNotTag || elem && elem.nodeName === part ?
+ checkSet[i] = isPartStrNotTag || elem && elem.nodeName.toLowerCase() === part ?
elem || false :
elem === part;
}
@@ -1696,17 +2950,17 @@ var Expr = Sizzle.selectors = {
Sizzle.filter( part, checkSet, true );
}
},
- ">": function(checkSet, part, isXML){
+ ">": function(checkSet, part){
var isPartStr = typeof part === "string";
if ( isPartStr && !/\W/.test(part) ) {
- part = isXML ? part : part.toUpperCase();
+ part = part.toLowerCase();
for ( var i = 0, l = checkSet.length; i < l; i++ ) {
var elem = checkSet[i];
if ( elem ) {
var parent = elem.parentNode;
- checkSet[i] = parent.nodeName === part ? parent : false;
+ checkSet[i] = parent.nodeName.toLowerCase() === part ? parent : false;
}
}
} else {
@@ -1727,8 +2981,8 @@ var Expr = Sizzle.selectors = {
"": function(checkSet, part, isXML){
var doneName = done++, checkFn = dirCheck;
- if ( !part.match(/\W/) ) {
- var nodeCheck = part = isXML ? part : part.toUpperCase();
+ if ( typeof part === "string" && !/\W/.test(part) ) {
+ var nodeCheck = part = part.toLowerCase();
checkFn = dirNodeCheck;
}
@@ -1737,8 +2991,8 @@ var Expr = Sizzle.selectors = {
"~": function(checkSet, part, isXML){
var doneName = done++, checkFn = dirCheck;
- if ( typeof part === "string" && !part.match(/\W/) ) {
- var nodeCheck = part = isXML ? part : part.toUpperCase();
+ if ( typeof part === "string" && !/\W/.test(part) ) {
+ var nodeCheck = part = part.toLowerCase();
checkFn = dirNodeCheck;
}
@@ -1752,7 +3006,7 @@ var Expr = Sizzle.selectors = {
return m ? [m] : [];
}
},
- NAME: function(match, context, isXML){
+ NAME: function(match, context){
if ( typeof context.getElementsByName !== "undefined" ) {
var ret = [], results = context.getElementsByName(match[1]);
@@ -1779,9 +3033,10 @@ var Expr = Sizzle.selectors = {
for ( var i = 0, elem; (elem = curLoop[i]) != null; i++ ) {
if ( elem ) {
- if ( not ^ (elem.className && (" " + elem.className + " ").indexOf(match) >= 0) ) {
- if ( !inplace )
+ if ( not ^ (elem.className && (" " + elem.className + " ").replace(/[\t\n]/g, " ").indexOf(match) >= 0) ) {
+ if ( !inplace ) {
result.push( elem );
+ }
} else if ( inplace ) {
curLoop[i] = false;
}
@@ -1794,14 +3049,13 @@ var Expr = Sizzle.selectors = {
return match[1].replace(/\\/g, "");
},
TAG: function(match, curLoop){
- for ( var i = 0; curLoop[i] === false; i++ ){}
- return curLoop[i] && isXML(curLoop[i]) ? match[1] : match[1].toUpperCase();
+ return match[1].toLowerCase();
},
CHILD: function(match){
- if ( match[1] == "nth" ) {
+ if ( match[1] === "nth" ) {
// parse equations like 'even', 'odd', '5', '2n', '3n+2', '4n-1', '-n+6'
var test = /(-?)(\d*)n((?:\+|-)?\d*)/.exec(
- match[2] == "even" && "2n" || match[2] == "odd" && "2n+1" ||
+ match[2] === "even" && "2n" || match[2] === "odd" && "2n+1" ||
!/\D/.test( match[2] ) && "0n+" + match[2] || match[2]);
// calculate the numbers (first)n+(last) including if they are negative
@@ -1830,7 +3084,7 @@ var Expr = Sizzle.selectors = {
PSEUDO: function(match, curLoop, inplace, result, not){
if ( match[1] === "not" ) {
// If we're dealing with a complex expression, or a simple one
- if ( match[3].match(chunker).length > 1 || /^\w/.test(match[3]) ) {
+ if ( ( chunker.exec(match[3]) || "" ).length > 1 || /^\w/.test(match[3]) ) {
match[3] = Sizzle(match[3], null, null, curLoop);
} else {
var ret = Sizzle.filter(match[3], curLoop, inplace, true ^ not);
@@ -1903,7 +3157,7 @@ var Expr = Sizzle.selectors = {
return "reset" === elem.type;
},
button: function(elem){
- return "button" === elem.type || elem.nodeName.toUpperCase() === "BUTTON";
+ return "button" === elem.type || elem.nodeName.toLowerCase() === "button";
},
input: function(elem){
return /input|select|textarea|button/i.test(elem.nodeName);
@@ -1929,10 +3183,10 @@ var Expr = Sizzle.selectors = {
return i > match[3] - 0;
},
nth: function(elem, i, match){
- return match[3] - 0 == i;
+ return match[3] - 0 === i;
},
eq: function(elem, i, match){
- return match[3] - 0 == i;
+ return match[3] - 0 === i;
}
},
filter: {
@@ -1942,7 +3196,7 @@ var Expr = Sizzle.selectors = {
if ( filter ) {
return filter( elem, i, match, array );
} else if ( name === "contains" ) {
- return (elem.textContent || elem.innerText || "").indexOf(match[3]) >= 0;
+ return (elem.textContent || elem.innerText || getText([ elem ]) || "").indexOf(match[3]) >= 0;
} else if ( name === "not" ) {
var not = match[3];
@@ -1953,6 +3207,8 @@ var Expr = Sizzle.selectors = {
}
return true;
+ } else {
+ Sizzle.error( "Syntax error, unrecognized expression: " + name );
}
},
CHILD: function(elem, match){
@@ -1960,20 +3216,26 @@ var Expr = Sizzle.selectors = {
switch (type) {
case 'only':
case 'first':
- while (node = node.previousSibling) {
- if ( node.nodeType === 1 ) return false;
+ while ( (node = node.previousSibling) ) {
+ if ( node.nodeType === 1 ) {
+ return false;
+ }
+ }
+ if ( type === "first" ) {
+ return true;
}
- if ( type == 'first') return true;
node = elem;
case 'last':
- while (node = node.nextSibling) {
- if ( node.nodeType === 1 ) return false;
+ while ( (node = node.nextSibling) ) {
+ if ( node.nodeType === 1 ) {
+ return false;
+ }
}
return true;
case 'nth':
var first = match[2], last = match[3];
- if ( first == 1 && last == 0 ) {
+ if ( first === 1 && last === 0 ) {
return true;
}
@@ -1991,10 +3253,10 @@ var Expr = Sizzle.selectors = {
}
var diff = elem.nodeIndex - last;
- if ( first == 0 ) {
- return diff == 0;
+ if ( first === 0 ) {
+ return diff === 0;
} else {
- return ( diff % first == 0 && diff / first >= 0 );
+ return ( diff % first === 0 && diff / first >= 0 );
}
}
},
@@ -2002,7 +3264,7 @@ var Expr = Sizzle.selectors = {
return elem.nodeType === 1 && elem.getAttribute("id") === match;
},
TAG: function(elem, match){
- return (match === "*" && elem.nodeType === 1) || elem.nodeName === match;
+ return (match === "*" && elem.nodeType === 1) || elem.nodeName.toLowerCase() === match;
},
CLASS: function(elem, match){
return (" " + (elem.className || elem.getAttribute("class")) + " ")
@@ -2030,7 +3292,7 @@ var Expr = Sizzle.selectors = {
!check ?
value && result !== false :
type === "!=" ?
- value != check :
+ value !== check :
type === "^=" ?
value.indexOf(check) === 0 :
type === "$=" ?
@@ -2052,11 +3314,14 @@ var Expr = Sizzle.selectors = {
var origPOS = Expr.match.POS;
for ( var type in Expr.match ) {
- Expr.match[ type ] = RegExp( Expr.match[ type ].source + /(?![^\[]*\])(?![^\(]*\))/.source );
+ Expr.match[ type ] = new RegExp( Expr.match[ type ].source + /(?![^\[]*\])(?![^\(]*\))/.source );
+ Expr.leftMatch[ type ] = new RegExp( /(^(?:.|\r|\n)*?)/.source + Expr.match[ type ].source.replace(/\\(\d+)/g, function(all, num){
+ return "\\" + (num - 0 + 1);
+ }));
}
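// For illustration, the loop above appends a lookahead to each pattern and
// derives a "leftMatch" variant that also captures the text to the left:
Expr.leftMatch.CHILD.exec( "div:nth-child(2n+1)" );
// -> [ "div:nth-child(2n+1)", "div", "nth", "2n+1" ]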
var makeArray = function(array, results) {
- array = Array.prototype.slice.call( array );
+ array = Array.prototype.slice.call( array, 0 );
if ( results ) {
results.push.apply( results, array );
@@ -2068,8 +3333,10 @@ var makeArray = function(array, results) {
// Perform a simple check to determine if the browser is capable of
// converting a NodeList to an array using builtin methods.
+// Also verifies that the returned array holds DOM nodes
+// (which is not the case in the Blackberry browser)
try {
- Array.prototype.slice.call( document.documentElement.childNodes );
+ Array.prototype.slice.call( document.documentElement.childNodes, 0 )[0].nodeType;
// Provide a fallback method if it does not work
} catch(e){
@@ -2098,6 +3365,13 @@ var sortOrder;
if ( document.documentElement.compareDocumentPosition ) {
sortOrder = function( a, b ) {
+ if ( !a.compareDocumentPosition || !b.compareDocumentPosition ) {
+ if ( a == b ) {
+ hasDuplicate = true;
+ }
+ return a.compareDocumentPosition ? -1 : 1;
+ }
+
var ret = a.compareDocumentPosition(b) & 4 ? -1 : a === b ? 0 : 1;
if ( ret === 0 ) {
hasDuplicate = true;
@@ -2106,6 +3380,13 @@ if ( document.documentElement.compareDocumentPosition ) {
};
} else if ( "sourceIndex" in document.documentElement ) {
sortOrder = function( a, b ) {
+ if ( !a.sourceIndex || !b.sourceIndex ) {
+ if ( a == b ) {
+ hasDuplicate = true;
+ }
+ return a.sourceIndex ? -1 : 1;
+ }
+
var ret = a.sourceIndex - b.sourceIndex;
if ( ret === 0 ) {
hasDuplicate = true;
@@ -2114,11 +3395,18 @@ if ( document.documentElement.compareDocumentPosition ) {
};
} else if ( document.createRange ) {
sortOrder = function( a, b ) {
+ if ( !a.ownerDocument || !b.ownerDocument ) {
+ if ( a == b ) {
+ hasDuplicate = true;
+ }
+ return a.ownerDocument ? -1 : 1;
+ }
+
var aRange = a.ownerDocument.createRange(), bRange = b.ownerDocument.createRange();
- aRange.selectNode(a);
- aRange.collapse(true);
- bRange.selectNode(b);
- bRange.collapse(true);
+ aRange.setStart(a, 0);
+ aRange.setEnd(a, 0);
+ bRange.setStart(b, 0);
+ bRange.setEnd(b, 0);
var ret = aRange.compareBoundaryPoints(Range.START_TO_END, bRange);
if ( ret === 0 ) {
hasDuplicate = true;
@@ -2127,13 +3415,33 @@ if ( document.documentElement.compareDocumentPosition ) {
};
}
+// Utility function for retrieving the text value of an array of DOM nodes
+function getText( elems ) {
+ var ret = "", elem;
+
+ for ( var i = 0; elems[i]; i++ ) {
+ elem = elems[i];
+
+ // Get the text from text nodes and CDATA nodes
+ if ( elem.nodeType === 3 || elem.nodeType === 4 ) {
+ ret += elem.nodeValue;
+
+ // Traverse everything else, except comment nodes
+ } else if ( elem.nodeType !== 8 ) {
+ ret += getText( elem.childNodes );
+ }
+ }
+
+ return ret;
+}
+
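// A minimal sketch of getText() (assumes a browser document): text and
// CDATA content is gathered recursively while comment nodes are skipped.
var p = document.createElement("p");
p.innerHTML = "Hello <b>world</b><!-- ignored -->";
getText( p.childNodes ); // "Hello world"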
// Check to see if the browser returns elements by name when
// querying by getElementById (and provide a workaround)
(function(){
// We're going to inject a fake anchor element with a specified name
- var form = document.createElement("form"),
+ var form = document.createElement("div"),
id = "script" + (new Date).getTime();
- form.innerHTML = "<input name='" + id + "'/>";
+ form.innerHTML = "<a name='" + id + "'/>";
// Inject it into the root element, check its status, and remove it quickly
var root = document.documentElement;
@@ -2141,7 +3449,7 @@ if ( document.documentElement.compareDocumentPosition ) {
// The workaround has to do additional checks after a getElementById
// Which slows things down for other browsers (hence the branching)
- if ( !!document.getElementById( id ) ) {
+ if ( document.getElementById( id ) ) {
Expr.find.ID = function(match, context, isXML){
if ( typeof context.getElementById !== "undefined" && !isXML ) {
var m = context.getElementById(match[1]);
@@ -2156,6 +3464,7 @@ if ( document.documentElement.compareDocumentPosition ) {
}
root.removeChild( form );
+ root = form = null; // release memory in IE
})();
(function(){
@@ -2196,69 +3505,75 @@ if ( document.documentElement.compareDocumentPosition ) {
return elem.getAttribute("href", 2);
};
}
+
+ div = null; // release memory in IE
})();
-if ( document.querySelectorAll ) (function(){
- var oldSizzle = Sizzle, div = document.createElement("div");
- div.innerHTML = "<p class='TEST'></p>";
+if ( document.querySelectorAll ) {
+ (function(){
+ var oldSizzle = Sizzle, div = document.createElement("div");
+ div.innerHTML = "<p class='TEST'></p>";
- // Safari can't handle uppercase or unicode characters when
- // in quirks mode.
- if ( div.querySelectorAll && div.querySelectorAll(".TEST").length === 0 ) {
- return;
- }
+ // Safari can't handle uppercase or unicode characters when
+ // in quirks mode.
+ if ( div.querySelectorAll && div.querySelectorAll(".TEST").length === 0 ) {
+ return;
+ }
- Sizzle = function(query, context, extra, seed){
- context = context || document;
+ Sizzle = function(query, context, extra, seed){
+ context = context || document;
+
+ // Only use querySelectorAll on non-XML documents
+ // (ID selectors don't work in non-HTML documents)
+ if ( !seed && context.nodeType === 9 && !isXML(context) ) {
+ try {
+ return makeArray( context.querySelectorAll(query), extra );
+ } catch(e){}
+ }
+
+ return oldSizzle(query, context, extra, seed);
+ };
- // Only use querySelectorAll on non-XML documents
- // (ID selectors don't work in non-HTML documents)
- if ( !seed && context.nodeType === 9 && !isXML(context) ) {
- try {
- return makeArray( context.querySelectorAll(query), extra );
- } catch(e){}
+ for ( var prop in oldSizzle ) {
+ Sizzle[ prop ] = oldSizzle[ prop ];
}
-
- return oldSizzle(query, context, extra, seed);
- };
- Sizzle.find = oldSizzle.find;
- Sizzle.filter = oldSizzle.filter;
- Sizzle.selectors = oldSizzle.selectors;
- Sizzle.matches = oldSizzle.matches;
-})();
+ div = null; // release memory in IE
+ })();
+}
-if ( document.getElementsByClassName && document.documentElement.getElementsByClassName ) (function(){
+(function(){
var div = document.createElement("div");
+
div.innerHTML = "<div class='test e'></div><div class='test'></div>";
// Opera can't find a second classname (in 9.6)
- if ( div.getElementsByClassName("e").length === 0 )
+ // Also, make sure that getElementsByClassName actually exists
+ if ( !div.getElementsByClassName || div.getElementsByClassName("e").length === 0 ) {
return;
+ }
// Safari caches class attributes, doesn't catch changes (in 3.2)
div.lastChild.className = "e";
- if ( div.getElementsByClassName("e").length === 1 )
+ if ( div.getElementsByClassName("e").length === 1 ) {
return;
-
+ }
+
Expr.order.splice(1, 0, "CLASS");
Expr.find.CLASS = function(match, context, isXML) {
if ( typeof context.getElementsByClassName !== "undefined" && !isXML ) {
return context.getElementsByClassName(match[1]);
}
};
+
+ div = null; // release memory in IE
})();
function dirNodeCheck( dir, cur, doneName, checkSet, nodeCheck, isXML ) {
- var sibDir = dir == "previousSibling" && !isXML;
for ( var i = 0, l = checkSet.length; i < l; i++ ) {
var elem = checkSet[i];
if ( elem ) {
- if ( sibDir && elem.nodeType === 1 ){
- elem.sizcache = doneName;
- elem.sizset = i;
- }
elem = elem[dir];
var match = false;
@@ -2273,7 +3588,7 @@ function dirNodeCheck( dir, cur, doneName, checkSet, nodeCheck, isXML ) {
elem.sizset = i;
}
- if ( elem.nodeName === cur ) {
+ if ( elem.nodeName.toLowerCase() === cur ) {
match = elem;
break;
}
@@ -2287,14 +3602,9 @@ function dirNodeCheck( dir, cur, doneName, checkSet, nodeCheck, isXML ) {
}
function dirCheck( dir, cur, doneName, checkSet, nodeCheck, isXML ) {
- var sibDir = dir == "previousSibling" && !isXML;
for ( var i = 0, l = checkSet.length; i < l; i++ ) {
var elem = checkSet[i];
if ( elem ) {
- if ( sibDir && elem.nodeType === 1 ) {
- elem.sizcache = doneName;
- elem.sizset = i;
- }
elem = elem[dir];
var match = false;
@@ -2329,15 +3639,17 @@ function dirCheck( dir, cur, doneName, checkSet, nodeCheck, isXML ) {
}
}
-var contains = document.compareDocumentPosition ? function(a, b){
- return a.compareDocumentPosition(b) & 16;
+var contains = document.compareDocumentPosition ? function(a, b){
+ return !!(a.compareDocumentPosition(b) & 16);
} : function(a, b){
return a !== b && (a.contains ? a.contains(b) : true);
};
var isXML = function(elem){
- return elem.nodeType === 9 && elem.documentElement.nodeName !== "HTML" ||
- !!elem.ownerDocument && isXML( elem.ownerDocument );
+ // documentElement is verified for cases where it doesn't yet exist
+ // (such as loading iframes in IE - #4833)
+ var documentElement = (elem ? elem.ownerDocument || elem : 0).documentElement;
+ return documentElement ? documentElement.nodeName !== "HTML" : false;
};
var posProcess = function(selector, context){
@@ -2362,872 +3674,1122 @@ var posProcess = function(selector, context){
// EXPOSE
jQuery.find = Sizzle;
-jQuery.filter = Sizzle.filter;
jQuery.expr = Sizzle.selectors;
jQuery.expr[":"] = jQuery.expr.filters;
+jQuery.unique = Sizzle.uniqueSort;
+jQuery.text = getText;
+jQuery.isXMLDoc = isXML;
+jQuery.contains = contains;
-Sizzle.selectors.filters.hidden = function(elem){
- return elem.offsetWidth === 0 || elem.offsetHeight === 0;
-};
+return;
-Sizzle.selectors.filters.visible = function(elem){
- return elem.offsetWidth > 0 || elem.offsetHeight > 0;
-};
+window.Sizzle = Sizzle;
-Sizzle.selectors.filters.animated = function(elem){
- return jQuery.grep(jQuery.timers, function(fn){
- return elem === fn.elem;
- }).length;
-};
+})();
+var runtil = /Until$/,
+ rparentsprev = /^(?:parents|prevUntil|prevAll)/,
+ // Note: This RegExp should be improved, or likely pulled from Sizzle
+ rmultiselector = /,/,
+ slice = Array.prototype.slice;
+
+// Implement the identical functionality for filter and not
+var winnow = function( elements, qualifier, keep ) {
+ if ( jQuery.isFunction( qualifier ) ) {
+ return jQuery.grep(elements, function( elem, i ) {
+ return !!qualifier.call( elem, i, elem ) === keep;
+ });
-jQuery.multiFilter = function( expr, elems, not ) {
- if ( not ) {
- expr = ":not(" + expr + ")";
- }
+ } else if ( qualifier.nodeType ) {
+ return jQuery.grep(elements, function( elem, i ) {
+ return (elem === qualifier) === keep;
+ });
- return Sizzle.matches(expr, elems);
-};
+ } else if ( typeof qualifier === "string" ) {
+ var filtered = jQuery.grep(elements, function( elem ) {
+ return elem.nodeType === 1;
+ });
-jQuery.dir = function( elem, dir ){
- var matched = [], cur = elem[dir];
- while ( cur && cur != document ) {
- if ( cur.nodeType == 1 )
- matched.push( cur );
- cur = cur[dir];
+ if ( isSimple.test( qualifier ) ) {
+ return jQuery.filter(qualifier, filtered, !keep);
+ } else {
+ qualifier = jQuery.filter( qualifier, filtered );
+ }
}
- return matched;
+
+ return jQuery.grep(elements, function( elem, i ) {
+ return (jQuery.inArray( elem, qualifier ) >= 0) === keep;
+ });
};
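// A minimal sketch: winnow() backs both .filter() (keep === true) and
// .not() (keep === false) and accepts a function, a node or a selector.
winnow( jQuery("li").get(), ".active", true );  // only the .active items
winnow( jQuery("li").get(), ".active", false ); // everything but .active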
-jQuery.nth = function(cur, result, dir, elem){
- result = result || 1;
- var num = 0;
+jQuery.fn.extend({
+ find: function( selector ) {
+ var ret = this.pushStack( "", "find", selector ), length = 0;
+
+ for ( var i = 0, l = this.length; i < l; i++ ) {
+ length = ret.length;
+ jQuery.find( selector, this[i], ret );
+
+ if ( i > 0 ) {
+ // Make sure that the results are unique
+ for ( var n = length; n < ret.length; n++ ) {
+ for ( var r = 0; r < length; r++ ) {
+ if ( ret[r] === ret[n] ) {
+ ret.splice(n--, 1);
+ break;
+ }
+ }
+ }
+ }
+ }
- for ( ; cur; cur = cur[dir] )
- if ( cur.nodeType == 1 && ++num == result )
- break;
+ return ret;
+ },
- return cur;
-};
+ has: function( target ) {
+ var targets = jQuery( target );
+ return this.filter(function() {
+ for ( var i = 0, l = targets.length; i < l; i++ ) {
+ if ( jQuery.contains( this, targets[i] ) ) {
+ return true;
+ }
+ }
+ });
+ },
-jQuery.sibling = function(n, elem){
- var r = [];
+ not: function( selector ) {
+ return this.pushStack( winnow(this, selector, false), "not", selector);
+ },
- for ( ; n; n = n.nextSibling ) {
- if ( n.nodeType == 1 && n != elem )
- r.push( n );
- }
+ filter: function( selector ) {
+ return this.pushStack( winnow(this, selector, true), "filter", selector );
+ },
+
+ is: function( selector ) {
+ return !!selector && jQuery.filter( selector, this ).length > 0;
+ },
- return r;
-};
+ closest: function( selectors, context ) {
+ if ( jQuery.isArray( selectors ) ) {
+ var ret = [], cur = this[0], match, matches = {}, selector;
-return;
+ if ( cur && selectors.length ) {
+ for ( var i = 0, l = selectors.length; i < l; i++ ) {
+ selector = selectors[i];
-window.Sizzle = Sizzle;
+ if ( !matches[selector] ) {
+ matches[selector] = jQuery.expr.match.POS.test( selector ) ?
+ jQuery( selector, context || this.context ) :
+ selector;
+ }
+ }
-})();
-/*
- * A number of helper functions used for managing events.
- * Many of the ideas behind this code originated from
- * Dean Edwards' addEvent library.
- */
-jQuery.event = {
+ while ( cur && cur.ownerDocument && cur !== context ) {
+ for ( selector in matches ) {
+ match = matches[selector];
- // Bind an event to an element
- // Original by Dean Edwards
- add: function(elem, types, handler, data) {
- if ( elem.nodeType == 3 || elem.nodeType == 8 )
- return;
+ if ( match.jquery ? match.index(cur) > -1 : jQuery(cur).is(match) ) {
+ ret.push({ selector: selector, elem: cur });
+ delete matches[selector];
+ }
+ }
+ cur = cur.parentNode;
+ }
+ }
- // For whatever reason, IE has trouble passing the window object
- // around, causing it to be cloned in the process
- if ( elem.setInterval && elem != window )
- elem = window;
+ return ret;
+ }
- // Make sure that the function being executed has a unique ID
- if ( !handler.guid )
- handler.guid = this.guid++;
+ var pos = jQuery.expr.match.POS.test( selectors ) ?
+ jQuery( selectors, context || this.context ) : null;
- // if data is passed, bind to handler
- if ( data !== undefined ) {
- // Create temporary function pointer to original handler
- var fn = handler;
+ return this.map(function( i, cur ) {
+ while ( cur && cur.ownerDocument && cur !== context ) {
+ if ( pos ? pos.index(cur) > -1 : jQuery(cur).is(selectors) ) {
+ return cur;
+ }
+ cur = cur.parentNode;
+ }
+ return null;
+ });
+ },
+
+ // Determine the position of an element within
+ // the matched set of elements
+ index: function( elem ) {
+ if ( !elem || typeof elem === "string" ) {
+ return jQuery.inArray( this[0],
+ // If it receives a string, the selector is used
+ // If it receives nothing, the siblings are used
+ elem ? jQuery( elem ) : this.parent().children() );
+ }
+ // Locate the position of the desired element
+ return jQuery.inArray(
+ // If it receives a jQuery object, the first element is used
+ elem.jquery ? elem[0] : elem, this );
+ },
+
+ add: function( selector, context ) {
+ var set = typeof selector === "string" ?
+ jQuery( selector, context || this.context ) :
+ jQuery.makeArray( selector ),
+ all = jQuery.merge( this.get(), set );
+
+ return this.pushStack( isDisconnected( set[0] ) || isDisconnected( all[0] ) ?
+ all :
+ jQuery.unique( all ) );
+ },
+
+ andSelf: function() {
+ return this.add( this.prevObject );
+ }
+});
- // Create unique handler function, wrapped around original handler
- handler = this.proxy( fn );
+// A painfully simple check to see if an element is disconnected
+// from a document (should be improved, where feasible).
+function isDisconnected( node ) {
+ return !node || !node.parentNode || node.parentNode.nodeType === 11;
+}
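// A quick sketch: nodes parked in a document fragment count as
// disconnected, so add() above skips unique-sorting such sets.
var frag = document.createDocumentFragment(),
	div = frag.appendChild( document.createElement("div") );
isDisconnected( div );           // true (parent is a fragment, nodeType 11)
isDisconnected( document.body ); // false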
- // Store data in unique handler
- handler.data = data;
+jQuery.each({
+ parent: function( elem ) {
+ var parent = elem.parentNode;
+ return parent && parent.nodeType !== 11 ? parent : null;
+ },
+ parents: function( elem ) {
+ return jQuery.dir( elem, "parentNode" );
+ },
+ parentsUntil: function( elem, i, until ) {
+ return jQuery.dir( elem, "parentNode", until );
+ },
+ next: function( elem ) {
+ return jQuery.nth( elem, 2, "nextSibling" );
+ },
+ prev: function( elem ) {
+ return jQuery.nth( elem, 2, "previousSibling" );
+ },
+ nextAll: function( elem ) {
+ return jQuery.dir( elem, "nextSibling" );
+ },
+ prevAll: function( elem ) {
+ return jQuery.dir( elem, "previousSibling" );
+ },
+ nextUntil: function( elem, i, until ) {
+ return jQuery.dir( elem, "nextSibling", until );
+ },
+ prevUntil: function( elem, i, until ) {
+ return jQuery.dir( elem, "previousSibling", until );
+ },
+ siblings: function( elem ) {
+ return jQuery.sibling( elem.parentNode.firstChild, elem );
+ },
+ children: function( elem ) {
+ return jQuery.sibling( elem.firstChild );
+ },
+ contents: function( elem ) {
+ return jQuery.nodeName( elem, "iframe" ) ?
+ elem.contentDocument || elem.contentWindow.document :
+ jQuery.makeArray( elem.childNodes );
+ }
+}, function( name, fn ) {
+ jQuery.fn[ name ] = function( until, selector ) {
+ var ret = jQuery.map( this, fn, until );
+
+ if ( !runtil.test( name ) ) {
+ selector = until;
}
- // Init the element's event structure
- var events = jQuery.data(elem, "events") || jQuery.data(elem, "events", {}),
- handle = jQuery.data(elem, "handle") || jQuery.data(elem, "handle", function(){
- // Handle the second event of a trigger and when
- // an event is called after a page has unloaded
- return typeof jQuery !== "undefined" && !jQuery.event.triggered ?
- jQuery.event.handle.apply(arguments.callee.elem, arguments) :
- undefined;
- });
- // Add elem as a property of the handle function
- // This is to prevent a memory leak with non-native
- // event in IE.
- handle.elem = elem;
+ if ( selector && typeof selector === "string" ) {
+ ret = jQuery.filter( selector, ret );
+ }
- // Handle multiple events separated by a space
- // jQuery(...).bind("mouseover mouseout", fn);
- jQuery.each(types.split(/\s+/), function(index, type) {
- // Namespaced event handlers
- var namespaces = type.split(".");
- type = namespaces.shift();
- handler.type = namespaces.slice().sort().join(".");
+ ret = this.length > 1 ? jQuery.unique( ret ) : ret;
- // Get the current list of functions bound to this event
- var handlers = events[type];
-
- if ( jQuery.event.specialAll[type] )
- jQuery.event.specialAll[type].setup.call(elem, data, namespaces);
+ if ( (this.length > 1 || rmultiselector.test( selector )) && rparentsprev.test( name ) ) {
+ ret = ret.reverse();
+ }
- // Init the event handler queue
- if (!handlers) {
- handlers = events[type] = {};
+ return this.pushStack( ret, name, slice.call(arguments).join(",") );
+ };
+});
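// Illustrative usage of the *Until methods generated above (the "#item"
// selector is hypothetical):
jQuery("#item").parentsUntil("ul");   // ancestors up to, not including, a <ul>
jQuery("#item").nextUntil("hr", "p"); // following <p> siblings before an <hr>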
- // Check for a special event handler
- // Only use addEventListener/attachEvent if the special
- // events handler returns false
- if ( !jQuery.event.special[type] || jQuery.event.special[type].setup.call(elem, data, namespaces) === false ) {
- // Bind the global event handler to the element
- if (elem.addEventListener)
- elem.addEventListener(type, handle, false);
- else if (elem.attachEvent)
- elem.attachEvent("on" + type, handle);
- }
+jQuery.extend({
+ filter: function( expr, elems, not ) {
+ if ( not ) {
+ expr = ":not(" + expr + ")";
+ }
+
+ return jQuery.find.matches(expr, elems);
+ },
+
+ dir: function( elem, dir, until ) {
+ var matched = [], cur = elem[dir];
+ while ( cur && cur.nodeType !== 9 && (until === undefined || cur.nodeType !== 1 || !jQuery( cur ).is( until )) ) {
+ if ( cur.nodeType === 1 ) {
+ matched.push( cur );
}
+ cur = cur[dir];
+ }
+ return matched;
+ },
- // Add the function to the element's handler list
- handlers[handler.guid] = handler;
+ nth: function( cur, result, dir, elem ) {
+ result = result || 1;
+ var num = 0;
- // Keep track of which events have been used, for global triggering
- jQuery.event.global[type] = true;
- });
+ for ( ; cur; cur = cur[dir] ) {
+ if ( cur.nodeType === 1 && ++num === result ) {
+ break;
+ }
+ }
- // Nullify elem to prevent memory leaks in IE
- elem = null;
+ return cur;
},
- guid: 1,
- global: {},
+ sibling: function( n, elem ) {
+ var r = [];
- // Detach an event or set of events from an element
- remove: function(elem, types, handler) {
- // don't do events on text and comment nodes
- if ( elem.nodeType == 3 || elem.nodeType == 8 )
- return;
+ for ( ; n; n = n.nextSibling ) {
+ if ( n.nodeType === 1 && n !== elem ) {
+ r.push( n );
+ }
+ }
- var events = jQuery.data(elem, "events"), ret, index;
+ return r;
+ }
+});
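// A minimal sketch (elem is a hypothetical DOM element): dir() walks a
// pointer chain collecting element nodes, optionally stopping at "until".
jQuery.dir( elem, "parentNode" );           // all element ancestors
jQuery.dir( elem, "nextSibling", ".stop" ); // following siblings before .stop
jQuery.nth( elem.firstChild, 2, "nextSibling" ); // second element child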
+var rinlinejQuery = / jQuery\d+="(?:\d+|null)"/g,
+ rleadingWhitespace = /^\s+/,
+ rxhtmlTag = /(<([\w:]+)[^>]*?)\/>/g,
+ rselfClosing = /^(?:area|br|col|embed|hr|img|input|link|meta|param)$/i,
+ rtagName = /<([\w:]+)/,
+ rtbody = /<tbody/i,
+ rhtml = /<|&#?\w+;/,
+ rnocache = /<script|<object|<embed|<option|<style/i,
+ rchecked = /checked\s*(?:[^=]|=\s*.checked.)/i, // checked="checked" or checked (html5)
+ fcloseTag = function( all, front, tag ) {
+ return rselfClosing.test( tag ) ?
+ all :
+ front + "></" + tag + ">";
+ },
+ wrapMap = {
+ option: [ 1, "<select multiple='multiple'>", "</select>" ],
+ legend: [ 1, "<fieldset>", "</fieldset>" ],
+ thead: [ 1, "<table>", "</table>" ],
+ tr: [ 2, "<table><tbody>", "</tbody></table>" ],
+ td: [ 3, "<table><tbody><tr>", "</tr></tbody></table>" ],
+ col: [ 2, "<table><tbody></tbody><colgroup>", "</colgroup></table>" ],
+ area: [ 1, "<map>", "</map>" ],
+ _default: [ 0, "", "" ]
+ };
- if ( events ) {
- // Unbind all events for the element
- if ( types === undefined || (typeof types === "string" && types.charAt(0) == ".") )
- for ( var type in events )
- this.remove( elem, type + (types || "") );
- else {
- // types is actually an event object here
- if ( types.type ) {
- handler = types.handler;
- types = types.type;
- }
+wrapMap.optgroup = wrapMap.option;
+wrapMap.tbody = wrapMap.tfoot = wrapMap.colgroup = wrapMap.caption = wrapMap.thead;
+wrapMap.th = wrapMap.td;
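// For illustration: clean() below parses context-sensitive markup inside
// a valid parent chain, then peels the wrappers off again.
wrapMap[ "td" ]; // [ 3, "<table><tbody><tr>", "</tr></tbody></table>" ]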
- // Handle multiple events seperated by a space
- // jQuery(...).unbind("mouseover mouseout", fn);
- jQuery.each(types.split(/\s+/), function(index, type){
- // Namespaced event handlers
- var namespaces = type.split(".");
- type = namespaces.shift();
- var namespace = RegExp("(^|\\.)" + namespaces.slice().sort().join(".*\\.") + "(\\.|$)");
-
- if ( events[type] ) {
- // remove the given handler for the given type
- if ( handler )
- delete events[type][handler.guid];
-
- // remove all handlers for the given type
- else
- for ( var handle in events[type] )
- // Handle the removal of namespaced events
- if ( namespace.test(events[type][handle].type) )
- delete events[type][handle];
-
- if ( jQuery.event.specialAll[type] )
- jQuery.event.specialAll[type].teardown.call(elem, namespaces);
-
- // remove generic event handler if no more handlers exist
- for ( ret in events[type] ) break;
- if ( !ret ) {
- if ( !jQuery.event.special[type] || jQuery.event.special[type].teardown.call(elem, namespaces) === false ) {
- if (elem.removeEventListener)
- elem.removeEventListener(type, jQuery.data(elem, "handle"), false);
- else if (elem.detachEvent)
- elem.detachEvent("on" + type, jQuery.data(elem, "handle"));
- }
- ret = null;
- delete events[type];
- }
- }
- });
- }
+// IE can't serialize <link> and <script> tags normally
+if ( !jQuery.support.htmlSerialize ) {
+ wrapMap._default = [ 1, "div<div>", "</div>" ];
+}
- // Remove the expando if it's no longer used
- for ( ret in events ) break;
- if ( !ret ) {
- var handle = jQuery.data( elem, "handle" );
- if ( handle ) handle.elem = null;
- jQuery.removeData( elem, "events" );
- jQuery.removeData( elem, "handle" );
- }
+jQuery.fn.extend({
+ text: function( text ) {
+ if ( jQuery.isFunction(text) ) {
+ return this.each(function(i) {
+ var self = jQuery(this);
+ self.text( text.call(this, i, self.text()) );
+ });
}
+
+ if ( typeof text !== "object" && text !== undefined ) {
+ return this.empty().append( (this[0] && this[0].ownerDocument || document).createTextNode( text ) );
+ }
+
+ return jQuery.text( this );
},
- // bubbling is internal
- trigger: function( event, data, elem, bubbling ) {
- // Event object or event type
- var type = event.type || event;
+ wrapAll: function( html ) {
+ if ( jQuery.isFunction( html ) ) {
+ return this.each(function(i) {
+ jQuery(this).wrapAll( html.call(this, i) );
+ });
+ }
- if( !bubbling ){
- event = typeof event === "object" ?
- // jQuery.Event object
- event[expando] ? event :
- // Object literal
- jQuery.extend( jQuery.Event(type), event ) :
- // Just the event type (string)
- jQuery.Event(type);
+ if ( this[0] ) {
+ // The elements to wrap the target around
+ var wrap = jQuery( html, this[0].ownerDocument ).eq(0).clone(true);
- if ( type.indexOf("!") >= 0 ) {
- event.type = type = type.slice(0, -1);
- event.exclusive = true;
+ if ( this[0].parentNode ) {
+ wrap.insertBefore( this[0] );
}
- // Handle a global trigger
- if ( !elem ) {
- // Don't bubble custom events when global (to avoid too much overhead)
- event.stopPropagation();
- // Only trigger if we've ever bound an event for it
- if ( this.global[type] )
- jQuery.each( jQuery.cache, function(){
- if ( this.events && this.events[type] )
- jQuery.event.trigger( event, data, this.handle.elem );
- });
- }
+ wrap.map(function() {
+ var elem = this;
- // Handle triggering a single element
+ while ( elem.firstChild && elem.firstChild.nodeType === 1 ) {
+ elem = elem.firstChild;
+ }
- // don't do events on text and comment nodes
- if ( !elem || elem.nodeType == 3 || elem.nodeType == 8 )
- return undefined;
-
- // Clean up in case it is reused
- event.result = undefined;
- event.target = elem;
-
- // Clone the incoming data, if any
- data = jQuery.makeArray(data);
- data.unshift( event );
+ return elem;
+ }).append(this);
}
- event.currentTarget = elem;
+ return this;
+ },
- // Trigger the event, it is assumed that "handle" is a function
- var handle = jQuery.data(elem, "handle");
- if ( handle )
- handle.apply( elem, data );
+ wrapInner: function( html ) {
+ if ( jQuery.isFunction( html ) ) {
+ return this.each(function(i) {
+ jQuery(this).wrapInner( html.call(this, i) );
+ });
+ }
- // Handle triggering native .onfoo handlers (and on links since we don't call .click() for links)
- if ( (!elem[type] || (jQuery.nodeName(elem, 'a') && type == "click")) && elem["on"+type] && elem["on"+type].apply( elem, data ) === false )
- event.result = false;
+ return this.each(function() {
+ var self = jQuery( this ), contents = self.contents();
- // Trigger the native events (except for clicks on links)
- if ( !bubbling && elem[type] && !event.isDefaultPrevented() && !(jQuery.nodeName(elem, 'a') && type == "click") ) {
- this.triggered = true;
- try {
- elem[ type ]();
- // prevent IE from throwing an error for some hidden elements
- } catch (e) {}
- }
+ if ( contents.length ) {
+ contents.wrapAll( html );
- this.triggered = false;
+ } else {
+ self.append( html );
+ }
+ });
+ },
+
+ wrap: function( html ) {
+ return this.each(function() {
+ jQuery( this ).wrapAll( html );
+ });
+ },
+
+ unwrap: function() {
+ return this.parent().each(function() {
+ if ( !jQuery.nodeName( this, "body" ) ) {
+ jQuery( this ).replaceWith( this.childNodes );
+ }
+ }).end();
+ },
+
+ append: function() {
+ return this.domManip(arguments, true, function( elem ) {
+ if ( this.nodeType === 1 ) {
+ this.appendChild( elem );
+ }
+ });
+ },
- if ( !event.isPropagationStopped() ) {
- var parent = elem.parentNode || elem.ownerDocument;
- if ( parent )
- jQuery.event.trigger(event, data, parent, true);
+ prepend: function() {
+ return this.domManip(arguments, true, function( elem ) {
+ if ( this.nodeType === 1 ) {
+ this.insertBefore( elem, this.firstChild );
+ }
+ });
+ },
+
+ before: function() {
+ if ( this[0] && this[0].parentNode ) {
+ return this.domManip(arguments, false, function( elem ) {
+ this.parentNode.insertBefore( elem, this );
+ });
+ } else if ( arguments.length ) {
+ var set = jQuery(arguments[0]);
+ set.push.apply( set, this.toArray() );
+ return this.pushStack( set, "before", arguments );
}
},
- handle: function(event) {
- // returned undefined or false
- var all, handlers;
+ after: function() {
+ if ( this[0] && this[0].parentNode ) {
+ return this.domManip(arguments, false, function( elem ) {
+ this.parentNode.insertBefore( elem, this.nextSibling );
+ });
+ } else if ( arguments.length ) {
+ var set = this.pushStack( this, "after", arguments );
+ set.push.apply( set, jQuery(arguments[0]).toArray() );
+ return set;
+ }
+ },
+
+ // keepData is for internal use only--do not document
+ remove: function( selector, keepData ) {
+ for ( var i = 0, elem; (elem = this[i]) != null; i++ ) {
+ if ( !selector || jQuery.filter( selector, [ elem ] ).length ) {
+ if ( !keepData && elem.nodeType === 1 ) {
+ jQuery.cleanData( elem.getElementsByTagName("*") );
+ jQuery.cleanData( [ elem ] );
+ }
- event = arguments[0] = jQuery.event.fix( event || window.event );
- event.currentTarget = this;
+ if ( elem.parentNode ) {
+ elem.parentNode.removeChild( elem );
+ }
+ }
+ }
- // Namespaced event handlers
- var namespaces = event.type.split(".");
- event.type = namespaces.shift();
+ return this;
+ },
+
+ empty: function() {
+ for ( var i = 0, elem; (elem = this[i]) != null; i++ ) {
+ // Remove element nodes and prevent memory leaks
+ if ( elem.nodeType === 1 ) {
+ jQuery.cleanData( elem.getElementsByTagName("*") );
+ }
- // Cache this now, all = true means, any handler
- all = !namespaces.length && !event.exclusive;
+ // Remove any remaining nodes
+ while ( elem.firstChild ) {
+ elem.removeChild( elem.firstChild );
+ }
+ }
- var namespace = RegExp("(^|\\.)" + namespaces.slice().sort().join(".*\\.") + "(\\.|$)");
+ return this;
+ },
- handlers = ( jQuery.data(this, "events") || {} )[event.type];
+ clone: function( events ) {
+ // Do the clone
+ var ret = this.map(function() {
+ if ( !jQuery.support.noCloneEvent && !jQuery.isXMLDoc(this) ) {
+ // IE copies events bound via attachEvent when
+ // using cloneNode. Calling detachEvent on the
+ // clone will also remove the events from the original
+ // In order to get around this, we use innerHTML.
+ // Unfortunately, this means some modifications to
+ // attributes in IE that are actually only stored
+ // as properties will not be copied (such as the
+ // name attribute on an input).
+ var html = this.outerHTML, ownerDocument = this.ownerDocument;
+ if ( !html ) {
+ var div = ownerDocument.createElement("div");
+ div.appendChild( this.cloneNode(true) );
+ html = div.innerHTML;
+ }
- for ( var j in handlers ) {
- var handler = handlers[j];
+ return jQuery.clean([html.replace(rinlinejQuery, "")
+ // Handle the case in IE 8 where action=/test/> self-closes a tag
+ .replace(/=([^="'>\s]+\/)>/g, '="$1">')
+ .replace(rleadingWhitespace, "")], ownerDocument)[0];
+ } else {
+ return this.cloneNode(true);
+ }
+ });
- // Filter the functions by class
- if ( all || namespace.test(handler.type) ) {
- // Pass in a reference to the handler function itself
- // So that we can later remove it
- event.handler = handler;
- event.data = handler.data;
+ // Copy the events from the original to the clone
+ if ( events === true ) {
+ cloneCopyEvent( this, ret );
+ cloneCopyEvent( this.find("*"), ret.find("*") );
+ }
- var ret = handler.apply(this, arguments);
+ // Return the cloned set
+ return ret;
+ },
+
+ html: function( value ) {
+ if ( value === undefined ) {
+ return this[0] && this[0].nodeType === 1 ?
+ this[0].innerHTML.replace(rinlinejQuery, "") :
+ null;
- if( ret !== undefined ){
- event.result = ret;
- if ( ret === false ) {
- event.preventDefault();
- event.stopPropagation();
+ // See if we can take a shortcut and just use innerHTML
+ } else if ( typeof value === "string" && !rnocache.test( value ) &&
+ (jQuery.support.leadingWhitespace || !rleadingWhitespace.test( value )) &&
+ !wrapMap[ (rtagName.exec( value ) || ["", ""])[1].toLowerCase() ] ) {
+
+ value = value.replace(rxhtmlTag, fcloseTag);
+
+ try {
+ for ( var i = 0, l = this.length; i < l; i++ ) {
+ // Remove element nodes and prevent memory leaks
+ if ( this[i].nodeType === 1 ) {
+ jQuery.cleanData( this[i].getElementsByTagName("*") );
+ this[i].innerHTML = value;
}
}
- if( event.isImmediatePropagationStopped() )
- break;
-
+ // If using innerHTML throws an exception, use the fallback method
+ } catch(e) {
+ this.empty().append( value );
}
+
+ } else if ( jQuery.isFunction( value ) ) {
+ this.each(function(i){
+ var self = jQuery(this), old = self.html();
+ self.empty().append(function(){
+ return value.call( this, i, old );
+ });
+ });
+
+ } else {
+ this.empty().append( value );
}
+
+ return this;
},
- props: "altKey attrChange attrName bubbles button cancelable charCode clientX clientY ctrlKey currentTarget data detail eventPhase fromElement handler keyCode metaKey newValue originalTarget pageX pageY prevValue relatedNode relatedTarget screenX screenY shiftKey srcElement target toElement view wheelDelta which".split(" "),
+ replaceWith: function( value ) {
+ if ( this[0] && this[0].parentNode ) {
+ // Make sure that the elements are removed from the DOM before they are inserted;
+ // this can help fix replacing a parent with child elements
+ if ( jQuery.isFunction( value ) ) {
+ return this.each(function(i) {
+ var self = jQuery(this), old = self.html();
+ self.replaceWith( value.call( this, i, old ) );
+ });
+ }
- fix: function(event) {
- if ( event[expando] )
- return event;
+ if ( typeof value !== "string" ) {
+ value = jQuery(value).detach();
+ }
- // store a copy of the original event object
- // and "clone" to set read-only properties
- var originalEvent = event;
- event = jQuery.Event( originalEvent );
+ return this.each(function() {
+ var next = this.nextSibling, parent = this.parentNode;
- for ( var i = this.props.length, prop; i; ){
- prop = this.props[ --i ];
- event[ prop ] = originalEvent[ prop ];
+ jQuery(this).remove();
+
+ if ( next ) {
+ jQuery(next).before( value );
+ } else {
+ jQuery(parent).append( value );
+ }
+ });
+ } else {
+ return this.pushStack( jQuery(jQuery.isFunction(value) ? value() : value), "replaceWith", value );
}
+ },
- // Fix target property, if necessary
- if ( !event.target )
- event.target = event.srcElement || document; // Fixes #1925 where srcElement might not be defined either
+ detach: function( selector ) {
+ return this.remove( selector, true );
+ },
- // check if target is a textnode (safari)
- if ( event.target.nodeType == 3 )
- event.target = event.target.parentNode;
+ domManip: function( args, table, callback ) {
+ var results, first, value = args[0], scripts = [], fragment, parent;
- // Add relatedTarget, if necessary
- if ( !event.relatedTarget && event.fromElement )
- event.relatedTarget = event.fromElement == event.target ? event.toElement : event.fromElement;
+ // We can't cloneNode fragments that contain checked, in WebKit
+ if ( !jQuery.support.checkClone && arguments.length === 3 && typeof value === "string" && rchecked.test( value ) ) {
+ return this.each(function() {
+ jQuery(this).domManip( args, table, callback, true );
+ });
+ }
- // Calculate pageX/Y if missing and clientX/Y available
- if ( event.pageX == null && event.clientX != null ) {
- var doc = document.documentElement, body = document.body;
- event.pageX = event.clientX + (doc && doc.scrollLeft || body && body.scrollLeft || 0) - (doc.clientLeft || 0);
- event.pageY = event.clientY + (doc && doc.scrollTop || body && body.scrollTop || 0) - (doc.clientTop || 0);
+ if ( jQuery.isFunction(value) ) {
+ return this.each(function(i) {
+ var self = jQuery(this);
+ args[0] = value.call(this, i, table ? self.html() : undefined);
+ self.domManip( args, table, callback );
+ });
}
- // Add which for key events
- if ( !event.which && ((event.charCode || event.charCode === 0) ? event.charCode : event.keyCode) )
- event.which = event.charCode || event.keyCode;
+ if ( this[0] ) {
+ parent = value && value.parentNode;
- // Add metaKey to non-Mac browsers (use ctrl for PC's and Meta for Macs)
- if ( !event.metaKey && event.ctrlKey )
- event.metaKey = event.ctrlKey;
+ // If we're in a fragment, just use that instead of building a new one
+ if ( jQuery.support.parentNode && parent && parent.nodeType === 11 && parent.childNodes.length === this.length ) {
+ results = { fragment: parent };
- // Add which for click: 1 == left; 2 == middle; 3 == right
- // Note: button is not normalized, so don't use it
- if ( !event.which && event.button )
- event.which = (event.button & 1 ? 1 : ( event.button & 2 ? 3 : ( event.button & 4 ? 2 : 0 ) ));
+ } else {
+ results = buildFragment( args, this, scripts );
+ }
+
+ fragment = results.fragment;
+
+ if ( fragment.childNodes.length === 1 ) {
+ first = fragment = fragment.firstChild;
+ } else {
+ first = fragment.firstChild;
+ }
- return event;
- },
+ if ( first ) {
+ table = table && jQuery.nodeName( first, "tr" );
+
+ for ( var i = 0, l = this.length; i < l; i++ ) {
+ callback.call(
+ table ?
+ root(this[i], first) :
+ this[i],
+ i > 0 || results.cacheable || this.length > 1 ?
+ fragment.cloneNode(true) :
+ fragment
+ );
+ }
+ }
- proxy: function( fn, proxy ){
- proxy = proxy || function(){ return fn.apply(this, arguments); };
- // Set the guid of unique handler to the same of original handler, so it can be removed
- proxy.guid = fn.guid = fn.guid || proxy.guid || this.guid++;
- // So proxy can be declared as an argument
- return proxy;
- },
+ if ( scripts.length ) {
+ jQuery.each( scripts, evalScript );
+ }
+ }
- special: {
- ready: {
- // Make sure the ready event is setup
- setup: bindReady,
- teardown: function() {}
+ return this;
+
+ function root( elem, cur ) {
+ return jQuery.nodeName(elem, "table") ?
+ (elem.getElementsByTagName("tbody")[0] ||
+ elem.appendChild(elem.ownerDocument.createElement("tbody"))) :
+ elem;
}
- },
-
- specialAll: {
- live: {
- setup: function( selector, namespaces ){
- jQuery.event.add( this, namespaces[0], liveHandler );
- },
- teardown: function( namespaces ){
- if ( namespaces.length ) {
- var remove = 0, name = RegExp("(^|\\.)" + namespaces[0] + "(\\.|$)");
-
- jQuery.each( (jQuery.data(this, "events").live || {}), function(){
- if ( name.test(this.type) )
- remove++;
- });
-
- if ( remove < 1 )
- jQuery.event.remove( this, namespaces[0], liveHandler );
+ }
+});
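// Illustrative usage of the function arguments threaded through domManip
// above (the "li" selection is hypothetical):
jQuery("li").append(function( i ) {
	return "<span>#" + i + "</span>"; // evaluated once per matched element
});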
+
+function cloneCopyEvent(orig, ret) {
+ var i = 0;
+
+ ret.each(function() {
+ if ( this.nodeName !== (orig[i] && orig[i].nodeName) ) {
+ return;
+ }
+
+ var oldData = jQuery.data( orig[i++] ), curData = jQuery.data( this, oldData ), events = oldData && oldData.events;
+
+ if ( events ) {
+ delete curData.handle;
+ curData.events = {};
+
+ for ( var type in events ) {
+ for ( var handler in events[ type ] ) {
+ jQuery.event.add( this, type, events[ type ][ handler ], events[ type ][ handler ].data );
}
}
}
+ });
+}
+
+function buildFragment( args, nodes, scripts ) {
+ var fragment, cacheable, cacheresults,
+ doc = (nodes && nodes[0] ? nodes[0].ownerDocument || nodes[0] : document);
+
+ // Only cache "small" (1/2 KB) strings that are associated with the main document
+ // Cloning options loses the selected state, so don't cache them
+ // IE 6 doesn't like it when you put <object> or <embed> elements in a fragment
+ // Also, WebKit does not clone 'checked' attributes on cloneNode, so don't cache
+ if ( args.length === 1 && typeof args[0] === "string" && args[0].length < 512 && doc === document &&
+ !rnocache.test( args[0] ) && (jQuery.support.checkClone || !rchecked.test( args[0] )) ) {
+
+ cacheable = true;
+ cacheresults = jQuery.fragments[ args[0] ];
+ if ( cacheresults ) {
+ if ( cacheresults !== 1 ) {
+ fragment = cacheresults;
+ }
+ }
}
-};
-jQuery.Event = function( src ){
- // Allow instantiation without the 'new' keyword
- if( !this.preventDefault )
- return new jQuery.Event(src);
-
- // Event object
- if( src && src.type ){
- this.originalEvent = src;
- this.type = src.type;
- // Event type
- }else
- this.type = src;
+ if ( !fragment ) {
+ fragment = doc.createDocumentFragment();
+ jQuery.clean( args, doc, fragment, scripts );
+ }
- // timeStamp is buggy for some events on Firefox(#3843)
- // So we won't rely on the native value
- this.timeStamp = now();
-
- // Mark it as fixed
- this[expando] = true;
-};
+ if ( cacheable ) {
+ jQuery.fragments[ args[0] ] = cacheresults ? fragment : 1;
+ }
-function returnFalse(){
- return false;
+ return { fragment: fragment, cacheable: cacheable };
}
-function returnTrue(){
- return true;
-}
-
-// jQuery.Event is based on DOM3 Events as specified by the ECMAScript Language Binding
-// http://www.w3.org/TR/2003/WD-DOM-Level-3-Events-20030331/ecma-script-binding.html
-jQuery.Event.prototype = {
- preventDefault: function() {
- this.isDefaultPrevented = returnTrue;
- var e = this.originalEvent;
- if( !e )
- return;
- // if preventDefault exists run it on the original event
- if (e.preventDefault)
- e.preventDefault();
- // otherwise set the returnValue property of the original event to false (IE)
- e.returnValue = false;
- },
- stopPropagation: function() {
- this.isPropagationStopped = returnTrue;
+jQuery.fragments = {};
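// A brief sketch of the fragment cache wired up above: identical small
// HTML strings are parsed once and cloned on later inserts.
jQuery("<p>cached</p>").appendTo( document.body );
jQuery("<p>cached</p>").appendTo( document.body ); // clones the cached fragment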
- var e = this.originalEvent;
- if( !e )
- return;
- // if stopPropagation exists run it on the original event
- if (e.stopPropagation)
- e.stopPropagation();
- // otherwise set the cancelBubble property of the original event to true (IE)
- e.cancelBubble = true;
- },
- stopImmediatePropagation:function(){
- this.isImmediatePropagationStopped = returnTrue;
- this.stopPropagation();
- },
- isDefaultPrevented: returnFalse,
- isPropagationStopped: returnFalse,
- isImmediatePropagationStopped: returnFalse
-};
-// Checks if an event happened on an element within another element
-// Used in jQuery.event.special.mouseenter and mouseleave handlers
-var withinElement = function(event) {
- // Check if mouse(over|out) are still within the same parent element
- var parent = event.relatedTarget;
- // Traverse up the tree
- while ( parent && parent != this )
- try { parent = parent.parentNode; }
- catch(e) { parent = this; }
-
- if( parent != this ){
- // set the correct event type
- event.type = event.data;
- // handle event if we actually just moused on to a non sub-element
- jQuery.event.handle.apply( this, arguments );
- }
-};
-
-jQuery.each({
- mouseover: 'mouseenter',
- mouseout: 'mouseleave'
-}, function( orig, fix ){
- jQuery.event.special[ fix ] = {
- setup: function(){
- jQuery.event.add( this, orig, withinElement, fix );
- },
- teardown: function(){
- jQuery.event.remove( this, orig, withinElement );
+jQuery.each({
+ appendTo: "append",
+ prependTo: "prepend",
+ insertBefore: "before",
+ insertAfter: "after",
+ replaceAll: "replaceWith"
+}, function( name, original ) {
+ jQuery.fn[ name ] = function( selector ) {
+ var ret = [], insert = jQuery( selector ),
+ parent = this.length === 1 && this[0].parentNode;
+
+ if ( parent && parent.nodeType === 11 && parent.childNodes.length === 1 && insert.length === 1 ) {
+ insert[ original ]( this[0] );
+ return this;
+
+ } else {
+ for ( var i = 0, l = insert.length; i < l; i++ ) {
+ var elems = (i > 0 ? this.clone(true) : this).get();
+ jQuery.fn[ original ].apply( jQuery(insert[i]), elems );
+ ret = ret.concat( elems );
+ }
+
+ return this.pushStack( ret, name, insert.selector );
}
- };
+ };
});
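// The generated methods invert source and target ("#list" is hypothetical):
jQuery("<li>new</li>").appendTo("#list");
// ...equivalent to jQuery("#list").append("<li>new</li>")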
-jQuery.fn.extend({
- bind: function( type, data, fn ) {
- return type == "unload" ? this.one(type, data, fn) : this.each(function(){
- jQuery.event.add( this, type, fn || data, fn && data );
- });
- },
+jQuery.extend({
+ clean: function( elems, context, fragment, scripts ) {
+ context = context || document;
- one: function( type, data, fn ) {
- var one = jQuery.event.proxy( fn || data, function(event) {
- jQuery(this).unbind(event, one);
- return (fn || data).apply( this, arguments );
- });
- return this.each(function(){
- jQuery.event.add( this, type, one, fn && data);
- });
- },
+ // !context.createElement fails in IE with an error but returns typeof 'object'
+ if ( typeof context.createElement === "undefined" ) {
+ context = context.ownerDocument || context[0] && context[0].ownerDocument || document;
+ }
- unbind: function( type, fn ) {
- return this.each(function(){
- jQuery.event.remove( this, type, fn );
- });
- },
+ var ret = [];
- trigger: function( type, data ) {
- return this.each(function(){
- jQuery.event.trigger( type, data, this );
- });
- },
+ for ( var i = 0, elem; (elem = elems[i]) != null; i++ ) {
+ if ( typeof elem === "number" ) {
+ elem += "";
+ }
- triggerHandler: function( type, data ) {
- if( this[0] ){
- var event = jQuery.Event(type);
- event.preventDefault();
- event.stopPropagation();
- jQuery.event.trigger( event, data, this[0] );
- return event.result;
- }
- },
+ if ( !elem ) {
+ continue;
+ }
- toggle: function( fn ) {
- // Save reference to arguments for access in closure
- var args = arguments, i = 1;
+ // Convert html string into DOM nodes
+ if ( typeof elem === "string" && !rhtml.test( elem ) ) {
+ elem = context.createTextNode( elem );
- // link all the functions, so any of them can unbind this click handler
- while( i < args.length )
- jQuery.event.proxy( fn, args[i++] );
+ } else if ( typeof elem === "string" ) {
+ // Fix "XHTML"-style tags in all browsers
+ elem = elem.replace(rxhtmlTag, fcloseTag);
- return this.click( jQuery.event.proxy( fn, function(event) {
- // Figure out which function to execute
- this.lastToggle = ( this.lastToggle || 0 ) % i;
+ // Trim whitespace, otherwise indexOf won't work as expected
+ var tag = (rtagName.exec( elem ) || ["", ""])[1].toLowerCase(),
+ wrap = wrapMap[ tag ] || wrapMap._default,
+ depth = wrap[0],
+ div = context.createElement("div");
- // Make sure that clicks stop
- event.preventDefault();
+ // Go to html and back, then peel off extra wrappers
+ div.innerHTML = wrap[1] + elem + wrap[2];
- // and execute the function
- return args[ this.lastToggle++ ].apply( this, arguments ) || false;
- }));
- },
+ // Move to the right depth
+ while ( depth-- ) {
+ div = div.lastChild;
+ }
- hover: function(fnOver, fnOut) {
- return this.mouseenter(fnOver).mouseleave(fnOut);
- },
+ // Remove IE's autoinserted <tbody> from table fragments
+ if ( !jQuery.support.tbody ) {
- ready: function(fn) {
- // Attach the listeners
- bindReady();
+ // String was a <table>, *may* have spurious <tbody>
+ var hasBody = rtbody.test(elem),
+ tbody = tag === "table" && !hasBody ?
+ div.firstChild && div.firstChild.childNodes :
- // If the DOM is already ready
- if ( jQuery.isReady )
- // Execute the function immediately
- fn.call( document, jQuery );
+ // String was a bare <thead> or <tfoot>
+ wrap[1] === "<table>" && !hasBody ?
+ div.childNodes :
+ [];
- // Otherwise, remember the function for later
- else
- // Add the function to the wait list
- jQuery.readyList.push( fn );
+ for ( var j = tbody.length - 1; j >= 0 ; --j ) {
+ if ( jQuery.nodeName( tbody[ j ], "tbody" ) && !tbody[ j ].childNodes.length ) {
+ tbody[ j ].parentNode.removeChild( tbody[ j ] );
+ }
+ }
- return this;
- },
-
- live: function( type, fn ){
- var proxy = jQuery.event.proxy( fn );
- proxy.guid += this.selector + type;
+ }
- jQuery(document).bind( liveConvert(type, this.selector), this.selector, proxy );
+ // IE completely kills leading whitespace when innerHTML is used
+ if ( !jQuery.support.leadingWhitespace && rleadingWhitespace.test( elem ) ) {
+ div.insertBefore( context.createTextNode( rleadingWhitespace.exec(elem)[0] ), div.firstChild );
+ }
- return this;
+ elem = div.childNodes;
+ }
+
+ if ( elem.nodeType ) {
+ ret.push( elem );
+ } else {
+ ret = jQuery.merge( ret, elem );
+ }
+ }
+
+ if ( fragment ) {
+ for ( var i = 0; ret[i]; i++ ) {
+ if ( scripts && jQuery.nodeName( ret[i], "script" ) && (!ret[i].type || ret[i].type.toLowerCase() === "text/javascript") ) {
+ scripts.push( ret[i].parentNode ? ret[i].parentNode.removeChild( ret[i] ) : ret[i] );
+
+ } else {
+ if ( ret[i].nodeType === 1 ) {
+ ret.splice.apply( ret, [i + 1, 0].concat(jQuery.makeArray(ret[i].getElementsByTagName("script"))) );
+ }
+ fragment.appendChild( ret[i] );
+ }
+ }
+ }
+
+ return ret;
},
- die: function( type, fn ){
- jQuery(document).unbind( liveConvert(type, this.selector), fn ? { guid: fn.guid + this.selector + type } : null );
- return this;
+ cleanData: function( elems ) {
+ var data, id, cache = jQuery.cache,
+ special = jQuery.event.special,
+ deleteExpando = jQuery.support.deleteExpando;
+
+ for ( var i = 0, elem; (elem = elems[i]) != null; i++ ) {
+ id = elem[ jQuery.expando ];
+
+ if ( id ) {
+ data = cache[ id ];
+
+ if ( data.events ) {
+ for ( var type in data.events ) {
+ if ( special[ type ] ) {
+ jQuery.event.remove( elem, type );
+
+ } else {
+ removeEvent( elem, type, data.handle );
+ }
+ }
+ }
+
+ if ( deleteExpando ) {
+ delete elem[ jQuery.expando ];
+
+ } else if ( elem.removeAttribute ) {
+ elem.removeAttribute( jQuery.expando );
+ }
+
+ delete cache[ id ];
+ }
+ }
}
});
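// A short sketch: cleanData() strips handlers and cache entries before
// nodes are discarded, which remove() and empty() above rely on.
jQuery.cleanData( jQuery("div.stale").get() ); // hypothetical selection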
+// exclude the following css properties to add px
+var rexclude = /z-?index|font-?weight|opacity|zoom|line-?height/i,
+ ralpha = /alpha\([^)]*\)/,
+ ropacity = /opacity=([^)]*)/,
+ rfloat = /float/i,
+ rdashAlpha = /-([a-z])/ig,
+ rupper = /([A-Z])/g,
+ rnumpx = /^-?\d+(?:px)?$/i,
+ rnum = /^-?\d/,
+
+ cssShow = { position: "absolute", visibility: "hidden", display:"block" },
+ cssWidth = [ "Left", "Right" ],
+ cssHeight = [ "Top", "Bottom" ],
+
+ // cache check for defaultView.getComputedStyle
+ getComputedStyle = document.defaultView && document.defaultView.getComputedStyle,
+ // normalize float css property
+ styleFloat = jQuery.support.cssFloat ? "cssFloat" : "styleFloat",
+ fcamelCase = function( all, letter ) {
+ return letter.toUpperCase();
+ };
-function liveHandler( event ){
- var check = RegExp("(^|\\.)" + event.type + "(\\.|$)"),
- stop = true,
- elems = [];
-
- jQuery.each(jQuery.data(this, "events").live || [], function(i, fn){
- if ( check.test(fn.type) ) {
- var elem = jQuery(event.target).closest(fn.data)[0];
- if ( elem )
- elems.push({ elem: elem, fn: fn });
+jQuery.fn.css = function( name, value ) {
+ return access( this, name, value, true, function( elem, name, value ) {
+ if ( value === undefined ) {
+ return jQuery.curCSS( elem, name );
+ }
+
+ if ( typeof value === "number" && !rexclude.test(name) ) {
+ value += "px";
}
- });
- elems.sort(function(a,b) {
- return jQuery.data(a.elem, "closest") - jQuery.data(b.elem, "closest");
- });
-
- jQuery.each(elems, function(){
- if ( this.fn.call(this.elem, event, this.fn.data) === false )
- return (stop = false);
+ jQuery.style( elem, name, value );
});
+};
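// A short sketch of the unit handling above: plain numbers gain "px"
// unless the property matches rexclude.
jQuery("div").css( "width", 100 );   // stored as "100px"
jQuery("div").css( "opacity", 0.5 ); // excluded: stays unitless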
- return stop;
-}
+jQuery.extend({
+ style: function( elem, name, value ) {
+ // don't set styles on text and comment nodes
+ if ( !elem || elem.nodeType === 3 || elem.nodeType === 8 ) {
+ return undefined;
+ }
-function liveConvert(type, selector){
- return ["live", type, selector.replace(/\./g, "`").replace(/ /g, "|")].join(".");
-}
+ // ignore negative width and height values #1599
+ if ( (name === "width" || name === "height") && parseFloat(value) < 0 ) {
+ value = undefined;
+ }
-jQuery.extend({
- isReady: false,
- readyList: [],
- // Handle when the DOM is ready
- ready: function() {
- // Make sure that the DOM is not already loaded
- if ( !jQuery.isReady ) {
- // Remember that the DOM is ready
- jQuery.isReady = true;
+ var style = elem.style || elem, set = value !== undefined;
- // If there are functions bound, to execute
- if ( jQuery.readyList ) {
- // Execute all of them
- jQuery.each( jQuery.readyList, function(){
- this.call( document, jQuery );
- });
+ // IE uses filters for opacity
+ if ( !jQuery.support.opacity && name === "opacity" ) {
+ if ( set ) {
+ // IE has trouble with opacity if it does not have layout
+ // Force it by setting the zoom level
+ style.zoom = 1;
- // Reset the list of functions
- jQuery.readyList = null;
+ // Set the alpha filter to set the opacity
+ var opacity = parseInt( value, 10 ) + "" === "NaN" ? "" : "alpha(opacity=" + value * 100 + ")";
+ var filter = style.filter || jQuery.curCSS( elem, "filter" ) || "";
+ style.filter = ralpha.test(filter) ? filter.replace(ralpha, opacity) : opacity;
}
- // Trigger any bound ready events
- jQuery(document).triggerHandler("ready");
+ return style.filter && style.filter.indexOf("opacity=") >= 0 ?
+ (parseFloat( ropacity.exec(style.filter)[1] ) / 100) + "":
+ "";
}
- }
-});
-var readyBound = false;
+ // Make sure we're using the right name for getting the float value
+ if ( rfloat.test( name ) ) {
+ name = styleFloat;
+ }
-function bindReady(){
- if ( readyBound ) return;
- readyBound = true;
+ name = name.replace(rdashAlpha, fcamelCase);
- // Mozilla, Opera and webkit nightlies currently support this event
- if ( document.addEventListener ) {
- // Use the handy event callback
- document.addEventListener( "DOMContentLoaded", function(){
- document.removeEventListener( "DOMContentLoaded", arguments.callee, false );
- jQuery.ready();
- }, false );
+ if ( set ) {
+ style[ name ] = value;
+ }
+
+ return style[ name ];
+ },
- // If IE event model is used
- } else if ( document.attachEvent ) {
- // ensure firing before onload,
- // maybe late but safe also for iframes
- document.attachEvent("onreadystatechange", function(){
- if ( document.readyState === "complete" ) {
- document.detachEvent( "onreadystatechange", arguments.callee );
- jQuery.ready();
+ css: function( elem, name, force, extra ) {
+ if ( name === "width" || name === "height" ) {
+ var val, props = cssShow, which = name === "width" ? cssWidth : cssHeight;
+
+ function getWH() {
+ val = name === "width" ? elem.offsetWidth : elem.offsetHeight;
+
+ if ( extra === "border" ) {
+ return;
+ }
+
+ jQuery.each( which, function() {
+ if ( !extra ) {
+ val -= parseFloat(jQuery.curCSS( elem, "padding" + this, true)) || 0;
+ }
+
+ if ( extra === "margin" ) {
+ val += parseFloat(jQuery.curCSS( elem, "margin" + this, true)) || 0;
+ } else {
+ val -= parseFloat(jQuery.curCSS( elem, "border" + this + "Width", true)) || 0;
+ }
+ });
}
- });
- // If IE and not an iframe
- // continually check to see if the document is ready
- if ( document.documentElement.doScroll && window == window.top ) (function(){
- if ( jQuery.isReady ) return;
+ if ( elem.offsetWidth !== 0 ) {
+ getWH();
+ } else {
+ jQuery.swap( elem, props, getWH );
+ }
- try {
- // If IE is used, use the trick by Diego Perini
- // http://javascript.nwbox.com/IEContentLoaded/
- document.documentElement.doScroll("left");
- } catch( error ) {
- setTimeout( arguments.callee, 0 );
- return;
+ return Math.max(0, Math.round(val));
+ }
+
+ return jQuery.curCSS( elem, name, force );
+ },
+
+ curCSS: function( elem, name, force ) {
+ var ret, style = elem.style, filter;
+
+ // IE uses filters for opacity
+ if ( !jQuery.support.opacity && name === "opacity" && elem.currentStyle ) {
+ ret = ropacity.test(elem.currentStyle.filter || "") ?
+ (parseFloat(RegExp.$1) / 100) + "" :
+ "";
+
+ return ret === "" ?
+ "1" :
+ ret;
+ }
+
+ // Make sure we're using the right name for getting the float value
+ if ( rfloat.test( name ) ) {
+ name = styleFloat;
+ }
+
+ if ( !force && style && style[ name ] ) {
+ ret = style[ name ];
+
+ } else if ( getComputedStyle ) {
+
+ // Only "float" is needed here
+ if ( rfloat.test( name ) ) {
+ name = "float";
}
- // and execute any waiting functions
- jQuery.ready();
- })();
- }
+ name = name.replace( rupper, "-$1" ).toLowerCase();
- // A fallback to window.onload, that will always work
- jQuery.event.add( window, "load", jQuery.ready );
-}
+ var defaultView = elem.ownerDocument.defaultView;
-jQuery.each( ("blur,focus,load,resize,scroll,unload,click,dblclick," +
- "mousedown,mouseup,mousemove,mouseover,mouseout,mouseenter,mouseleave," +
- "change,select,submit,keydown,keypress,keyup,error").split(","), function(i, name){
+ if ( !defaultView ) {
+ return null;
+ }
- // Handle event binding
- jQuery.fn[name] = function(fn){
- return fn ? this.bind(name, fn) : this.trigger(name);
- };
-});
+ var computedStyle = defaultView.getComputedStyle( elem, null );
-// Prevent memory leaks in IE
-// And prevent errors on refresh with events like mouseover in other browsers
-// Window isn't included so as not to unbind existing unload events
-jQuery( window ).bind( 'unload', function(){
- for ( var id in jQuery.cache )
- // Skip the window
- if ( id != 1 && jQuery.cache[ id ].handle )
- jQuery.event.remove( jQuery.cache[ id ].handle.elem );
-});
-(function(){
+ if ( computedStyle ) {
+ ret = computedStyle.getPropertyValue( name );
+ }
- jQuery.support = {};
+ // We should always get a number back from opacity
+ if ( name === "opacity" && ret === "" ) {
+ ret = "1";
+ }
- var root = document.documentElement,
- script = document.createElement("script"),
- div = document.createElement("div"),
- id = "script" + (new Date).getTime();
+ } else if ( elem.currentStyle ) {
+ var camelCase = name.replace(rdashAlpha, fcamelCase);
- div.style.display = "none";
- div.innerHTML = ' <link/><table></table><a href="/a" style="color:red;float:left;opacity:.5;">a</a><select><option>text</option></select><object><param/></object>';
+ ret = elem.currentStyle[ name ] || elem.currentStyle[ camelCase ];
- var all = div.getElementsByTagName("*"),
- a = div.getElementsByTagName("a")[0];
+ // From the awesome hack by Dean Edwards
+ // http://erik.eae.net/archives/2007/07/27/18.54.15/#comment-102291
- // Can't get basic test support
- if ( !all || !all.length || !a ) {
- return;
- }
+ // If we're not dealing with a regular pixel number
+ // but a number that has a weird ending, we need to convert it to pixels
+ if ( !rnumpx.test( ret ) && rnum.test( ret ) ) {
+ // Remember the original values
+ var left = style.left, rsLeft = elem.runtimeStyle.left;
- jQuery.support = {
- // IE strips leading whitespace when .innerHTML is used
- leadingWhitespace: div.firstChild.nodeType == 3,
-
- // Make sure that tbody elements aren't automatically inserted
- // IE will insert them into empty tables
- tbody: !div.getElementsByTagName("tbody").length,
-
- // Make sure that you can get all elements in an <object> element
- // IE 7 always returns no results
- objectAll: !!div.getElementsByTagName("object")[0]
- .getElementsByTagName("*").length,
-
- // Make sure that link elements get serialized correctly by innerHTML
- // This requires a wrapper element in IE
- htmlSerialize: !!div.getElementsByTagName("link").length,
-
- // Get the style information from getAttribute
- // (IE uses .cssText insted)
- style: /red/.test( a.getAttribute("style") ),
-
- // Make sure that URLs aren't manipulated
- // (IE normalizes it by default)
- hrefNormalized: a.getAttribute("href") === "/a",
-
- // Make sure that element opacity exists
- // (IE uses filter instead)
- opacity: a.style.opacity === "0.5",
-
- // Verify style float existence
- // (IE uses styleFloat instead of cssFloat)
- cssFloat: !!a.style.cssFloat,
+ // Put in the new values to get a computed value out
+ elem.runtimeStyle.left = elem.currentStyle.left;
+ style.left = camelCase === "fontSize" ? "1em" : (ret || 0);
+ ret = style.pixelLeft + "px";
- // Will be defined later
- scriptEval: false,
- noCloneEvent: true,
- boxModel: null
- };
-
- script.type = "text/javascript";
- try {
- script.appendChild( document.createTextNode( "window." + id + "=1;" ) );
- } catch(e){}
+ // Revert the changed values
+ style.left = left;
+ elem.runtimeStyle.left = rsLeft;
+ }
+ }
- root.insertBefore( script, root.firstChild );
-
- // Make sure that the execution of code works by injecting a script
- // tag with appendChild/createTextNode
- // (IE doesn't support this, fails, and uses .text instead)
- if ( window[ id ] ) {
- jQuery.support.scriptEval = true;
- delete window[ id ];
- }
+ return ret;
+ },
- root.removeChild( script );
+ // A method for quickly swapping in/out CSS properties to get correct calculations
+ swap: function( elem, options, callback ) {
+ var old = {};
- if ( div.attachEvent && div.fireEvent ) {
- div.attachEvent("onclick", function(){
- // Cloning a node shouldn't copy over any
- // bound event handlers (IE does this)
- jQuery.support.noCloneEvent = false;
- div.detachEvent("onclick", arguments.callee);
- });
- div.cloneNode(true).fireEvent("onclick");
+ // Remember the old values, and insert the new ones
+ for ( var name in options ) {
+ old[ name ] = elem.style[ name ];
+ elem.style[ name ] = options[ name ];
+ }
+
+ callback.call( elem );
+
+ // Revert the old values
+ for ( var name in options ) {
+ elem.style[ name ] = old[ name ];
+ }
}
+});
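
jQuery.swap is how the width/height branch of jQuery.css measures hidden elements: it applies the cssShow styles so offsetWidth/offsetHeight are meaningful, runs the callback, then restores the originals. The same trick in isolation (the element lookup is hypothetical):

    var el = document.getElementById("panel"), width;

    // Temporarily make a display:none element measurable without
    // letting it paint; all original inline styles are reverted after.
    jQuery.swap( el, { position: "absolute", visibility: "hidden", display: "block" }, function() {
        width = el.offsetWidth;
    });
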
- // Figure out if the W3C box model works as expected
- // document.body must exist before we can do this
- jQuery(function(){
- var div = document.createElement("div");
- div.style.width = div.style.paddingLeft = "1px";
+if ( jQuery.expr && jQuery.expr.filters ) {
+ jQuery.expr.filters.hidden = function( elem ) {
+ var width = elem.offsetWidth, height = elem.offsetHeight,
+ skip = elem.nodeName.toLowerCase() === "tr";
- document.body.appendChild( div );
- jQuery.boxModel = jQuery.support.boxModel = div.offsetWidth === 2;
- document.body.removeChild( div ).style.display = 'none';
- });
-})();
+ return width === 0 && height === 0 && !skip ?
+ true :
+ width > 0 && height > 0 && !skip ?
+ false :
+ jQuery.curCSS(elem, "display") === "none";
+ };
-var styleFloat = jQuery.support.cssFloat ? "cssFloat" : "styleFloat";
+ jQuery.expr.filters.visible = function( elem ) {
+ return !jQuery.expr.filters.hidden( elem );
+ };
+}
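
With these filters, visibility is decided by layout first (zero offsetWidth and offsetHeight), with <tr> and ambiguous cases falling back to a computed display check. For example:

    jQuery("div:hidden").show();            // zero-sized or display:none divs
    jQuery("p:visible").addClass("shown");  // anything that takes up space
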
+var jsc = now(),
+ rscript = /<script(.|\s)*?\/script>/gi,
+ rselectTextarea = /select|textarea/i,
+ rinput = /color|date|datetime|email|hidden|month|number|password|range|search|tel|text|time|url|week/i,
+ jsre = /=\?(&|$)/,
+ rquery = /\?/,
+ rts = /(\?|&)_=.*?(&|$)/,
+ rurl = /^(\w+:)?\/\/([^\/?#]+)/,
+ r20 = /%20/g,
+
+ // Keep a copy of the old load method
+ _load = jQuery.fn.load;
-jQuery.props = {
- "for": "htmlFor",
- "class": "className",
- "float": styleFloat,
- cssFloat: styleFloat,
- styleFloat: styleFloat,
- readonly: "readOnly",
- maxlength: "maxLength",
- cellspacing: "cellSpacing",
- rowspan: "rowSpan",
- tabindex: "tabIndex"
-};
jQuery.fn.extend({
- // Keep a copy of the old load
- _load: jQuery.fn.load,
-
load: function( url, params, callback ) {
- if ( typeof url !== "string" )
- return this._load( url );
+ if ( typeof url !== "string" ) {
+ return _load.call( this, url );
+
+ // Don't do a request if no elements are being requested
+ } else if ( !this.length ) {
+ return this;
+ }
var off = url.indexOf(" ");
if ( off >= 0 ) {
@@ -3239,7 +4801,7 @@ jQuery.fn.extend({
var type = "GET";
// If the second parameter was provided
- if ( params )
+ if ( params ) {
// If it's a function
if ( jQuery.isFunction( params ) ) {
// We assume that it's the callback
@@ -3247,10 +4809,11 @@ jQuery.fn.extend({
params = null;
// Otherwise, build a param string
- } else if( typeof params === "object" ) {
- params = jQuery.param( params );
+ } else if ( typeof params === "object" ) {
+ params = jQuery.param( params, jQuery.ajaxSettings.traditional );
type = "POST";
}
+ }
var self = this;
@@ -3260,27 +4823,30 @@ jQuery.fn.extend({
type: type,
dataType: "html",
data: params,
- complete: function(res, status){
+ complete: function( res, status ) {
// If successful, inject the HTML into all the matched elements
- if ( status == "success" || status == "notmodified" )
+ if ( status === "success" || status === "notmodified" ) {
// See if a selector was specified
self.html( selector ?
// Create a dummy div to hold the results
- jQuery("<div/>")
+ jQuery("<div />")
// inject the contents of the document in, removing the scripts
// to avoid any 'Permission Denied' errors in IE
- .append(res.responseText.replace(/<script(.|\s)*?\/script>/g, ""))
+ .append(res.responseText.replace(rscript, ""))
// Locate the specified elements
.find(selector) :
// If not, just inject the full result
res.responseText );
+ }
- if( callback )
+ if ( callback ) {
self.each( callback, [res.responseText, status, res] );
+ }
}
});
+
return this;
},
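
The rewritten load() still supports the "url selector" form: everything after the first space is treated as a selector, and scripts are stripped via rscript before .find() runs. A usage sketch (URL and selector hypothetical):

    jQuery("#sidebar").load("/pages/about.html #content", function( text, status, xhr ) {
        if ( status === "error" ) {
            jQuery(this).text("Load failed: " + xhr.status);
        }
    });
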
@@ -3288,40 +4854,41 @@ jQuery.fn.extend({
return jQuery.param(this.serializeArray());
},
serializeArray: function() {
- return this.map(function(){
+ return this.map(function() {
return this.elements ? jQuery.makeArray(this.elements) : this;
})
- .filter(function(){
+ .filter(function() {
return this.name && !this.disabled &&
- (this.checked || /select|textarea/i.test(this.nodeName) ||
- /text|hidden|password|search/i.test(this.type));
+ (this.checked || rselectTextarea.test(this.nodeName) ||
+ rinput.test(this.type));
})
- .map(function(i, elem){
+ .map(function( i, elem ) {
var val = jQuery(this).val();
- return val == null ? null :
+
+ return val == null ?
+ null :
jQuery.isArray(val) ?
- jQuery.map( val, function(val, i){
- return {name: elem.name, value: val};
+ jQuery.map( val, function( val, i ) {
+ return { name: elem.name, value: val };
}) :
- {name: elem.name, value: val};
+ { name: elem.name, value: val };
}).get();
}
});
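
serializeArray() returns name/value records for every successful control, which is exactly the shape jQuery.param accepts. For a hypothetical form with an input named "q" and a multi-select named "tags":

    var fields = jQuery("#search-form").serializeArray();
    // e.g. [ { name: "q", value: "couch" },
    //        { name: "tags", value: "db" },
    //        { name: "tags", value: "erlang" } ]
    var query = jQuery.param( fields );   // "q=couch&tags=db&tags=erlang"
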
// Attach a bunch of functions for handling common AJAX events
-jQuery.each( "ajaxStart,ajaxStop,ajaxComplete,ajaxError,ajaxSuccess,ajaxSend".split(","), function(i,o){
- jQuery.fn[o] = function(f){
+jQuery.each( "ajaxStart ajaxStop ajaxComplete ajaxError ajaxSuccess ajaxSend".split(" "), function( i, o ) {
+ jQuery.fn[o] = function( f ) {
return this.bind(o, f);
};
});
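
Each generated helper is plain sugar for bind(); the handlers receive the event followed by the [xhr, settings] arguments triggered from ajax() below. For instance (the spinner element is hypothetical):

    jQuery(document)
        .ajaxStart(function() { jQuery("#spinner").show(); })
        .ajaxStop(function() { jQuery("#spinner").hide(); })
        .ajaxError(function( event, xhr, settings, err ) {
            window.console && console.log("request to " + settings.url + " failed");
        });
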
-var jsc = now();
-
jQuery.extend({
-
+
get: function( url, data, callback, type ) {
- // shift arguments if data argument was ommited
+ // shift arguments if data argument was omitted
if ( jQuery.isFunction( data ) ) {
+ type = type || callback;
callback = data;
data = null;
}
@@ -3344,7 +4911,9 @@ jQuery.extend({
},
post: function( url, data, callback, type ) {
+ // shift arguments if data argument was omitted
if ( jQuery.isFunction( data ) ) {
+ type = type || callback;
callback = data;
data = {};
}
@@ -3374,13 +4943,21 @@ jQuery.extend({
data: null,
username: null,
password: null,
+ traditional: false,
*/
// Create the request object; Microsoft failed to properly
- // implement the XMLHttpRequest in IE7, so we use the ActiveXObject when it is available
+ // implement the XMLHttpRequest in IE7 (can't request local files),
+ // so we use the ActiveXObject when it is available
// This function can be overridden by calling jQuery.ajaxSetup
- xhr:function(){
- return window.ActiveXObject ? new ActiveXObject("Microsoft.XMLHTTP") : new XMLHttpRequest();
- },
+ xhr: window.XMLHttpRequest && (window.location.protocol !== "file:" || !window.ActiveXObject) ?
+ function() {
+ return new window.XMLHttpRequest();
+ } :
+ function() {
+ try {
+ return new window.ActiveXObject("Microsoft.XMLHTTP");
+ } catch(e) {}
+ },
accepts: {
xml: "application/xml, text/xml",
html: "text/html",
@@ -3393,36 +4970,41 @@ jQuery.extend({
// Last-Modified header cache for next request
lastModified: {},
+ etag: {},
- ajax: function( s ) {
- // Extend the settings, but re-extend 's' so that it can be
- // checked again later (in the test suite, specifically)
- s = jQuery.extend(true, s, jQuery.extend(true, {}, jQuery.ajaxSettings, s));
-
- var jsonp, jsre = /=\?(&|$)/g, status, data,
+ ajax: function( origSettings ) {
+ var s = jQuery.extend(true, {}, jQuery.ajaxSettings, origSettings);
+
+ var jsonp, status, data,
+ callbackContext = origSettings && origSettings.context || s,
type = s.type.toUpperCase();
// convert data if not already a string
- if ( s.data && s.processData && typeof s.data !== "string" )
- s.data = jQuery.param(s.data);
+ if ( s.data && s.processData && typeof s.data !== "string" ) {
+ s.data = jQuery.param( s.data, s.traditional );
+ }
// Handle JSONP Parameter Callbacks
- if ( s.dataType == "jsonp" ) {
- if ( type == "GET" ) {
- if ( !s.url.match(jsre) )
- s.url += (s.url.match(/\?/) ? "&" : "?") + (s.jsonp || "callback") + "=?";
- } else if ( !s.data || !s.data.match(jsre) )
+ if ( s.dataType === "jsonp" ) {
+ if ( type === "GET" ) {
+ if ( !jsre.test( s.url ) ) {
+ s.url += (rquery.test( s.url ) ? "&" : "?") + (s.jsonp || "callback") + "=?";
+ }
+ } else if ( !s.data || !jsre.test(s.data) ) {
s.data = (s.data ? s.data + "&" : "") + (s.jsonp || "callback") + "=?";
+ }
s.dataType = "json";
}
// Build temporary JSONP function
- if ( s.dataType == "json" && (s.data && s.data.match(jsre) || s.url.match(jsre)) ) {
- jsonp = "jsonp" + jsc++;
+ if ( s.dataType === "json" && (s.data && jsre.test(s.data) || jsre.test(s.url)) ) {
+ jsonp = s.jsonpCallback || ("jsonp" + jsc++);
// Replace the =? sequence both in the query string and the data
- if ( s.data )
+ if ( s.data ) {
s.data = (s.data + "").replace(jsre, "=" + jsonp + "$1");
+ }
+
s.url = s.url.replace(jsre, "=" + jsonp + "$1");
// We need to make sure
@@ -3430,75 +5012,85 @@ jQuery.extend({
s.dataType = "script";
// Handle JSONP-style loading
- window[ jsonp ] = function(tmp){
+ window[ jsonp ] = window[ jsonp ] || function( tmp ) {
data = tmp;
success();
complete();
// Garbage collect
window[ jsonp ] = undefined;
- try{ delete window[ jsonp ]; } catch(e){}
- if ( head )
+
+ try {
+ delete window[ jsonp ];
+ } catch(e) {}
+
+ if ( head ) {
head.removeChild( script );
+ }
};
}
- if ( s.dataType == "script" && s.cache == null )
+ if ( s.dataType === "script" && s.cache === null ) {
s.cache = false;
+ }
- if ( s.cache === false && type == "GET" ) {
+ if ( s.cache === false && type === "GET" ) {
var ts = now();
+
// try replacing _= if it is there
- var ret = s.url.replace(/(\?|&)_=.*?(&|$)/, "$1_=" + ts + "$2");
+ var ret = s.url.replace(rts, "$1_=" + ts + "$2");
+
// if nothing was replaced, add timestamp to the end
- s.url = ret + ((ret == s.url) ? (s.url.match(/\?/) ? "&" : "?") + "_=" + ts : "");
+ s.url = ret + ((ret === s.url) ? (rquery.test(s.url) ? "&" : "?") + "_=" + ts : "");
}
// If data is available, append data to url for get requests
- if ( s.data && type == "GET" ) {
- s.url += (s.url.match(/\?/) ? "&" : "?") + s.data;
-
- // IE likes to send both get and post data, prevent this
- s.data = null;
+ if ( s.data && type === "GET" ) {
+ s.url += (rquery.test(s.url) ? "&" : "?") + s.data;
}
// Watch for a new set of requests
- if ( s.global && ! jQuery.active++ )
+ if ( s.global && ! jQuery.active++ ) {
jQuery.event.trigger( "ajaxStart" );
+ }
// Matches an absolute URL, and saves the domain
- var parts = /^(\w+:)?\/\/([^\/?#]+)/.exec( s.url );
+ var parts = rurl.exec( s.url ),
+ remote = parts && (parts[1] && parts[1] !== location.protocol || parts[2] !== location.host);
// If we're requesting a remote document
// and trying to load JSON or Script with a GET
- if ( s.dataType == "script" && type == "GET" && parts
- && ( parts[1] && parts[1] != location.protocol || parts[2] != location.host )){
-
- var head = document.getElementsByTagName("head")[0];
+ if ( s.dataType === "script" && type === "GET" && remote ) {
+ var head = document.getElementsByTagName("head")[0] || document.documentElement;
var script = document.createElement("script");
script.src = s.url;
- if (s.scriptCharset)
+ if ( s.scriptCharset ) {
script.charset = s.scriptCharset;
+ }
// Handle Script loading
if ( !jsonp ) {
var done = false;
// Attach handlers for all browsers
- script.onload = script.onreadystatechange = function(){
+ script.onload = script.onreadystatechange = function() {
if ( !done && (!this.readyState ||
- this.readyState == "loaded" || this.readyState == "complete") ) {
+ this.readyState === "loaded" || this.readyState === "complete") ) {
done = true;
success();
complete();
// Handle memory leak in IE
script.onload = script.onreadystatechange = null;
- head.removeChild( script );
+ if ( head && script.parentNode ) {
+ head.removeChild( script );
+ }
}
};
}
- head.appendChild(script);
+ // Use insertBefore instead of appendChild to circumvent an IE6 bug.
+ // This arises when a base node is used (#2709 and #4378).
+ head.insertBefore( script, head.firstChild );
// We handle everything using the script element injection
return undefined;
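
To exercise the JSONP path above: a "=?" placeholder in the URL or data is rewritten to a generated callback name (or s.jsonpCallback when supplied), the request is downgraded to script injection, and window[jsonp] cleans itself up after firing. A sketch against a hypothetical endpoint:

    jQuery.ajax({
        url: "http://example.org/data?callback=?",
        dataType: "jsonp",
        success: function( data ) {
            window.console && console.log(data);
        }
    });
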
@@ -3509,158 +5101,197 @@ jQuery.extend({
// Create the request object
var xhr = s.xhr();
+ if ( !xhr ) {
+ return;
+ }
+
// Open the socket
// Passing null username, generates a login popup on Opera (#2865)
- if( s.username )
+ if ( s.username ) {
xhr.open(type, s.url, s.async, s.username, s.password);
- else
+ } else {
xhr.open(type, s.url, s.async);
+ }
// Need an extra try/catch for cross domain requests in Firefox 3
try {
// Set the correct header, if data is being sent
- if ( s.data )
+ if ( s.data || origSettings && origSettings.contentType ) {
xhr.setRequestHeader("Content-Type", s.contentType);
+ }
+
+ // Set the If-Modified-Since and/or If-None-Match header, if in ifModified mode.
+ if ( s.ifModified ) {
+ if ( jQuery.lastModified[s.url] ) {
+ xhr.setRequestHeader("If-Modified-Since", jQuery.lastModified[s.url]);
+ }
- // Set the If-Modified-Since header, if ifModified mode.
- if ( s.ifModified )
- xhr.setRequestHeader("If-Modified-Since",
- jQuery.lastModified[s.url] || "Thu, 01 Jan 1970 00:00:00 GMT" );
+ if ( jQuery.etag[s.url] ) {
+ xhr.setRequestHeader("If-None-Match", jQuery.etag[s.url]);
+ }
+ }
// Set header so the called script knows that it's an XMLHttpRequest
- xhr.setRequestHeader("X-Requested-With", "XMLHttpRequest");
+ // Only send the header if it's not a remote XHR
+ if ( !remote ) {
+ xhr.setRequestHeader("X-Requested-With", "XMLHttpRequest");
+ }
// Set the Accepts header for the server, depending on the dataType
xhr.setRequestHeader("Accept", s.dataType && s.accepts[ s.dataType ] ?
s.accepts[ s.dataType ] + ", */*" :
s.accepts._default );
- } catch(e){}
+ } catch(e) {}
// Allow custom headers/mimetypes and early abort
- if ( s.beforeSend && s.beforeSend(xhr, s) === false ) {
+ if ( s.beforeSend && s.beforeSend.call(callbackContext, xhr, s) === false ) {
// Handle the global AJAX counter
- if ( s.global && ! --jQuery.active )
+ if ( s.global && ! --jQuery.active ) {
jQuery.event.trigger( "ajaxStop" );
+ }
+
+ // close the opened socket
xhr.abort();
return false;
}
- if ( s.global )
- jQuery.event.trigger("ajaxSend", [xhr, s]);
+ if ( s.global ) {
+ trigger("ajaxSend", [xhr, s]);
+ }
// Wait for a response to come back
- var onreadystatechange = function(isTimeout){
- // The request was aborted, clear the interval and decrement jQuery.active
- if (xhr.readyState == 0) {
- if (ival) {
- // clear poll interval
- clearInterval(ival);
- ival = null;
- // Handle the global AJAX counter
- if ( s.global && ! --jQuery.active )
- jQuery.event.trigger( "ajaxStop" );
+ var onreadystatechange = xhr.onreadystatechange = function( isTimeout ) {
+ // The request was aborted
+ if ( !xhr || xhr.readyState === 0 || isTimeout === "abort" ) {
+ // Opera doesn't call onreadystatechange before this point
+ // so we simulate the call
+ if ( !requestDone ) {
+ complete();
+ }
+
+ requestDone = true;
+ if ( xhr ) {
+ xhr.onreadystatechange = jQuery.noop;
}
+
// The transfer is complete and the data is available, or the request timed out
- } else if ( !requestDone && xhr && (xhr.readyState == 4 || isTimeout == "timeout") ) {
+ } else if ( !requestDone && xhr && (xhr.readyState === 4 || isTimeout === "timeout") ) {
requestDone = true;
+ xhr.onreadystatechange = jQuery.noop;
- // clear poll interval
- if (ival) {
- clearInterval(ival);
- ival = null;
- }
+ status = isTimeout === "timeout" ?
+ "timeout" :
+ !jQuery.httpSuccess( xhr ) ?
+ "error" :
+ s.ifModified && jQuery.httpNotModified( xhr, s.url ) ?
+ "notmodified" :
+ "success";
- status = isTimeout == "timeout" ? "timeout" :
- !jQuery.httpSuccess( xhr ) ? "error" :
- s.ifModified && jQuery.httpNotModified( xhr, s.url ) ? "notmodified" :
- "success";
+ var errMsg;
- if ( status == "success" ) {
+ if ( status === "success" ) {
// Watch for, and catch, XML document parse errors
try {
// process the data (runs the xml through httpData regardless of callback)
data = jQuery.httpData( xhr, s.dataType, s );
- } catch(e) {
+ } catch(err) {
status = "parsererror";
+ errMsg = err;
}
}
// Make sure that the request was successful or notmodified
- if ( status == "success" ) {
- // Cache Last-Modified header, if ifModified mode.
- var modRes;
- try {
- modRes = xhr.getResponseHeader("Last-Modified");
- } catch(e) {} // swallow exception thrown by FF if header is not available
-
- if ( s.ifModified && modRes )
- jQuery.lastModified[s.url] = modRes;
-
+ if ( status === "success" || status === "notmodified" ) {
// JSONP handles its own success callback
- if ( !jsonp )
+ if ( !jsonp ) {
success();
- } else
- jQuery.handleError(s, xhr, status);
+ }
+ } else {
+ jQuery.handleError(s, xhr, status, errMsg);
+ }
// Fire the complete handlers
complete();
- if ( isTimeout )
+ if ( isTimeout === "timeout" ) {
xhr.abort();
+ }
// Stop memory leaks
- if ( s.async )
+ if ( s.async ) {
xhr = null;
+ }
}
};
- if ( s.async ) {
- // don't attach the handler to the request, just poll it instead
- var ival = setInterval(onreadystatechange, 13);
+ // Override the abort handler, if we can (IE doesn't allow it, but that's OK)
+ // Opera doesn't fire onreadystatechange at all on abort
+ try {
+ var oldAbort = xhr.abort;
+ xhr.abort = function() {
+ if ( xhr ) {
+ oldAbort.call( xhr );
+ }
- // Timeout checker
- if ( s.timeout > 0 )
- setTimeout(function(){
- // Check to see if the request is still happening
- if ( xhr && !requestDone )
- onreadystatechange( "timeout" );
- }, s.timeout);
+ onreadystatechange( "abort" );
+ };
+ } catch(e) { }
+
+ // Timeout checker
+ if ( s.async && s.timeout > 0 ) {
+ setTimeout(function() {
+ // Check to see if the request is still happening
+ if ( xhr && !requestDone ) {
+ onreadystatechange( "timeout" );
+ }
+ }, s.timeout);
}
// Send the data
try {
- xhr.send(s.data);
+ xhr.send( type === "POST" || type === "PUT" || type === "DELETE" ? s.data : null );
} catch(e) {
jQuery.handleError(s, xhr, null, e);
+ // Fire the complete handlers
+ complete();
}
// firefox 1.5 doesn't fire statechange for sync requests
- if ( !s.async )
+ if ( !s.async ) {
onreadystatechange();
+ }
- function success(){
+ function success() {
// If a local callback was specified, fire it and pass it the data
- if ( s.success )
- s.success( data, status );
+ if ( s.success ) {
+ s.success.call( callbackContext, data, status, xhr );
+ }
// Fire the global callback
- if ( s.global )
- jQuery.event.trigger( "ajaxSuccess", [xhr, s] );
+ if ( s.global ) {
+ trigger( "ajaxSuccess", [xhr, s] );
+ }
}
- function complete(){
+ function complete() {
// Process result
- if ( s.complete )
- s.complete(xhr, status);
+ if ( s.complete ) {
+ s.complete.call( callbackContext, xhr, status);
+ }
// The request was completed
- if ( s.global )
- jQuery.event.trigger( "ajaxComplete", [xhr, s] );
+ if ( s.global ) {
+ trigger( "ajaxComplete", [xhr, s] );
+ }
// Handle the global AJAX counter
- if ( s.global && ! --jQuery.active )
+ if ( s.global && ! --jQuery.active ) {
jQuery.event.trigger( "ajaxStop" );
+ }
+ }
+
+ function trigger(type, args) {
+ (s.context ? jQuery(s.context) : jQuery.event).trigger(type, args);
}
// return XMLHttpRequest to allow aborting the request etc.
@@ -3669,11 +5300,14 @@ jQuery.extend({
handleError: function( s, xhr, status, e ) {
// If a local callback was specified, fire it
- if ( s.error ) s.error( xhr, status, e );
+ if ( s.error ) {
+ s.error.call( s.context || s, xhr, status, e );
+ }
// Fire the global callback
- if ( s.global )
- jQuery.event.trigger( "ajaxError", [xhr, s, e] );
+ if ( s.global ) {
+ (s.context ? jQuery(s.context) : jQuery.event).trigger( "ajaxError", [xhr, s, e] );
+ }
},
// Counter for holding the number of active queries
@@ -3683,86 +5317,131 @@ jQuery.extend({
httpSuccess: function( xhr ) {
try {
// IE error sometimes returns 1223 when it should be 204 so treat it as success, see #1450
- return !xhr.status && location.protocol == "file:" ||
- ( xhr.status >= 200 && xhr.status < 300 ) || xhr.status == 304 || xhr.status == 1223;
- } catch(e){}
+ return !xhr.status && location.protocol === "file:" ||
+ // Opera returns 0 when status is 304
+ ( xhr.status >= 200 && xhr.status < 300 ) ||
+ xhr.status === 304 || xhr.status === 1223 || xhr.status === 0;
+ } catch(e) {}
+
return false;
},
// Determines if an XMLHttpRequest returns NotModified
httpNotModified: function( xhr, url ) {
- try {
- var xhrRes = xhr.getResponseHeader("Last-Modified");
+ var lastModified = xhr.getResponseHeader("Last-Modified"),
+ etag = xhr.getResponseHeader("Etag");
- // Firefox always returns 200. check Last-Modified date
- return xhr.status == 304 || xhrRes == jQuery.lastModified[url];
- } catch(e){}
- return false;
+ if ( lastModified ) {
+ jQuery.lastModified[url] = lastModified;
+ }
+
+ if ( etag ) {
+ jQuery.etag[url] = etag;
+ }
+
+ // Opera returns 0 when status is 304
+ return xhr.status === 304 || xhr.status === 0;
},
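
With the new etag cache alongside lastModified, an ifModified request replays the stored Last-Modified/ETag values as If-Modified-Since/If-None-Match, so an unchanged resource completes with status "notmodified" instead of "success". Sketch (URL hypothetical, assuming the server honors conditional headers):

    jQuery.ajax({
        url: "/status.json",
        ifModified: true,
        complete: function( xhr, status ) {
            // "success" on the first fetch, "notmodified" on unchanged repeats
        }
    });
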
httpData: function( xhr, type, s ) {
- var ct = xhr.getResponseHeader("content-type"),
- xml = type == "xml" || !type && ct && ct.indexOf("xml") >= 0,
+ var ct = xhr.getResponseHeader("content-type") || "",
+ xml = type === "xml" || !type && ct.indexOf("xml") >= 0,
data = xml ? xhr.responseXML : xhr.responseText;
- if ( xml && data.documentElement.tagName == "parsererror" )
- throw "parsererror";
-
+ if ( xml && data.documentElement.nodeName === "parsererror" ) {
+ jQuery.error( "parsererror" );
+ }
+
// Allow a pre-filtering function to sanitize the response
- // s != null is checked to keep backwards compatibility
- if( s && s.dataFilter )
+ // s is checked to keep backwards compatibility
+ if ( s && s.dataFilter ) {
data = s.dataFilter( data, type );
+ }
// The filter can actually parse the response
- if( typeof data === "string" ){
+ if ( typeof data === "string" ) {
+ // Get the JavaScript object, if JSON is used.
+ if ( type === "json" || !type && ct.indexOf("json") >= 0 ) {
+ data = jQuery.parseJSON( data );
// If the type is "script", eval it in global context
- if ( type == "script" )
+ } else if ( type === "script" || !type && ct.indexOf("javascript") >= 0 ) {
jQuery.globalEval( data );
-
- // Get the JavaScript object, if JSON is used.
- if ( type == "json" )
- data = window["eval"]("(" + data + ")");
+ }
}
-
+
return data;
},
// Serialize an array of form elements or a set of
// key/values into a query string
- param: function( a ) {
- var s = [ ];
-
- function add( key, value ){
- s[ s.length ] = encodeURIComponent(key) + '=' + encodeURIComponent(value);
- };
-
- // If an array was passed in, assume that it is an array
- // of form elements
- if ( jQuery.isArray(a) || a.jquery )
+ param: function( a, traditional ) {
+ var s = [];
+
+ // Set traditional to true for jQuery <= 1.3.2 behavior.
+ if ( traditional === undefined ) {
+ traditional = jQuery.ajaxSettings.traditional;
+ }
+
+ // If an array was passed in, assume that it is an array of form elements.
+ if ( jQuery.isArray(a) || a.jquery ) {
// Serialize the form elements
- jQuery.each( a, function(){
+ jQuery.each( a, function() {
add( this.name, this.value );
});
-
- // Otherwise, assume that it's an object of key/value pairs
- else
- // Serialize the key/values
- for ( var j in a )
- // If the value is an array then the key names need to be repeated
- if ( jQuery.isArray(a[j]) )
- jQuery.each( a[j], function(){
- add( j, this );
- });
- else
- add( j, jQuery.isFunction(a[j]) ? a[j]() : a[j] );
+
+ } else {
+ // If traditional, encode the "old" way (the way 1.3.2 or older
+ // did it), otherwise encode params recursively.
+ for ( var prefix in a ) {
+ buildParams( prefix, a[prefix] );
+ }
+ }
// Return the resulting serialization
- return s.join("&").replace(/%20/g, "+");
- }
+ return s.join("&").replace(r20, "+");
+
+ function buildParams( prefix, obj ) {
+ if ( jQuery.isArray(obj) ) {
+ // Serialize array item.
+ jQuery.each( obj, function( i, v ) {
+ if ( traditional || /\[\]$/.test( prefix ) ) {
+ // Treat each array item as a scalar.
+ add( prefix, v );
+ } else {
+ // If array item is non-scalar (array or object), encode its
+ // numeric index to resolve deserialization ambiguity issues.
+ // Note that rack (as of 1.0.0) can't currently deserialize
+ // nested arrays properly, and attempting to do so may cause
+ // a server error. Possible fixes are to modify rack's
+ // deserialization algorithm or to provide an option or flag
+ // to force array serialization to be shallow.
+ buildParams( prefix + "[" + ( typeof v === "object" || jQuery.isArray(v) ? i : "" ) + "]", v );
+ }
+ });
+
+ } else if ( !traditional && obj != null && typeof obj === "object" ) {
+ // Serialize object item.
+ jQuery.each( obj, function( k, v ) {
+ buildParams( prefix + "[" + k + "]", v );
+ });
+
+ } else {
+ // Serialize scalar item.
+ add( prefix, obj );
+ }
+ }
+ function add( key, value ) {
+ // If value is a function, invoke it and return its value
+ value = jQuery.isFunction(value) ? value() : value;
+ s[ s.length ] = encodeURIComponent(key) + "=" + encodeURIComponent(value);
+ }
+ }
});
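
The traditional flag preserves 1.3.2-style shallow serialization, while the default now recurses into arrays and objects. Shown percent-decoded for readability:

    jQuery.param({ a: [ 1, 2 ], b: { c: 3 } });
    //   a[]=1&a[]=2&b[c]=3         (nested, the new default)

    jQuery.param({ a: [ 1, 2 ], b: { c: 3 } }, true);
    //   a=1&a=2&b=[object Object]  (traditional: no recursion into objects)
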
var elemdisplay = {},
+ rfxtypes = /toggle|show|hide/,
+ rfxnum = /^([+-]=)?([\d+-.]+)(.*)$/,
timerId,
fxAttrs = [
// height animations
@@ -3773,69 +5452,67 @@ var elemdisplay = {},
[ "opacity" ]
];
-function genFx( type, num ){
- var obj = {};
- jQuery.each( fxAttrs.concat.apply([], fxAttrs.slice(0,num)), function(){
- obj[ this ] = type;
- });
- return obj;
-}
-
jQuery.fn.extend({
- show: function(speed,callback){
- if ( speed ) {
+ show: function( speed, callback ) {
+ if ( speed || speed === 0) {
return this.animate( genFx("show", 3), speed, callback);
+
} else {
- for ( var i = 0, l = this.length; i < l; i++ ){
+ for ( var i = 0, l = this.length; i < l; i++ ) {
var old = jQuery.data(this[i], "olddisplay");
-
+
this[i].style.display = old || "";
-
+
if ( jQuery.css(this[i], "display") === "none" ) {
- var tagName = this[i].tagName, display;
-
- if ( elemdisplay[ tagName ] ) {
- display = elemdisplay[ tagName ];
+ var nodeName = this[i].nodeName, display;
+
+ if ( elemdisplay[ nodeName ] ) {
+ display = elemdisplay[ nodeName ];
+
} else {
- var elem = jQuery("<" + tagName + " />").appendTo("body");
-
+ var elem = jQuery("<" + nodeName + " />").appendTo("body");
+
display = elem.css("display");
- if ( display === "none" )
+
+ if ( display === "none" ) {
display = "block";
-
+ }
+
elem.remove();
-
- elemdisplay[ tagName ] = display;
+
+ elemdisplay[ nodeName ] = display;
}
-
+
jQuery.data(this[i], "olddisplay", display);
}
}
// Set the display of the elements in a second loop
// to avoid the constant reflow
- for ( var i = 0, l = this.length; i < l; i++ ){
- this[i].style.display = jQuery.data(this[i], "olddisplay") || "";
+ for ( var j = 0, k = this.length; j < k; j++ ) {
+ this[j].style.display = jQuery.data(this[j], "olddisplay") || "";
}
-
+
return this;
}
},
- hide: function(speed,callback){
- if ( speed ) {
+ hide: function( speed, callback ) {
+ if ( speed || speed === 0 ) {
return this.animate( genFx("hide", 3), speed, callback);
+
} else {
- for ( var i = 0, l = this.length; i < l; i++ ){
+ for ( var i = 0, l = this.length; i < l; i++ ) {
var old = jQuery.data(this[i], "olddisplay");
- if ( !old && old !== "none" )
+ if ( !old && old !== "none" ) {
jQuery.data(this[i], "olddisplay", jQuery.css(this[i], "display"));
+ }
}
// Set the display of the elements in a second loop
// to avoid the constant reflow
- for ( var i = 0, l = this.length; i < l; i++ ){
- this[i].style.display = "none";
+ for ( var j = 0, k = this.length; j < k; j++ ) {
+ this[j].style.display = "none";
}
return this;
@@ -3845,77 +5522,107 @@ jQuery.fn.extend({
// Save the old toggle function
_toggle: jQuery.fn.toggle,
- toggle: function( fn, fn2 ){
+ toggle: function( fn, fn2 ) {
var bool = typeof fn === "boolean";
- return jQuery.isFunction(fn) && jQuery.isFunction(fn2) ?
- this._toggle.apply( this, arguments ) :
- fn == null || bool ?
- this.each(function(){
- var state = bool ? fn : jQuery(this).is(":hidden");
- jQuery(this)[ state ? "show" : "hide" ]();
- }) :
- this.animate(genFx("toggle", 3), fn, fn2);
+ if ( jQuery.isFunction(fn) && jQuery.isFunction(fn2) ) {
+ this._toggle.apply( this, arguments );
+
+ } else if ( fn == null || bool ) {
+ this.each(function() {
+ var state = bool ? fn : jQuery(this).is(":hidden");
+ jQuery(this)[ state ? "show" : "hide" ]();
+ });
+
+ } else {
+ this.animate(genFx("toggle", 3), fn, fn2);
+ }
+
+ return this;
},
- fadeTo: function(speed,to,callback){
- return this.animate({opacity: to}, speed, callback);
+ fadeTo: function( speed, to, callback ) {
+ return this.filter(":hidden").css("opacity", 0).show().end()
+ .animate({opacity: to}, speed, callback);
},
animate: function( prop, speed, easing, callback ) {
var optall = jQuery.speed(speed, easing, callback);
- return this[ optall.queue === false ? "each" : "queue" ](function(){
-
+ if ( jQuery.isEmptyObject( prop ) ) {
+ return this.each( optall.complete );
+ }
+
+ return this[ optall.queue === false ? "each" : "queue" ](function() {
var opt = jQuery.extend({}, optall), p,
- hidden = this.nodeType == 1 && jQuery(this).is(":hidden"),
+ hidden = this.nodeType === 1 && jQuery(this).is(":hidden"),
self = this;
-
+
for ( p in prop ) {
- if ( prop[p] == "hide" && hidden || prop[p] == "show" && !hidden )
+ var name = p.replace(rdashAlpha, fcamelCase);
+
+ if ( p !== name ) {
+ prop[ name ] = prop[ p ];
+ delete prop[ p ];
+ p = name;
+ }
+
+ if ( prop[p] === "hide" && hidden || prop[p] === "show" && !hidden ) {
return opt.complete.call(this);
+ }
- if ( ( p == "height" || p == "width" ) && this.style ) {
+ if ( ( p === "height" || p === "width" ) && this.style ) {
// Store display property
opt.display = jQuery.css(this, "display");
// Make sure that nothing sneaks out
opt.overflow = this.style.overflow;
}
+
+ if ( jQuery.isArray( prop[p] ) ) {
+ // Create (if needed) and add to specialEasing
+ (opt.specialEasing = opt.specialEasing || {})[p] = prop[p][1];
+ prop[p] = prop[p][0];
+ }
}
- if ( opt.overflow != null )
+ if ( opt.overflow != null ) {
this.style.overflow = "hidden";
+ }
opt.curAnim = jQuery.extend({}, prop);
- jQuery.each( prop, function(name, val){
+ jQuery.each( prop, function( name, val ) {
var e = new jQuery.fx( self, opt, name );
- if ( /toggle|show|hide/.test(val) )
- e[ val == "toggle" ? hidden ? "show" : "hide" : val ]( prop );
- else {
- var parts = val.toString().match(/^([+-]=)?([\d+-.]+)(.*)$/),
+ if ( rfxtypes.test(val) ) {
+ e[ val === "toggle" ? hidden ? "show" : "hide" : val ]( prop );
+
+ } else {
+ var parts = rfxnum.exec(val),
start = e.cur(true) || 0;
if ( parts ) {
- var end = parseFloat(parts[2]),
+ var end = parseFloat( parts[2] ),
unit = parts[3] || "px";
// We need to compute starting value
- if ( unit != "px" ) {
+ if ( unit !== "px" ) {
self.style[ name ] = (end || 1) + unit;
start = ((end || 1) / e.cur(true)) * start;
self.style[ name ] = start + unit;
}
// If a +=/-= token was provided, we're doing a relative animation
- if ( parts[1] )
- end = ((parts[1] == "-=" ? -1 : 1) * end) + start;
+ if ( parts[1] ) {
+ end = ((parts[1] === "-=" ? -1 : 1) * end) + start;
+ }
e.custom( start, end, unit );
- } else
+
+ } else {
e.custom( start, val, "" );
+ }
}
});
@@ -3924,26 +5631,31 @@ jQuery.fn.extend({
});
},
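
The loop above also enables per-property easing: an animated value may be a [ target, easingName ] pair, with the easing stored in opt.specialEasing and looked up in step(). For example (element hypothetical; "linear" and "swing" ship with core):

    jQuery("#panel").animate({
        width:   [ 200, "linear" ],   // width eases linearly
        opacity: [ 0.5, "swing" ]     // opacity swings, concurrently
    }, 400);
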
- stop: function(clearQueue, gotoEnd){
+ stop: function( clearQueue, gotoEnd ) {
var timers = jQuery.timers;
- if (clearQueue)
+ if ( clearQueue ) {
this.queue([]);
+ }
- this.each(function(){
+ this.each(function() {
// go in reverse order so anything added to the queue during the loop is ignored
- for ( var i = timers.length - 1; i >= 0; i-- )
- if ( timers[i].elem == this ) {
- if (gotoEnd)
+ for ( var i = timers.length - 1; i >= 0; i-- ) {
+ if ( timers[i].elem === this ) {
+ if (gotoEnd) {
// force the next step to be the last
timers[i](true);
+ }
+
timers.splice(i, 1);
}
+ }
});
// start the next in the queue if the last step wasn't forced
- if (!gotoEnd)
+ if ( !gotoEnd ) {
this.dequeue();
+ }
return this;
}
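
stop() walks jQuery.timers in reverse so that splicing an entry out never skips an unprocessed one. Usage sketch (element hypothetical):

    var panel = jQuery("#panel").animate({ width: 400 }, 2000);
    panel.stop( true, true );   // clear the queue and jump to width:400 now
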
@@ -3957,16 +5669,15 @@ jQuery.each({
slideToggle: genFx("toggle", 1),
fadeIn: { opacity: "show" },
fadeOut: { opacity: "hide" }
-}, function( name, props ){
- jQuery.fn[ name ] = function( speed, callback ){
+}, function( name, props ) {
+ jQuery.fn[ name ] = function( speed, callback ) {
return this.animate( props, speed, callback );
};
});
jQuery.extend({
-
- speed: function(speed, easing, fn) {
- var opt = typeof speed === "object" ? speed : {
+ speed: function( speed, easing, fn ) {
+ var opt = speed && typeof speed === "object" ? speed : {
complete: fn || !fn && easing ||
jQuery.isFunction( speed ) && speed,
duration: speed,
@@ -3978,11 +5689,13 @@ jQuery.extend({
// Queueing
opt.old = opt.complete;
- opt.complete = function(){
- if ( opt.queue !== false )
+ opt.complete = function() {
+ if ( opt.queue !== false ) {
jQuery(this).dequeue();
- if ( jQuery.isFunction( opt.old ) )
+ }
+ if ( jQuery.isFunction( opt.old ) ) {
opt.old.call( this );
+ }
};
return opt;
@@ -3999,42 +5712,45 @@ jQuery.extend({
timers: [],
- fx: function( elem, options, prop ){
+ fx: function( elem, options, prop ) {
this.options = options;
this.elem = elem;
this.prop = prop;
- if ( !options.orig )
+ if ( !options.orig ) {
options.orig = {};
+ }
}
});
jQuery.fx.prototype = {
-
// Simple function for setting a style value
- update: function(){
- if ( this.options.step )
+ update: function() {
+ if ( this.options.step ) {
this.options.step.call( this.elem, this.now, this );
+ }
(jQuery.fx.step[this.prop] || jQuery.fx.step._default)( this );
// Set display property to block for height/width animations
- if ( ( this.prop == "height" || this.prop == "width" ) && this.elem.style )
+ if ( ( this.prop === "height" || this.prop === "width" ) && this.elem.style ) {
this.elem.style.display = "block";
+ }
},
// Get the current size
- cur: function(force){
- if ( this.elem[this.prop] != null && (!this.elem.style || this.elem.style[this.prop] == null) )
+ cur: function( force ) {
+ if ( this.elem[this.prop] != null && (!this.elem.style || this.elem.style[this.prop] == null) ) {
return this.elem[ this.prop ];
+ }
var r = parseFloat(jQuery.css(this.elem, this.prop, force));
return r && r > -10000 ? r : parseFloat(jQuery.curCSS(this.elem, this.prop)) || 0;
},
// Start an animation from one number to another
- custom: function(from, to, unit){
+ custom: function( from, to, unit ) {
this.startTime = now();
this.start = from;
this.end = to;
@@ -4043,47 +5759,36 @@ jQuery.fx.prototype = {
this.pos = this.state = 0;
var self = this;
- function t(gotoEnd){
+ function t( gotoEnd ) {
return self.step(gotoEnd);
}
t.elem = this.elem;
if ( t() && jQuery.timers.push(t) && !timerId ) {
- timerId = setInterval(function(){
- var timers = jQuery.timers;
-
- for ( var i = 0; i < timers.length; i++ )
- if ( !timers[i]() )
- timers.splice(i--, 1);
-
- if ( !timers.length ) {
- clearInterval( timerId );
- timerId = undefined;
- }
- }, 13);
+ timerId = setInterval(jQuery.fx.tick, 13);
}
},
// Simple 'show' function
- show: function(){
+ show: function() {
// Remember where we started, so that we can go back to it later
- this.options.orig[this.prop] = jQuery.attr( this.elem.style, this.prop );
+ this.options.orig[this.prop] = jQuery.style( this.elem, this.prop );
this.options.show = true;
// Begin the animation
// Make sure that we start at a small width/height to avoid any
// flash of content
- this.custom(this.prop == "width" || this.prop == "height" ? 1 : 0, this.cur());
+ this.custom(this.prop === "width" || this.prop === "height" ? 1 : 0, this.cur());
// Start by showing the element
- jQuery(this.elem).show();
+ jQuery( this.elem ).show();
},
// Simple 'hide' function
- hide: function(){
+ hide: function() {
// Remember where we started, so that we can go back to it later
- this.options.orig[this.prop] = jQuery.attr( this.elem.style, this.prop );
+ this.options.orig[this.prop] = jQuery.style( this.elem, this.prop );
this.options.hide = true;
// Begin the animation
@@ -4091,8 +5796,8 @@ jQuery.fx.prototype = {
},
// Each step of an animation
- step: function(gotoEnd){
- var t = now();
+ step: function( gotoEnd ) {
+ var t = now(), done = true;
if ( gotoEnd || t >= this.options.duration + this.startTime ) {
this.now = this.end;
@@ -4101,10 +5806,11 @@ jQuery.fx.prototype = {
this.options.curAnim[ this.prop ] = true;
- var done = true;
- for ( var i in this.options.curAnim )
- if ( this.options.curAnim[i] !== true )
+ for ( var i in this.options.curAnim ) {
+ if ( this.options.curAnim[i] !== true ) {
done = false;
+ }
+ }
if ( done ) {
if ( this.options.display != null ) {
@@ -4112,31 +5818,40 @@ jQuery.fx.prototype = {
this.elem.style.overflow = this.options.overflow;
// Reset the display
- this.elem.style.display = this.options.display;
- if ( jQuery.css(this.elem, "display") == "none" )
+ var old = jQuery.data(this.elem, "olddisplay");
+ this.elem.style.display = old ? old : this.options.display;
+
+ if ( jQuery.css(this.elem, "display") === "none" ) {
this.elem.style.display = "block";
+ }
}
// Hide the element if the "hide" operation was done
- if ( this.options.hide )
+ if ( this.options.hide ) {
jQuery(this.elem).hide();
+ }
// Reset the properties, if the item has been hidden or shown
- if ( this.options.hide || this.options.show )
- for ( var p in this.options.curAnim )
- jQuery.attr(this.elem.style, p, this.options.orig[p]);
-
+ if ( this.options.hide || this.options.show ) {
+ for ( var p in this.options.curAnim ) {
+ jQuery.style(this.elem, p, this.options.orig[p]);
+ }
+ }
+
// Execute the complete function
this.options.complete.call( this.elem );
}
return false;
+
} else {
var n = t - this.startTime;
this.state = n / this.options.duration;
// Perform the easing function, defaults to swing
- this.pos = jQuery.easing[this.options.easing || (jQuery.easing.swing ? "swing" : "linear")](this.state, n, 0, 1, this.options.duration);
+ var specialEasing = this.options.specialEasing && this.options.specialEasing[this.prop];
+ var defaultEasing = this.options.easing || (jQuery.easing.swing ? "swing" : "linear");
+ this.pos = jQuery.easing[specialEasing || defaultEasing](this.state, n, 0, 1, this.options.duration);
this.now = this.start + ((this.end - this.start) * this.pos);
// Perform the next step of the animation
@@ -4145,232 +5860,381 @@ jQuery.fx.prototype = {
return true;
}
-
};
jQuery.extend( jQuery.fx, {
- speeds:{
+ tick: function() {
+ var timers = jQuery.timers;
+
+ for ( var i = 0; i < timers.length; i++ ) {
+ if ( !timers[i]() ) {
+ timers.splice(i--, 1);
+ }
+ }
+
+ if ( !timers.length ) {
+ jQuery.fx.stop();
+ }
+ },
+
+ stop: function() {
+ clearInterval( timerId );
+ timerId = null;
+ },
+
+ speeds: {
slow: 600,
fast: 200,
// Default speed
_default: 400
},
- step: {
- opacity: function(fx){
- jQuery.attr(fx.elem.style, "opacity", fx.now);
+ step: {
+ opacity: function( fx ) {
+ jQuery.style(fx.elem, "opacity", fx.now);
},
- _default: function(fx){
- if ( fx.elem.style && fx.elem.style[ fx.prop ] != null )
- fx.elem.style[ fx.prop ] = fx.now + fx.unit;
- else
+ _default: function( fx ) {
+ if ( fx.elem.style && fx.elem.style[ fx.prop ] != null ) {
+ fx.elem.style[ fx.prop ] = (fx.prop === "width" || fx.prop === "height" ? Math.max(0, fx.now) : fx.now) + fx.unit;
+ } else {
fx.elem[ fx.prop ] = fx.now;
+ }
}
}
});
-if ( document.documentElement["getBoundingClientRect"] )
- jQuery.fn.offset = function() {
- if ( !this[0] ) return { top: 0, left: 0 };
- if ( this[0] === this[0].ownerDocument.body ) return jQuery.offset.bodyOffset( this[0] );
- var box = this[0].getBoundingClientRect(), doc = this[0].ownerDocument, body = doc.body, docElem = doc.documentElement,
+
+if ( jQuery.expr && jQuery.expr.filters ) {
+ jQuery.expr.filters.animated = function( elem ) {
+ return jQuery.grep(jQuery.timers, function( fn ) {
+ return elem === fn.elem;
+ }).length;
+ };
+}
+
+function genFx( type, num ) {
+ var obj = {};
+
+ jQuery.each( fxAttrs.concat.apply([], fxAttrs.slice(0,num)), function() {
+ obj[ this ] = type;
+ });
+
+ return obj;
+}
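
genFx expands a shorthand effect into a property map: num selects how many rows of fxAttrs to merge (1 = the height group, 2 = plus the width group, 3 = plus opacity), and every selected property is assigned the same animation type:

    var slideProps = genFx("toggle", 1);   // height-group properties -> "toggle"
    var showProps  = genFx("show", 3);     // all groups, as used by show(speed)
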
+if ( "getBoundingClientRect" in document.documentElement ) {
+ jQuery.fn.offset = function( options ) {
+ var elem = this[0];
+
+ if ( options ) {
+ return this.each(function( i ) {
+ jQuery.offset.setOffset( this, options, i );
+ });
+ }
+
+ if ( !elem || !elem.ownerDocument ) {
+ return null;
+ }
+
+ if ( elem === elem.ownerDocument.body ) {
+ return jQuery.offset.bodyOffset( elem );
+ }
+
+ var box = elem.getBoundingClientRect(), doc = elem.ownerDocument, body = doc.body, docElem = doc.documentElement,
clientTop = docElem.clientTop || body.clientTop || 0, clientLeft = docElem.clientLeft || body.clientLeft || 0,
- top = box.top + (self.pageYOffset || jQuery.boxModel && docElem.scrollTop || body.scrollTop ) - clientTop,
- left = box.left + (self.pageXOffset || jQuery.boxModel && docElem.scrollLeft || body.scrollLeft) - clientLeft;
+ top = box.top + (self.pageYOffset || jQuery.support.boxModel && docElem.scrollTop || body.scrollTop ) - clientTop,
+ left = box.left + (self.pageXOffset || jQuery.support.boxModel && docElem.scrollLeft || body.scrollLeft) - clientLeft;
+
return { top: top, left: left };
};
-else
- jQuery.fn.offset = function() {
- if ( !this[0] ) return { top: 0, left: 0 };
- if ( this[0] === this[0].ownerDocument.body ) return jQuery.offset.bodyOffset( this[0] );
- jQuery.offset.initialized || jQuery.offset.initialize();
- var elem = this[0], offsetParent = elem.offsetParent, prevOffsetParent = elem,
+} else {
+ jQuery.fn.offset = function( options ) {
+ var elem = this[0];
+
+ if ( options ) {
+ return this.each(function( i ) {
+ jQuery.offset.setOffset( this, options, i );
+ });
+ }
+
+ if ( !elem || !elem.ownerDocument ) {
+ return null;
+ }
+
+ if ( elem === elem.ownerDocument.body ) {
+ return jQuery.offset.bodyOffset( elem );
+ }
+
+ jQuery.offset.initialize();
+
+ var offsetParent = elem.offsetParent, prevOffsetParent = elem,
doc = elem.ownerDocument, computedStyle, docElem = doc.documentElement,
body = doc.body, defaultView = doc.defaultView,
- prevComputedStyle = defaultView.getComputedStyle(elem, null),
+ prevComputedStyle = defaultView ? defaultView.getComputedStyle( elem, null ) : elem.currentStyle,
top = elem.offsetTop, left = elem.offsetLeft;
while ( (elem = elem.parentNode) && elem !== body && elem !== docElem ) {
- computedStyle = defaultView.getComputedStyle(elem, null);
- top -= elem.scrollTop, left -= elem.scrollLeft;
+ if ( jQuery.offset.supportsFixedPosition && prevComputedStyle.position === "fixed" ) {
+ break;
+ }
+
+ computedStyle = defaultView ? defaultView.getComputedStyle(elem, null) : elem.currentStyle;
+ top -= elem.scrollTop;
+ left -= elem.scrollLeft;
+
if ( elem === offsetParent ) {
- top += elem.offsetTop, left += elem.offsetLeft;
- if ( jQuery.offset.doesNotAddBorder && !(jQuery.offset.doesAddBorderForTableAndCells && /^t(able|d|h)$/i.test(elem.tagName)) )
- top += parseInt( computedStyle.borderTopWidth, 10) || 0,
- left += parseInt( computedStyle.borderLeftWidth, 10) || 0;
+ top += elem.offsetTop;
+ left += elem.offsetLeft;
+
+ if ( jQuery.offset.doesNotAddBorder && !(jQuery.offset.doesAddBorderForTableAndCells && /^t(able|d|h)$/i.test(elem.nodeName)) ) {
+ top += parseFloat( computedStyle.borderTopWidth ) || 0;
+ left += parseFloat( computedStyle.borderLeftWidth ) || 0;
+ }
+
prevOffsetParent = offsetParent, offsetParent = elem.offsetParent;
}
- if ( jQuery.offset.subtractsBorderForOverflowNotVisible && computedStyle.overflow !== "visible" )
- top += parseInt( computedStyle.borderTopWidth, 10) || 0,
- left += parseInt( computedStyle.borderLeftWidth, 10) || 0;
+
+ if ( jQuery.offset.subtractsBorderForOverflowNotVisible && computedStyle.overflow !== "visible" ) {
+ top += parseFloat( computedStyle.borderTopWidth ) || 0;
+ left += parseFloat( computedStyle.borderLeftWidth ) || 0;
+ }
+
prevComputedStyle = computedStyle;
}
- if ( prevComputedStyle.position === "relative" || prevComputedStyle.position === "static" )
- top += body.offsetTop,
+ if ( prevComputedStyle.position === "relative" || prevComputedStyle.position === "static" ) {
+ top += body.offsetTop;
left += body.offsetLeft;
+ }
- if ( prevComputedStyle.position === "fixed" )
- top += Math.max(docElem.scrollTop, body.scrollTop),
- left += Math.max(docElem.scrollLeft, body.scrollLeft);
+ if ( jQuery.offset.supportsFixedPosition && prevComputedStyle.position === "fixed" ) {
+ top += Math.max( docElem.scrollTop, body.scrollTop );
+ left += Math.max( docElem.scrollLeft, body.scrollLeft );
+ }
return { top: top, left: left };
};
+}
jQuery.offset = {
initialize: function() {
- if ( this.initialized ) return;
- var body = document.body, container = document.createElement('div'), innerDiv, checkDiv, table, td, rules, prop, bodyMarginTop = body.style.marginTop,
- html = '<div style="position:absolute;top:0;left:0;margin:0;border:5px solid #000;padding:0;width:1px;height:1px;"><div></div></div><table style="position:absolute;top:0;left:0;margin:0;border:5px solid #000;padding:0;width:1px;height:1px;" cellpadding="0" cellspacing="0"><tr><td></td></tr></table>';
+ var body = document.body, container = document.createElement("div"), innerDiv, checkDiv, table, td, bodyMarginTop = parseFloat( jQuery.curCSS(body, "marginTop", true) ) || 0,
+ html = "<div style='position:absolute;top:0;left:0;margin:0;border:5px solid #000;padding:0;width:1px;height:1px;'><div></div></div><table style='position:absolute;top:0;left:0;margin:0;border:5px solid #000;padding:0;width:1px;height:1px;' cellpadding='0' cellspacing='0'><tr><td></td></tr></table>";
- rules = { position: 'absolute', top: 0, left: 0, margin: 0, border: 0, width: '1px', height: '1px', visibility: 'hidden' };
- for ( prop in rules ) container.style[prop] = rules[prop];
+ jQuery.extend( container.style, { position: "absolute", top: 0, left: 0, margin: 0, border: 0, width: "1px", height: "1px", visibility: "hidden" } );
container.innerHTML = html;
- body.insertBefore(container, body.firstChild);
- innerDiv = container.firstChild, checkDiv = innerDiv.firstChild, td = innerDiv.nextSibling.firstChild.firstChild;
+ body.insertBefore( container, body.firstChild );
+ innerDiv = container.firstChild;
+ checkDiv = innerDiv.firstChild;
+ td = innerDiv.nextSibling.firstChild.firstChild;
this.doesNotAddBorder = (checkDiv.offsetTop !== 5);
this.doesAddBorderForTableAndCells = (td.offsetTop === 5);
- innerDiv.style.overflow = 'hidden', innerDiv.style.position = 'relative';
+ checkDiv.style.position = "fixed", checkDiv.style.top = "20px";
+ // Safari subtracts the parent border width here, which is 5px
+ this.supportsFixedPosition = (checkDiv.offsetTop === 20 || checkDiv.offsetTop === 15);
+ checkDiv.style.position = checkDiv.style.top = "";
+
+ innerDiv.style.overflow = "hidden", innerDiv.style.position = "relative";
this.subtractsBorderForOverflowNotVisible = (checkDiv.offsetTop === -5);
- body.style.marginTop = '1px';
- this.doesNotIncludeMarginInBodyOffset = (body.offsetTop === 0);
- body.style.marginTop = bodyMarginTop;
+ this.doesNotIncludeMarginInBodyOffset = (body.offsetTop !== bodyMarginTop);
- body.removeChild(container);
- this.initialized = true;
+ body.removeChild( container );
+ body = container = innerDiv = checkDiv = table = td = null;
+ jQuery.offset.initialize = jQuery.noop;
},
- bodyOffset: function(body) {
- jQuery.offset.initialized || jQuery.offset.initialize();
+ bodyOffset: function( body ) {
var top = body.offsetTop, left = body.offsetLeft;
- if ( jQuery.offset.doesNotIncludeMarginInBodyOffset )
- top += parseInt( jQuery.curCSS(body, 'marginTop', true), 10 ) || 0,
- left += parseInt( jQuery.curCSS(body, 'marginLeft', true), 10 ) || 0;
+
+ jQuery.offset.initialize();
+
+ if ( jQuery.offset.doesNotIncludeMarginInBodyOffset ) {
+ top += parseFloat( jQuery.curCSS(body, "marginTop", true) ) || 0;
+ left += parseFloat( jQuery.curCSS(body, "marginLeft", true) ) || 0;
+ }
+
return { top: top, left: left };
+ },
+
+ setOffset: function( elem, options, i ) {
+ // set position first, in case top/left are set even on a static elem
+ if ( /static/.test( jQuery.curCSS( elem, "position" ) ) ) {
+ elem.style.position = "relative";
+ }
+ var curElem = jQuery( elem ),
+ curOffset = curElem.offset(),
+ curTop = parseInt( jQuery.curCSS( elem, "top", true ), 10 ) || 0,
+ curLeft = parseInt( jQuery.curCSS( elem, "left", true ), 10 ) || 0;
+
+ if ( jQuery.isFunction( options ) ) {
+ options = options.call( elem, i, curOffset );
+ }
+
+ var props = {
+ top: (options.top - curOffset.top) + curTop,
+ left: (options.left - curOffset.left) + curLeft
+ };
+
+ if ( "using" in options ) {
+ options.using.call( elem, props );
+ } else {
+ curElem.css( props );
+ }
}
};
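+// A minimal usage sketch (illustrative, not part of this change): setOffset
+// backs the setter form of .offset(), including the callback variant it
+// handles above via jQuery.isFunction:
+//
+//   $("#box").offset({ top: 10, left: 20 });
+//   $("#box").offset(function( i, cur ) {
+//     return { top: cur.top + 10, left: cur.left };
+//   });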
jQuery.fn.extend({
position: function() {
- var left = 0, top = 0, results;
-
- if ( this[0] ) {
- // Get *real* offsetParent
- var offsetParent = this.offsetParent(),
-
- // Get correct offsets
- offset = this.offset(),
- parentOffset = /^body|html$/i.test(offsetParent[0].tagName) ? { top: 0, left: 0 } : offsetParent.offset();
-
- // Subtract element margins
- // note: when an element has margin: auto the offsetLeft and marginLeft
- // are the same in Safari causing offset.left to incorrectly be 0
- offset.top -= num( this, 'marginTop' );
- offset.left -= num( this, 'marginLeft' );
-
- // Add offsetParent borders
- parentOffset.top += num( offsetParent, 'borderTopWidth' );
- parentOffset.left += num( offsetParent, 'borderLeftWidth' );
-
- // Subtract the two offsets
- results = {
- top: offset.top - parentOffset.top,
- left: offset.left - parentOffset.left
- };
+ if ( !this[0] ) {
+ return null;
}
- return results;
+ var elem = this[0],
+
+ // Get *real* offsetParent
+ offsetParent = this.offsetParent(),
+
+ // Get correct offsets
+ offset = this.offset(),
+ parentOffset = /^body|html$/i.test(offsetParent[0].nodeName) ? { top: 0, left: 0 } : offsetParent.offset();
+
+ // Subtract element margins
+ // note: when an element has margin: auto, the offsetLeft and marginLeft
+ // are the same in Safari, causing offset.left to incorrectly be 0
+ offset.top -= parseFloat( jQuery.curCSS(elem, "marginTop", true) ) || 0;
+ offset.left -= parseFloat( jQuery.curCSS(elem, "marginLeft", true) ) || 0;
+
+ // Add offsetParent borders
+ parentOffset.top += parseFloat( jQuery.curCSS(offsetParent[0], "borderTopWidth", true) ) || 0;
+ parentOffset.left += parseFloat( jQuery.curCSS(offsetParent[0], "borderLeftWidth", true) ) || 0;
+
+ // Subtract the two offsets
+ return {
+ top: offset.top - parentOffset.top,
+ left: offset.left - parentOffset.left
+ };
},
offsetParent: function() {
- var offsetParent = this[0].offsetParent || document.body;
- while ( offsetParent && (!/^body|html$/i.test(offsetParent.tagName) && jQuery.css(offsetParent, 'position') == 'static') )
- offsetParent = offsetParent.offsetParent;
- return jQuery(offsetParent);
+ return this.map(function() {
+ var offsetParent = this.offsetParent || document.body;
+ while ( offsetParent && (!/^body|html$/i.test(offsetParent.nodeName) && jQuery.css(offsetParent, "position") === "static") ) {
+ offsetParent = offsetParent.offsetParent;
+ }
+ return offsetParent;
+ });
}
});
// Create scrollLeft and scrollTop methods
-jQuery.each( ['Left', 'Top'], function(i, name) {
- var method = 'scroll' + name;
-
- jQuery.fn[ method ] = function(val) {
- if (!this[0]) return null;
+jQuery.each( ["Left", "Top"], function( i, name ) {
+ var method = "scroll" + name;
- return val !== undefined ?
+ jQuery.fn[ method ] = function(val) {
+ var elem = this[0], win;
+
+ if ( !elem ) {
+ return null;
+ }
+ if ( val !== undefined ) {
// Set the scroll offset
- this.each(function() {
- this == window || this == document ?
- window.scrollTo(
- !i ? val : jQuery(window).scrollLeft(),
- i ? val : jQuery(window).scrollTop()
- ) :
+ return this.each(function() {
+ win = getWindow( this );
+
+ if ( win ) {
+ win.scrollTo(
+ !i ? val : jQuery(win).scrollLeft(),
+ i ? val : jQuery(win).scrollTop()
+ );
+
+ } else {
this[ method ] = val;
- }) :
+ }
+ });
+ } else {
+ win = getWindow( elem );
// Return the scroll offset
- this[0] == window || this[0] == document ?
- self[ i ? 'pageYOffset' : 'pageXOffset' ] ||
- jQuery.boxModel && document.documentElement[ method ] ||
- document.body[ method ] :
- this[0][ method ];
+ return win ? ("pageXOffset" in win) ? win[ i ? "pageYOffset" : "pageXOffset" ] :
+ jQuery.support.boxModel && win.document.documentElement[ method ] ||
+ win.document.body[ method ] :
+ elem[ method ];
+ }
};
});
+
+function getWindow( elem ) {
+ return ("scrollTo" in elem && elem.document) ?
+ elem :
+ elem.nodeType === 9 ?
+ elem.defaultView || elem.parentWindow :
+ false;
+}
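+// A quick sketch of what getWindow() yields for the scroll methods above:
+//
+//   getWindow( window )        // window itself ("scrollTo" in elem && elem.document)
+//   getWindow( document )      // its window (nodeType 9: defaultView || parentWindow)
+//   getWindow( document.body ) // false -- plain element, so elem[ method ] is used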
// Create innerHeight, innerWidth, outerHeight and outerWidth methods
-jQuery.each([ "Height", "Width" ], function(i, name){
+jQuery.each([ "Height", "Width" ], function( i, name ) {
- var tl = i ? "Left" : "Top", // top or left
- br = i ? "Right" : "Bottom", // bottom or right
- lower = name.toLowerCase();
+ var type = name.toLowerCase();
// innerHeight and innerWidth
- jQuery.fn["inner" + name] = function(){
+ jQuery.fn["inner" + name] = function() {
return this[0] ?
- jQuery.css( this[0], lower, false, "padding" ) :
+ jQuery.css( this[0], type, false, "padding" ) :
null;
};
// outerHeight and outerWidth
- jQuery.fn["outer" + name] = function(margin) {
+ jQuery.fn["outer" + name] = function( margin ) {
return this[0] ?
- jQuery.css( this[0], lower, false, margin ? "margin" : "border" ) :
+ jQuery.css( this[0], type, false, margin ? "margin" : "border" ) :
null;
};
-
- var type = name.toLowerCase();
jQuery.fn[ type ] = function( size ) {
// Get window width or height
- return this[0] == window ?
+ var elem = this[0];
+ if ( !elem ) {
+ return size == null ? null : this;
+ }
+
+ if ( jQuery.isFunction( size ) ) {
+ return this.each(function( i ) {
+ var self = jQuery( this );
+ self[ type ]( size.call( this, i, self[ type ]() ) );
+ });
+ }
+
+ return ("scrollTo" in elem && elem.document) ? // does it walk and quack like a window?
// Everyone else use document.documentElement or document.body depending on Quirks vs Standards mode
- document.compatMode == "CSS1Compat" && document.documentElement[ "client" + name ] ||
- document.body[ "client" + name ] :
+ elem.document.compatMode === "CSS1Compat" && elem.document.documentElement[ "client" + name ] ||
+ elem.document.body[ "client" + name ] :
// Get document width or height
- this[0] == document ?
+ (elem.nodeType === 9) ? // is it a document
// Either scroll[Width/Height] or offset[Width/Height], whichever is greater
Math.max(
- document.documentElement["client" + name],
- document.body["scroll" + name], document.documentElement["scroll" + name],
- document.body["offset" + name], document.documentElement["offset" + name]
+ elem.documentElement["client" + name],
+ elem.body["scroll" + name], elem.documentElement["scroll" + name],
+ elem.body["offset" + name], elem.documentElement["offset" + name]
) :
// Get or set width or height on the element
size === undefined ?
// Get width or height on the element
- (this.length ? jQuery.css( this[0], type ) : null) :
+ jQuery.css( elem, type ) :
// Set the width or height on the element (default to pixels if value is unitless)
this.css( type, typeof size === "string" ? size : size + "px" );
};
});
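+// Illustrative usage of the dimension methods defined above (selector hypothetical):
+//
+//   $(window).width()      // viewport width via client[Width], per compat mode
+//   $(document).height()   // document height: max of client/scroll/offset values
+//   $("#box").height(200)  // setter; unitless values default to "px"
+//   $("#box").height(function( i, h ) { return h * 2; }) // callback form added here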
-})();
+// Expose jQuery to the global object
+window.jQuery = window.$ = jQuery;
+
+})(window);
diff --git a/share/www/script/json2.js b/share/www/script/json2.js
index 48c55361..a1a3b170 100644
--- a/share/www/script/json2.js
+++ b/share/www/script/json2.js
@@ -1,6 +1,6 @@
/*
http://www.JSON.org/json2.js
- 2009-08-17
+ 2010-03-20
Public Domain.
@@ -8,6 +8,14 @@
See http://www.JSON.org/js.html
+
+ This code should be minified before deployment.
+ See http://javascript.crockford.com/jsmin.html
+
+ USE YOUR OWN COPY. IT IS EXTREMELY UNWISE TO LOAD CODE FROM SERVERS YOU DO
+ NOT CONTROL.
+
+
This file creates a global JSON object containing two methods: stringify
and parse.
@@ -136,15 +144,9 @@
This is a reference implementation. You are free to copy, modify, or
redistribute.
-
- This code should be minified before deployment.
- See http://javascript.crockford.com/jsmin.html
-
- USE YOUR OWN COPY. IT IS EXTREMELY UNWISE TO LOAD CODE FROM SERVERS YOU DO
- NOT CONTROL.
*/
-/*jslint evil: true */
+/*jslint evil: true, strict: false */
/*members "", "\b", "\t", "\n", "\f", "\r", "\"", JSON, "\\", apply,
call, charCodeAt, getUTCDate, getUTCFullYear, getUTCHours,
@@ -153,7 +155,6 @@
test, toJSON, toString, valueOf
*/
-"use strict";
// Create a JSON object only if one does not already exist. We create the
// methods in a closure to avoid creating global variables.
@@ -432,6 +433,7 @@ if (!this.JSON) {
// Unicode characters with escape sequences. JavaScript handles many characters
// incorrectly, either silently deleting them, or treating them as line endings.
+ text = String(text);
cx.lastIndex = 0;
if (cx.test(text)) {
text = text.replace(cx, function (a) {
diff --git a/share/www/script/jspec/jspec.css b/share/www/script/jspec/jspec.css
new file mode 100644
index 00000000..629d41c5
--- /dev/null
+++ b/share/www/script/jspec/jspec.css
@@ -0,0 +1,149 @@
+body.jspec {
+ margin: 45px 0;
+ font: 12px "Helvetica Neue Light", "Lucida Grande", "Calibri", "Arial", sans-serif;
+ background: #efefef url(images/bg.png) top left repeat-x;
+ text-align: center;
+}
+#jspec {
+ margin: 0 auto;
+ padding-top: 30px;
+ width: 1008px;
+ background: url(images/vr.png) top left repeat-y;
+ text-align: left;
+}
+#jspec-top {
+ position: relative;
+ margin: 0 auto;
+ width: 1008px;
+ height: 40px;
+ background: url(images/sprites.bg.png) top left no-repeat;
+}
+#jspec-bottom {
+ margin: 0 auto;
+ width: 1008px;
+ height: 15px;
+ background: url(images/sprites.bg.png) bottom left no-repeat;
+}
+#jspec .loading {
+ margin-top: -45px;
+ width: 1008px;
+ height: 80px;
+ background: url(images/loading.gif) 50% 50% no-repeat;
+}
+#jspec-title {
+ position: absolute;
+ top: 15px;
+ left: 20px;
+ width: 160px;
+ font-size: 22px;
+ font-weight: normal;
+ background: url(images/sprites.png) 0 -126px no-repeat;
+ text-align: center;
+}
+#jspec-title em {
+ font-size: 10px;
+ font-style: normal;
+ color: #BCC8D1;
+}
+#jspec-report * {
+ margin: 0;
+ padding: 0;
+ background: none;
+ border: none;
+}
+#jspec-report {
+ padding: 15px 40px;
+ font: 11px "Helvetica Neue Light", "Lucida Grande", "Calibri", "Arial", sans-serif;
+ color: #7B8D9B;
+}
+#jspec-report.has-failures {
+ padding-bottom: 30px;
+}
+#jspec-report .hidden {
+ display: none;
+}
+#jspec-report .heading {
+ margin-bottom: 15px;
+}
+#jspec-report .heading span {
+ padding-right: 10px;
+}
+#jspec-report .heading .passes em {
+ color: #0ea0eb;
+}
+#jspec-report .heading .failures em {
+ color: #FA1616;
+}
+#jspec-report table {
+ font-size: 11px;
+ border-collapse: collapse;
+}
+#jspec-report td {
+ padding: 8px;
+ text-indent: 30px;
+ color: #7B8D9B;
+}
+#jspec-report tr.body {
+ display: none;
+}
+#jspec-report tr.body pre {
+ margin: 0;
+ padding: 0 0 5px 25px;
+}
+#jspec-report tr.even:hover + tr.body,
+#jspec-report tr.odd:hover + tr.body {
+ display: block;
+}
+#jspec-report tr td:first-child em {
+ display: block;
+ clear: both;
+ font-style: normal;
+ font-weight: normal;
+ color: #7B8D9B;
+}
+#jspec-report tr.even:hover,
+#jspec-report tr.odd:hover {
+ text-shadow: 1px 1px 1px #fff;
+ background: #F2F5F7;
+}
+#jspec-report td + td {
+ padding-right: 0;
+ width: 15px;
+}
+#jspec-report td.pass {
+ background: url(images/sprites.png) 3px -7px no-repeat;
+}
+#jspec-report td.fail {
+ background: url(images/sprites.png) 3px -158px no-repeat;
+ font-weight: bold;
+ color: #FC0D0D;
+}
+#jspec-report td.requires-implementation {
+ background: url(images/sprites.png) 3px -333px no-repeat;
+}
+#jspec-report tr.description td {
+ margin-top: 25px;
+ padding-top: 25px;
+ font-size: 12px;
+ font-weight: bold;
+ text-indent: 0;
+ color: #1a1a1a;
+}
+#jspec-report tr.description:first-child td {
+ border-top: none;
+}
+#jspec-report .assertion {
+ display: block;
+ float: left;
+ margin: 0 0 0 1px;
+ padding: 0;
+ width: 1px;
+ height: 5px;
+ background: #7B8D9B;
+}
+#jspec-report .assertion.failed {
+ background: red;
+}
+.jspec-sandbox {
+ display: none;
+} \ No newline at end of file
diff --git a/share/www/script/jspec/jspec.jquery.js b/share/www/script/jspec/jspec.jquery.js
new file mode 100644
index 00000000..fcad7ab9
--- /dev/null
+++ b/share/www/script/jspec/jspec.jquery.js
@@ -0,0 +1,72 @@
+
+// JSpec - jQuery - Copyright TJ Holowaychuk <tj@vision-media.ca> (MIT Licensed)
+
+JSpec
+.requires('jQuery', 'when using jspec.jquery.js')
+.include({
+ name: 'jQuery',
+
+ // --- Initialize
+
+ init : function() {
+ jQuery.ajaxSetup({ async: false })
+ },
+
+ // --- Utilities
+
+ utilities : {
+ element: jQuery,
+ elements: jQuery,
+ sandbox : function() {
+ return jQuery('<div class="sandbox"></div>')
+ }
+ },
+
+ // --- Matchers
+
+ matchers : {
+ have_tag : "jQuery(expected, actual).length === 1",
+ have_one : "alias have_tag",
+ have_tags : "jQuery(expected, actual).length > 1",
+ have_many : "alias have_tags",
+ have_any : "alias have_tags",
+ have_child : "jQuery(actual).children(expected).length === 1",
+ have_children : "jQuery(actual).children(expected).length > 1",
+ have_text : "jQuery(actual).text() === expected",
+ have_value : "jQuery(actual).val() === expected",
+ be_enabled : "!jQuery(actual).attr('disabled')",
+ have_class : "jQuery(actual).hasClass(expected)",
+
+ be_visible : function(actual) {
+ return jQuery(actual).css('display') != 'none' &&
+ jQuery(actual).css('visibility') != 'hidden' &&
+ jQuery(actual).attr('type') != 'hidden'
+ },
+
+ be_hidden : function(actual) {
+ return !JSpec.does(actual, 'be_visible')
+ },
+
+ have_classes : function(actual) {
+ return !JSpec.any(JSpec.toArray(arguments, 1), function(arg){
+ return !JSpec.does(actual, 'have_class', arg)
+ })
+ },
+
+ have_attr : function(actual, attr, value) {
+ return value ? jQuery(actual).attr(attr) == value:
+ jQuery(actual).attr(attr)
+ },
+
+ 'be disabled selected checked' : function(attr) {
+ return 'jQuery(actual).attr("' + attr + '")'
+ },
+
+ 'have type id title alt href src sel rev name target' : function(attr) {
+ return function(actual, value) {
+ return JSpec.does(actual, 'have_attr', attr, value)
+ }
+ }
+ }
+})
+
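+// Usage sketch (illustrative; elem and field stand for hypothetical jQuery
+// objects): with this module included, specs can assert against DOM
+// fragments via the matchers above, e.g.
+//
+//   expect(elem).to('have_tag', 'li')
+//   expect(elem).to('have_attr', 'href', '/index.html')
+//   expect(field).to('be_enabled')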
diff --git a/share/www/script/jspec/jspec.js b/share/www/script/jspec/jspec.js
new file mode 100644
index 00000000..b2ea4768
--- /dev/null
+++ b/share/www/script/jspec/jspec.js
@@ -0,0 +1,1756 @@
+
+// JSpec - Core - Copyright TJ Holowaychuk <tj@vision-media.ca> (MIT Licensed)
+
+;(function(){
+
+ JSpec = {
+ version : '3.3.2',
+ assert : true,
+ cache : {},
+ suites : [],
+ modules : [],
+ allSuites : [],
+ matchers : {},
+ stubbed : [],
+ options : {},
+ request : 'XMLHttpRequest' in this ? XMLHttpRequest : null,
+ stats : { specs: 0, assertions: 0, failures: 0, passes: 0, specsFinished: 0, suitesFinished: 0 },
+
+ /**
+ * Default context in which bodies are evaluated.
+ *
+ * Replace context simply by setting JSpec.context
+ * to your own like below:
+ *
+ * JSpec.context = { foo : 'bar' }
+ *
+ * Contexts can be changed within any body; this can be useful
+ * in order to provide specific helper methods to specific suites.
+ *
+ * To reset (usually in after hook) simply set to null like below:
+ *
+ * JSpec.context = null
+ *
+ */
+
+ defaultContext : {
+
+ /**
+ * Return an object used for proxy assertions.
+ * This object is used to indicate that a value
+ * should be an instance of _constructor_, not the
+ * constructor itself.
+ *
+ * @param {function} constructor
+ * @return {hash}
+ * @api public
+ */
+
+ an_instance_of : function(constructor) {
+ return { an_instance_of : constructor }
+ },
+
+ /**
+ * Load fixture at _path_.
+ *
+ * Fixtures are resolved as:
+ *
+ * - <path>
+ * - <path>.html
+ *
+ * @param {string} path
+ * @return {string}
+ * @api public
+ */
+
+ fixture : function(path) {
+ if (JSpec.cache[path]) return JSpec.cache[path]
+ return JSpec.cache[path] =
+ JSpec.tryLoading(JSpec.options.fixturePath + '/' + path) ||
+ JSpec.tryLoading(JSpec.options.fixturePath + '/' + path + '.html')
+ }
+ },
+
+ // --- Objects
+
+ reporters : {
+
+ /**
+ * Report to server.
+ *
+ * Options:
+ * - uri specific uri to report to.
+ * - verbose whether or not to output messages
+ * - failuresOnly output failure messages only
+ *
+ * @api public
+ */
+
+ Server : function(results, options) {
+ var uri = options.uri || window.location.protocol + "//" + window.location.host + '/results'
+ JSpec.post(uri, {
+ stats: JSpec.stats,
+ options: options,
+ results: map(results.allSuites, function(suite) {
+ if (suite.hasSpecs())
+ return {
+ description: suite.description,
+ specs: map(suite.specs, function(spec) {
+ return {
+ description: spec.description,
+ message: !spec.passed() ? spec.failure().message : null,
+ status: spec.requiresImplementation() ? 'pending' :
+ spec.passed() ? 'pass' :
+ 'fail',
+ assertions: map(spec.assertions, function(assertion){
+ return {
+ passed: assertion.passed
+ }
+ })
+ }
+ })
+ }
+ })
+ })
+ if ('close' in main) main.close()
+ },
+
+ /**
+ * Default reporter, outputting to the DOM.
+ *
+ * Options:
+ * - reportToId id of element to output reports to, defaults to 'jspec'
+ * - failuresOnly displays only suites with failing specs
+ *
+ * @api public
+ */
+
+ DOM : function(results, options) {
+ var id = option('reportToId') || 'jspec',
+ report = document.getElementById(id),
+ failuresOnly = option('failuresOnly'),
+ classes = results.stats.failures ? 'has-failures' : ''
+ if (!report) throw 'JSpec requires the element #' + id + ' to output its reports'
+
+ function bodyContents(body) {
+ return JSpec.
+ escape(JSpec.contentsOf(body)).
+ replace(/^ */gm, function(a){ return (new Array(Math.round(a.length / 3))).join(' ') }).
+ replace(/\r\n|\r|\n/gm, '<br/>')
+ }
+
+ report.innerHTML = '<div id="jspec-report" class="' + classes + '"><div class="heading"> \
+ <span class="passes">Passes: <em>' + results.stats.passes + '</em></span> \
+ <span class="failures">Failures: <em>' + results.stats.failures + '</em></span> \
+ <span class="passes">Duration: <em>' + results.duration + '</em> ms</span> \
+ </div><table class="suites">' + map(results.allSuites, function(suite) {
+ var displaySuite = failuresOnly ? suite.ran && !suite.passed() : suite.ran
+ if (displaySuite && suite.hasSpecs())
+ return '<tr class="description"><td colspan="2">' + escape(suite.description) + '</td></tr>' +
+ map(suite.specs, function(i, spec) {
+ return '<tr class="' + (i % 2 ? 'odd' : 'even') + '">' +
+ (spec.requiresImplementation() ?
+ '<td class="requires-implementation" colspan="2">' + escape(spec.description) + '</td>' :
+ (spec.passed() && !failuresOnly) ?
+ '<td class="pass">' + escape(spec.description)+ '</td><td>' + spec.assertionsGraph() + '</td>' :
+ !spec.passed() ?
+ '<td class="fail">' + escape(spec.description) +
+ map(spec.failures(), function(a){ return '<em>' + escape(a.message) + '</em>' }).join('') +
+ '</td><td>' + spec.assertionsGraph() + '</td>' :
+ '') +
+ '<tr class="body"><td colspan="2"><pre>' + bodyContents(spec.body) + '</pre></td></tr>'
+ }).join('') + '</tr>'
+ }).join('') + '</table></div>'
+ },
+
+ /**
+ * Terminal reporter.
+ *
+ * @api public
+ */
+
+ Terminal : function(results, options) {
+ var failuresOnly = option('failuresOnly')
+ print(color("\n Passes: ", 'bold') + color(results.stats.passes, 'green') +
+ color(" Failures: ", 'bold') + color(results.stats.failures, 'red') +
+ color(" Duration: ", 'bold') + color(results.duration, 'green') + " ms \n")
+
+ function indent(string) {
+ return string.replace(/^(.)/gm, ' $1')
+ }
+
+ each(results.allSuites, function(suite) {
+ var displaySuite = failuresOnly ? suite.ran && !suite.passed() : suite.ran
+ if (displaySuite && suite.hasSpecs()) {
+ print(color(' ' + suite.description, 'bold'))
+ each(suite.specs, function(spec){
+ var assertionsGraph = inject(spec.assertions, '', function(graph, assertion){
+ return graph + color('.', assertion.passed ? 'green' : 'red')
+ })
+ if (spec.requiresImplementation())
+ print(color(' ' + spec.description, 'blue') + assertionsGraph)
+ else if (spec.passed() && !failuresOnly)
+ print(color(' ' + spec.description, 'green') + assertionsGraph)
+ else if (!spec.passed())
+ print(color(' ' + spec.description, 'red') + assertionsGraph +
+ "\n" + indent(map(spec.failures(), function(a){ return a.message }).join("\n")) + "\n")
+ })
+ print("")
+ }
+ })
+
+ quit(results.stats.failures)
+ }
+ },
+
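+ // Illustrative: report() below instantiates JSpec.options.reporter,
+ // defaulting to the DOM reporter, so a runner might do:
+ //
+ // JSpec.run({ reporter: JSpec.reporters.Terminal, failuresOnly: true }).report()
+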
+ Assertion : function(matcher, actual, expected, negate) {
+ extend(this, {
+ message: '',
+ passed: false,
+ actual: actual,
+ negate: negate,
+ matcher: matcher,
+ expected: expected,
+
+ // Report assertion results
+
+ report : function() {
+ if (JSpec.assert)
+ this.passed ? JSpec.stats.passes++ : JSpec.stats.failures++
+ return this
+ },
+
+ // Run the assertion
+
+ run : function() {
+ // TODO: remove unshifting
+ expected.unshift(actual)
+ this.result = matcher.match.apply(this, expected)
+ this.passed = negate ? !this.result : this.result
+ if (!this.passed) this.message = matcher.message.call(this, actual, expected, negate, matcher.name)
+ return this
+ }
+ })
+ },
+
+ ProxyAssertion : function(object, method, times, negate) {
+ var self = this
+ var old = object[method]
+
+ // Proxy
+
+ object[method] = function(){
+ args = toArray(arguments)
+ result = old.apply(object, args)
+ self.calls.push({ args : args, result : result })
+ return result
+ }
+
+ // Times
+
+ this.times = {
+ once : 1,
+ twice : 2
+ }[times] || times || 1
+
+ extend(this, {
+ calls: [],
+ message: '',
+ defer: true,
+ passed: false,
+ negate: negate,
+ object: object,
+ method: method,
+
+ // Proxy return value
+
+ and_return : function(result) {
+ this.expectedResult = result
+ return this
+ },
+
+ // Proxy arguments passed
+
+ with_args : function() {
+ this.expectedArgs = toArray(arguments)
+ return this
+ },
+
+ // Check if any calls have failing results
+
+ anyResultsFail : function() {
+ return any(this.calls, function(call){
+ return self.expectedResult.an_instance_of ?
+ call.result.constructor != self.expectedResult.an_instance_of:
+ !equal(self.expectedResult, call.result)
+ })
+ },
+
+ // Check if any calls have passing results
+
+ anyResultsPass : function() {
+ return any(this.calls, function(call){
+ return self.expectedResult.an_instance_of ?
+ call.result.constructor == self.expectedResult.an_instance_of:
+ equal(self.expectedResult, call.result)
+ })
+ },
+
+ // Return the passing result
+
+ passingResult : function() {
+ return this.anyResultsPass().result
+ },
+
+ // Return the failing result
+
+ failingResult : function() {
+ return this.anyResultsFail().result
+ },
+
+ // Check if any arguments fail
+
+ anyArgsFail : function() {
+ return any(this.calls, function(call){
+ return any(self.expectedArgs, function(i, arg){
+ if (arg == null) return call.args[i] == null
+ return arg.an_instance_of ?
+ call.args[i].constructor != arg.an_instance_of:
+ !equal(arg, call.args[i])
+
+ })
+ })
+ },
+
+ // Check if any arguments pass
+
+ anyArgsPass : function() {
+ return any(this.calls, function(call){
+ return any(self.expectedArgs, function(i, arg){
+ return arg.an_instance_of ?
+ call.args[i].constructor == arg.an_instance_of:
+ equal(arg, call.args[i])
+
+ })
+ })
+ },
+
+ // Return the passing args
+
+ passingArgs : function() {
+ return this.anyArgsPass().args
+ },
+
+ // Return the failing args
+
+ failingArgs : function() {
+ return this.anyArgsFail().args
+ },
+
+ // Report assertion results
+
+ report : function() {
+ if (JSpec.assert)
+ this.passed ? ++JSpec.stats.passes : ++JSpec.stats.failures
+ return this
+ },
+
+ // Run the assertion
+
+ run : function() {
+ var methodString = 'expected ' + object.toString() + '.' + method + '()' + (negate ? ' not' : '' )
+
+ function times(n) {
+ return n > 2 ? n + ' times' : { 1: 'once', 2: 'twice' }[n]
+ }
+
+ if (this.expectedResult != null && (negate ? this.anyResultsPass() : this.anyResultsFail()))
+ this.message = methodString + ' to return ' + puts(this.expectedResult) +
+ ' but ' + (negate ? 'it did' : 'got ' + puts(this.failingResult()))
+
+ if (this.expectedArgs && (negate ? !this.expectedResult && this.anyArgsPass() : this.anyArgsFail()))
+ this.message = methodString + ' to be called with ' + puts.apply(this, this.expectedArgs) +
+ ' but was' + (negate ? '' : ' called with ' + puts.apply(this, this.failingArgs()))
+
+ if (negate ? !this.expectedResult && !this.expectedArgs && this.calls.length >= this.times : this.calls.length != this.times)
+ this.message = methodString + ' to be called ' + times(this.times) +
+ ', but ' + (this.calls.length == 0 ? ' was not called' : ' was called ' + times(this.calls.length))
+
+ if (!this.message.length)
+ this.passed = true
+
+ return this
+ }
+ })
+ },
+
+ /**
+ * Specification Suite block object.
+ *
+ * @param {string} description
+ * @param {function} body
+ * @api private
+ */
+
+ Suite : function(description, body) {
+ var self = this
+ extend(this, {
+ body: body,
+ description: description,
+ suites: [],
+ specs: [],
+ ran: false,
+ hooks: { 'before' : [], 'after' : [], 'before_each' : [], 'after_each' : [] },
+
+ // Add a spec to the suite
+
+ addSpec : function(description, body) {
+ var spec = new JSpec.Spec(description, body)
+ this.specs.push(spec)
+ JSpec.stats.specs++ // TODO: abstract
+ spec.suite = this
+ },
+
+ // Add a hook to the suite
+
+ addHook : function(hook, body) {
+ this.hooks[hook].push(body)
+ },
+
+ // Add a nested suite
+
+ addSuite : function(description, body) {
+ var suite = new JSpec.Suite(description, body)
+ JSpec.allSuites.push(suite)
+ suite.name = suite.description
+ suite.description = this.description + ' ' + suite.description
+ this.suites.push(suite)
+ suite.suite = this
+ },
+
+ // Invoke a hook in context to this suite
+
+ hook : function(hook) {
+ if (this.suite) this.suite.hook(hook)
+ each(this.hooks[hook], function(body) {
+ JSpec.evalBody(body, "Error in hook '" + hook + "', suite '" + self.description + "': ")
+ })
+ },
+
+ // Check if nested suites are present
+
+ hasSuites : function() {
+ return this.suites.length
+ },
+
+ // Check if this suite has specs
+
+ hasSpecs : function() {
+ return this.specs.length
+ },
+
+ // Check if the entire suite passed
+
+ passed : function() {
+ return !any(this.specs, function(spec){
+ return !spec.passed()
+ })
+ }
+ })
+ },
+
+ /**
+ * Specification block object.
+ *
+ * @param {string} description
+ * @param {function} body
+ * @api private
+ */
+
+ Spec : function(description, body) {
+ extend(this, {
+ body: body,
+ description: description,
+ assertions: [],
+
+ // Add passing assertion
+
+ pass : function(message) {
+ this.assertions.push({ passed: true, message: message })
+ if (JSpec.assert) ++JSpec.stats.passes
+ },
+
+ // Add failing assertion
+
+ fail : function(message) {
+ this.assertions.push({ passed: false, message: message })
+ if (JSpec.assert) ++JSpec.stats.failures
+ },
+
+ // Run deferred assertions
+
+ runDeferredAssertions : function() {
+ each(this.assertions, function(assertion){
+ if (assertion.defer) assertion.run().report(), hook('afterAssertion', assertion)
+ })
+ },
+
+ // Find first failing assertion
+
+ failure : function() {
+ return find(this.assertions, function(assertion){
+ return !assertion.passed
+ })
+ },
+
+ // Find all failing assertions
+
+ failures : function() {
+ return select(this.assertions, function(assertion){
+ return !assertion.passed
+ })
+ },
+
+ // Whether or not the spec passed
+
+ passed : function() {
+ return !this.failure()
+ },
+
+ // Whether or not the spec requires implementation (no assertions)
+
+ requiresImplementation : function() {
+ return this.assertions.length == 0
+ },
+
+ // Sprite based assertions graph
+
+ assertionsGraph : function() {
+ return map(this.assertions, function(assertion){
+ return '<span class="assertion ' + (assertion.passed ? 'passed' : 'failed') + '"></span>'
+ }).join('')
+ }
+ })
+ },
+
+ Module : function(methods) {
+ extend(this, methods)
+ },
+
+ JSON : {
+
+ /**
+ * Generic sequences.
+ */
+
+ meta : {
+ '\b' : '\\b',
+ '\t' : '\\t',
+ '\n' : '\\n',
+ '\f' : '\\f',
+ '\r' : '\\r',
+ '"' : '\\"',
+ '\\' : '\\\\'
+ },
+
+ /**
+ * Escapable sequences.
+ */
+
+ escapable : /[\\"\x00-\x1f\x7f-\x9f\u00ad\u0600-\u0604\u070f\u17b4\u17b5\u200c-\u200f\u2028-\u202f\u2060-\u206f\ufeff\ufff0-\uffff]/g,
+
+ /**
+ * JSON encode _object_.
+ *
+ * @param {mixed} object
+ * @return {string}
+ * @api private
+ */
+
+ encode : function(object) {
+ var self = this
+ if (object == undefined || object == null) return 'null'
+ if (object === true) return 'true'
+ if (object === false) return 'false'
+ switch (typeof object) {
+ case 'number': return object
+ case 'string': return this.escapable.test(object) ?
+ '"' + object.replace(this.escapable, function (a) {
+ return typeof self.meta[a] === 'string' ? self.meta[a] :
+ '\\u' + ('0000' + a.charCodeAt(0).toString(16)).slice(-4)
+ }) + '"' :
+ '"' + object + '"'
+ case 'object':
+ if (object.constructor == Array)
+ return '[' + map(object, function(val){
+ return self.encode(val)
+ }).join(', ') + ']'
+ else if (object)
+ return '{' + map(object, function(key, val){
+ return self.encode(key) + ':' + self.encode(val)
+ }).join(', ') + '}'
+ }
+ return 'null'
+ }
+ },
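+
+ // Illustrative: JSpec.JSON.encode({ tags: ['a', 1] }) yields
+ // '{"tags":["a", 1]}' -- strings are escaped via the sequences above,
+ // arrays and objects are encoded recursively and joined with ', '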
+
+ // --- DSLs
+
+ DSLs : {
+ snake : {
+ expect : function(actual){
+ return JSpec.expect(actual)
+ },
+
+ describe : function(description, body) {
+ return JSpec.currentSuite.addSuite(description, body)
+ },
+
+ it : function(description, body) {
+ return JSpec.currentSuite.addSpec(description, body)
+ },
+
+ before : function(body) {
+ return JSpec.currentSuite.addHook('before', body)
+ },
+
+ after : function(body) {
+ return JSpec.currentSuite.addHook('after', body)
+ },
+
+ before_each : function(body) {
+ return JSpec.currentSuite.addHook('before_each', body)
+ },
+
+ after_each : function(body) {
+ return JSpec.currentSuite.addHook('after_each', body)
+ },
+
+ should_behave_like : function(description) {
+ return JSpec.shareBehaviorsOf(description)
+ }
+ }
+ },
+
+ // --- Methods
+
+ /**
+ * Check if _value_ is 'stop'. For use as a
+ * utility callback function.
+ *
+ * @param {mixed} value
+ * @return {bool}
+ * @api public
+ */
+
+ haveStopped : function(value) {
+ return value === 'stop'
+ },
+
+ /**
+ * Include _object_ which may be a hash or Module instance.
+ *
+ * @param {hash, Module} object
+ * @return {JSpec}
+ * @api public
+ */
+
+ include : function(object) {
+ var module = object.constructor == JSpec.Module ? object : new JSpec.Module(object)
+ this.modules.push(module)
+ if ('init' in module) module.init()
+ if ('utilities' in module) extend(this.defaultContext, module.utilities)
+ if ('matchers' in module) this.addMatchers(module.matchers)
+ if ('reporters' in module) extend(this.reporters, module.reporters)
+ if ('DSLs' in module)
+ each(module.DSLs, function(name, methods){
+ JSpec.DSLs[name] = JSpec.DSLs[name] || {}
+ extend(JSpec.DSLs[name], methods)
+ })
+ return this
+ },
+
+ /**
+ * Add a module hook _name_, which is immediately
+ * called per module with the _args_ given. An array of
+ * hook return values is returned.
+ *
+ * @param {string} name
+ * @param {...} args
+ * @return {array}
+ * @api private
+ */
+
+ hook : function(name, args) {
+ args = toArray(arguments, 1)
+ return inject(JSpec.modules, [], function(results, module){
+ if (typeof module[name] == 'function')
+ results.push(JSpec.evalHook(module, name, args))
+ })
+ },
+
+ /**
+ * Eval _module_ hook _name_ with _args_. Evaluates in context
+ * to the module itself, JSpec, and JSpec.context.
+ *
+ * @param {Module} module
+ * @param {string} name
+ * @param {array} args
+ * @return {mixed}
+ * @api private
+ */
+
+ evalHook : function(module, name, args) {
+ hook('evaluatingHookBody', module, name)
+ try { return module[name].apply(module, args) }
+ catch(e) { error('Error in hook ' + module.name + '.' + name + ': ', e) }
+ },
+
+ /**
+ * Same as hook() however accepts only one _arg_ which is
+ * considered immutable. This function passes the arg
+ * to the first module, then passes the return value of the last
+ * module called, to the following module.
+ *
+ * @param {string} name
+ * @param {mixed} arg
+ * @return {mixed}
+ * @api private
+ */
+
+ hookImmutable : function(name, arg) {
+ return inject(JSpec.modules, arg, function(result, module){
+ if (typeof module[name] == 'function')
+ return JSpec.evalHook(module, name, [result])
+ })
+ },
+
+ /**
+ * Find a suite by its description or name.
+ *
+ * @param {string} description
+ * @return {Suite}
+ * @api private
+ */
+
+ findSuite : function(description) {
+ return find(this.allSuites, function(suite){
+ return suite.name == description || suite.description == description
+ })
+ },
+
+ /**
+ * Share behaviors (specs) of the given suite with
+ * the current suite.
+ *
+ * @param {string} description
+ * @api public
+ */
+
+ shareBehaviorsOf : function(description) {
+ if (suite = this.findSuite(description)) this.copySpecs(suite, this.currentSuite)
+ else throw 'failed to share behaviors. ' + puts(description) + ' is not a valid Suite name'
+ },
+
+ /**
+ * Copy specs from one suite to another.
+ *
+ * @param {Suite} fromSuite
+ * @param {Suite} toSuite
+ * @api public
+ */
+
+ copySpecs : function(fromSuite, toSuite) {
+ each(fromSuite.specs, function(spec){
+ var newSpec = new Object();
+ extend(newSpec, spec);
+ newSpec.assertions = [];
+ toSuite.specs.push(newSpec);
+ })
+ },
+
+ /**
+ * Convert arguments to an array.
+ *
+ * @param {object} arguments
+ * @param {int} offset
+ * @return {array}
+ * @api public
+ */
+
+ toArray : function(arguments, offset) {
+ return Array.prototype.slice.call(arguments, offset || 0)
+ },
+
+ /**
+ * Return ANSI-escaped colored string.
+ *
+ * @param {string} string
+ * @param {string} color
+ * @return {string}
+ * @api public
+ */
+
+ color : function(string, color) {
+ return "\u001B[" + {
+ bold : 1,
+ black : 30,
+ red : 31,
+ green : 32,
+ yellow : 33,
+ blue : 34,
+ magenta : 35,
+ cyan : 36,
+ white : 37
+ }[color] + 'm' + string + "\u001B[0m"
+ },
+
+ /**
+ * Default matcher message callback.
+ *
+ * @api private
+ */
+
+ defaultMatcherMessage : function(actual, expected, negate, name) {
+ return 'expected ' + puts(actual) + ' to ' +
+ (negate ? 'not ' : '') +
+ name.replace(/_/g, ' ') +
+ ' ' + (expected.length > 1 ?
+ puts.apply(this, expected.slice(1)) :
+ '')
+ },
+
+ /**
+ * Normalize a matcher message.
+ *
+ * When no message callback is present the defaultMatcherMessage
+ * will be assigned, which suffices for most matchers.
+ *
+ * @param {hash} matcher
+ * @return {hash}
+ * @api public
+ */
+
+ normalizeMatcherMessage : function(matcher) {
+ if (typeof matcher.message != 'function')
+ matcher.message = this.defaultMatcherMessage
+ return matcher
+ },
+
+ /**
+ * Normalize a matcher body
+ *
+ * This process allows the following conversions until
+ * the matcher is in its final normalized hash state.
+ *
+ * - '==' becomes 'actual == expected'
+ * - 'actual == expected' becomes 'return actual == expected'
+ * - function(actual, expected) { return actual == expected } becomes
+ * { match : function(actual, expected) { return actual == expected }}
+ *
+ * @param {mixed} body
+ * @return {hash}
+ * @api public
+ */
+
+ normalizeMatcherBody : function(body) {
+ switch (body.constructor) {
+ case String:
+ if (captures = body.match(/^alias (\w+)/)) return JSpec.matchers[last(captures)]
+ if (body.length < 4) body = 'actual ' + body + ' expected'
+ return { match: function(actual, expected) { return eval(body) }}
+
+ case Function:
+ return { match: body }
+
+ default:
+ return body
+ }
+ },
+
+ /**
+ * Get option value. This method first checks if
+ * the option key has been set via the query string,
+ * otherwise returning the options hash value.
+ *
+ * @param {string} key
+ * @return {mixed}
+ * @api public
+ */
+
+ option : function(key) {
+ return (value = query(key)) !== null ? value :
+ JSpec.options[key] || null
+ },
+
+ /**
+ * Check if object _a_, is equal to object _b_.
+ *
+ * @param {object} a
+ * @param {object} b
+ * @return {bool}
+ * @api private
+ */
+
+ equal: function(a, b) {
+ if (typeof a != typeof b) return
+ if (a === b) return true
+ if (a instanceof RegExp)
+ return a.toString() === b.toString()
+ if (a instanceof Date)
+ return Number(a) === Number(b)
+ if (typeof a != 'object') return
+ if (a.length !== undefined)
+ if (a.length !== b.length) return
+ else
+ for (var i = 0, len = a.length; i < len; ++i)
+ if (!equal(a[i], b[i]))
+ return
+ for (var key in a)
+ if (!equal(a[key], b[key]))
+ return
+ return true
+ },
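+
+ // Illustrative, per the branches above:
+ //
+ // equal(/foo/, /foo/) // true -- regexps compare via toString()
+ // equal(new Date(0), new Date(0)) // true -- dates compare via Number()
+ // equal([1, [2]], [1, [2]]) // true -- arrays and objects recurse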
+
+ /**
+ * Return last element of an array.
+ *
+ * @param {array} array
+ * @return {object}
+ * @api public
+ */
+
+ last : function(array) {
+ return array[array.length - 1]
+ },
+
+ /**
+ * Convert object(s) to a print-friendly string.
+ *
+ * @param {...} object
+ * @return {string}
+ * @api public
+ */
+
+ puts : function(object) {
+ if (arguments.length > 1)
+ return map(toArray(arguments), function(arg){
+ return puts(arg)
+ }).join(', ')
+ if (object === undefined) return 'undefined'
+ if (object === null) return 'null'
+ if (object === true) return 'true'
+ if (object === false) return 'false'
+ if (object.an_instance_of) return 'an instance of ' + object.an_instance_of.name
+ if (object.jquery && object.selector.length > 0) return 'selector ' + puts(object.selector)
+ if (object.jquery) return object.get(0).outerHTML
+ if (object.nodeName) return object.outerHTML
+ switch (object.constructor) {
+ case Function: return object.name || object
+ case String:
+ return '"' + object
+ .replace(/"/g, '\\"')
+ .replace(/\n/g, '\\n')
+ .replace(/\t/g, '\\t')
+ + '"'
+ case Array:
+ return inject(object, '[', function(b, v){
+ return b + ', ' + puts(v)
+ }).replace('[,', '[') + ' ]'
+ case Object:
+ object.__hit__ = true
+ return inject(object, '{', function(b, k, v) {
+ if (k == '__hit__') return b
+ return b + ', ' + k + ': ' + (v && v.__hit__ ? '<circular reference>' : puts(v))
+ }).replace('{,', '{') + ' }'
+ default:
+ return object.toString()
+ }
+ },
+
+ /**
+ * Escape HTML.
+ *
+ * @param {string} html
+ * @return {string}
+ * @api public
+ */
+
+ escape : function(html) {
+ return html.toString()
+ .replace(/&/gmi, '&amp;')
+ .replace(/"/gmi, '&quot;')
+ .replace(/>/gmi, '&gt;')
+ .replace(/</gmi, '&lt;')
+ },
+
+ /**
+ * Perform an assertion without reporting.
+ *
+ * This method is primarily used for internal
+ * matchers in order to retain DRYness. May be invoked
+ * like below:
+ *
+ * does('foo', 'eql', 'foo')
+ * does([1,2], 'include', 1, 2)
+ *
+ * External hooks are not run for internal assertions
+ * performed by does().
+ *
+ * @param {mixed} actual
+ * @param {string} matcher
+ * @param {...} expected
+ * @return {mixed}
+ * @api private
+ */
+
+ does : function(actual, matcher, expected) {
+ var assertion = new JSpec.Assertion(JSpec.matchers[matcher], actual, toArray(arguments, 2))
+ return assertion.run().result
+ },
+
+ /**
+ * Perform an assertion.
+ *
+ * expect(true).to('be', true)
+ * expect('foo').not_to('include', 'bar')
+ * expect([1, [2]]).to('include', 1, [2])
+ *
+ * @param {mixed} actual
+ * @return {hash}
+ * @api public
+ */
+
+ expect : function(actual) {
+ function assert(matcher, args, negate) {
+ var expected = toArray(args, 1)
+ matcher.negate = negate
+ assertion = new JSpec.Assertion(matcher, actual, expected, negate)
+ hook('beforeAssertion', assertion)
+ if (matcher.defer) assertion.run()
+ else JSpec.currentSpec.assertions.push(assertion.run().report()), hook('afterAssertion', assertion)
+ return assertion.result
+ }
+
+ function to(matcher) {
+ return assert(matcher, arguments, false)
+ }
+
+ function not_to(matcher) {
+ return assert(matcher, arguments, true)
+ }
+
+ return {
+ to : to,
+ should : to,
+ not_to: not_to,
+ should_not : not_to
+ }
+ },
+
+ /**
+ * Strip whitespace or chars.
+ *
+ * @param {string} string
+ * @param {string} chars
+ * @return {string}
+ * @api public
+ */
+
+ strip : function(string, chars) {
+ return string.
+ replace(new RegExp('[' + (chars || '\\s') + ']*$'), '').
+ replace(new RegExp('^[' + (chars || '\\s') + ']*'), '')
+ },
+
+ /**
+ * Call an iterator callback with arguments a, or b
+ * depending on the arity of the callback.
+ *
+ * @param {function} callback
+ * @param {mixed} a
+ * @param {mixed} b
+ * @return {mixed}
+ * @api private
+ */
+
+ callIterator : function(callback, a, b) {
+ return callback.length == 1 ? callback(b) : callback(a, b)
+ },
+
+ /**
+ * Extend an object with another.
+ *
+ * @param {object} object
+ * @param {object} other
+ * @api public
+ */
+
+ extend : function(object, other) {
+ each(other, function(property, value){
+ object[property] = value
+ })
+ },
+
+ /**
+ * Iterate an object, invoking the given callback.
+ *
+ * @param {hash, array} object
+ * @param {function} callback
+ * @return {JSpec}
+ * @api public
+ */
+
+ each : function(object, callback) {
+ if (object.constructor == Array)
+ for (var i = 0, len = object.length; i < len; ++i)
+ callIterator(callback, i, object[i])
+ else
+ for (var key in object)
+ if (object.hasOwnProperty(key))
+ callIterator(callback, key, object[key])
+ },
+
+ /**
+ * Iterate with memo.
+ *
+ * @param {hash, array} object
+ * @param {object} memo
+ * @param {function} callback
+ * @return {object}
+ * @api public
+ */
+
+ inject : function(object, memo, callback) {
+ each(object, function(key, value){
+ memo = (callback.length == 2 ?
+ callback(memo, value):
+ callback(memo, key, value)) ||
+ memo
+ })
+ return memo
+ },
+
+ /**
+ * Destub _object_'s _method_. When no _method_ is passed
+ * all stubbed methods are destubbed. When no arguments
+ * are passed every object found in JSpec.stubbed will be
+ * destubbed.
+ *
+ * @param {mixed} object
+ * @param {string} method
+ * @api public
+ */
+
+ destub : function(object, method) {
+ if (method) {
+ if (object['__prototype__' + method])
+ delete object[method]
+ else
+ object[method] = object['__original__' + method]
+ delete object['__prototype__' + method]
+ delete object['__original____' + method]
+ }
+ else if (object) {
+ for (var key in object)
+ if (captures = key.match(/^(?:__prototype__|__original__)(.*)/))
+ destub(object, captures[1])
+ }
+ else
+ while (JSpec.stubbed.length)
+ destub(JSpec.stubbed.shift())
+ },
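+
+ // Illustrative forms (foo as in the stub() example below):
+ //
+ // destub(foo, 'toString') // restore a single method
+ // destub(foo) // restore every stubbed method of foo
+ // destub() // restore everything in JSpec.stubbed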
+
+ /**
+ * Stub _object_'s _method_.
+ *
+ * stub(foo, 'toString').and_return('bar')
+ *
+ * @param {mixed} object
+ * @param {string} method
+ * @return {hash}
+ * @api public
+ */
+
+ stub : function(object, method) {
+ hook('stubbing', object, method)
+ JSpec.stubbed.push(object)
+ var type = object.hasOwnProperty(method) ? '__original__' : '__prototype__'
+ object[type + method] = object[method]
+ object[method] = function(){}
+ return {
+ and_return : function(value) {
+ if (typeof value == 'function') object[method] = value
+ else object[method] = function(){ return value }
+ }
+ }
+ },
+
+ /**
+ * Map callback return values.
+ *
+ * @param {hash, array} object
+ * @param {function} callback
+ * @return {array}
+ * @api public
+ */
+
+ map : function(object, callback) {
+ return inject(object, [], function(memo, key, value){
+ memo.push(callIterator(callback, key, value))
+ })
+ },
+
+ /**
+ * Returns the first matching expression or null.
+ *
+ * @param {hash, array} object
+ * @param {function} callback
+ * @return {mixed}
+ * @api public
+ */
+
+ any : function(object, callback) {
+ return inject(object, null, function(state, key, value){
+ if (state == undefined)
+ return callIterator(callback, key, value) ? value : state
+ })
+ },
+
+ /**
+ * Returns an array of values collected when the callback
+ * given evaluates to true.
+ *
+ * @param {hash, array} object
+ * @param {function} callback
+ * @return {array}
+ * @api public
+ */
+
+ select : function(object, callback) {
+ return inject(object, [], function(selected, key, value){
+ if (callIterator(callback, key, value))
+ selected.push(value)
+ })
+ },
+
+ /**
+ * Define matchers.
+ *
+ * @param {hash} matchers
+ * @api public
+ */
+
+ addMatchers : function(matchers) {
+ each(matchers, function(name, body){
+ JSpec.addMatcher(name, body)
+ })
+ },
+
+ /**
+ * Define a matcher.
+ *
+ * @param {string} name
+ * @param {hash, function, string} body
+ * @api public
+ */
+
+ addMatcher : function(name, body) {
+ hook('addingMatcher', name, body)
+ if (name.indexOf(' ') != -1) {
+ var matchers = name.split(/\s+/)
+ var prefix = matchers.shift()
+ each(matchers, function(name) {
+ JSpec.addMatcher(prefix + '_' + name, body(name))
+ })
+ }
+ this.matchers[name] = this.normalizeMatcherMessage(this.normalizeMatcherBody(body))
+ this.matchers[name].name = name
+ },
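+
+ // Illustrative, using body forms normalizeMatcherBody() accepts:
+ //
+ // JSpec.addMatcher('be_positive', 'actual > 0')
+ // JSpec.addMatcher('be_even', function(actual){ return actual % 2 == 0 })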
+
+ /**
+ * Add a root suite to JSpec.
+ *
+ * @param {string} description
+ * @param {body} function
+ * @api public
+ */
+
+ describe : function(description, body) {
+ var suite = new JSpec.Suite(description, body)
+ hook('addingSuite', suite)
+ this.allSuites.push(suite)
+ this.suites.push(suite)
+ },
+
+ /**
+ * Return the contents of a function body.
+ *
+ * @param {function} body
+ * @return {string}
+ * @api public
+ */
+
+ contentsOf : function(body) {
+ return body.toString().match(/^[^\{]*{((.*\n*)*)}/m)[1]
+ },
+
+ /**
+ * Evaluate a JSpec capture body.
+ *
+ * @param {function} body
+ * @param {string} errorMessage (optional)
+ * @return {mixed}
+ * @api private
+ */
+
+ evalBody : function(body, errorMessage) {
+ var dsl = this.DSL || this.DSLs.snake
+ var matchers = this.matchers
+ var context = this.context || this.defaultContext
+ var contents = this.contentsOf(body)
+ hook('evaluatingBody', dsl, matchers, context, contents)
+ try { with (dsl){ with (context) { with (matchers) { eval(contents) }}} }
+ catch(e) { error(errorMessage, e) }
+ },
+
+ /**
+ * Pre-process a string of JSpec.
+ *
+ * @param {string} input
+ * @return {string}
+ * @api private
+ */
+
+ preprocess : function(input) {
+ if (typeof input != 'string') return
+ input = hookImmutable('preprocessing', input)
+ return input.
+ replace(/\t/g, ' ').
+ replace(/\r\n|\n|\r/g, '\n').
+ split('__END__')[0].
+ replace(/([\w\.]+)\.(stub|destub)\((.*?)\)$/gm, '$2($1, $3)').
+ replace(/describe\s+(.*?)$/gm, 'describe($1, function(){').
+ replace(/^\s+it\s+(.*?)$/gm, ' it($1, function(){').
+ replace(/^ *(before_each|after_each|before|after)(?= |\n|$)/gm, 'JSpec.currentSuite.addHook("$1", function(){').
+ replace(/^\s*end(?=\s|$)/gm, '});').
+ replace(/-\{/g, 'function(){').
+ replace(/(\d+)\.\.(\d+)/g, function(_, a, b){ return range(a, b) }).
+ replace(/\.should([_\.]not)?[_\.](\w+)(?: |;|$)(.*)$/gm, '.should$1_$2($3)').
+ replace(/([\/\s]*)(.+?)\.(should(?:[_\.]not)?)[_\.](\w+)\((.*)\)\s*;?$/gm, '$1 expect($2).$3($4, $5)').
+ replace(/, \)/g, ')').
+ replace(/should\.not/g, 'should_not')
+ },
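+
+ // Illustrative: the rewrites above turn grammar such as
+ //
+ // describe 'thing'
+ // it 'does something'
+ // (1..3).should.include 2
+ // end
+ // end
+ //
+ // into plain describe()/it()/expect() calls, with 1..3 expanded to [1,2,3].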
+
+ /**
+ * Create a range string which can be evaluated to a native array.
+ *
+ * @param {int} start
+ * @param {int} end
+ * @return {string}
+ * @api public
+ */
+
+ range : function(start, end) {
+ var current = parseInt(start), end = parseInt(end), values = [current]
+ if (end > current) while (++current <= end) values.push(current)
+ else while (--current >= end) values.push(current)
+ return '[' + values + ']'
+ },
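+
+ // Illustrative results: range(1, 3) => '[1,2,3]', range(3, 1) => '[3,2,1]'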
+
+ /**
+ * Report on the results.
+ *
+ * @api public
+ */
+
+ report : function() {
+ this.duration = Number(new Date) - this.start
+ hook('reporting', JSpec.options)
+ new (JSpec.options.reporter || JSpec.reporters.DOM)(JSpec, JSpec.options)
+ },
+
+ /**
+ * Run the spec suites. Options are merged
+ * with JSpec options when present.
+ *
+ * @param {hash} options
+ * @return {JSpec}
+ * @api public
+ */
+
+ run : function(options) {
+ if (any(hook('running'), haveStopped)) return this
+ if (options) extend(this.options, options)
+ this.start = Number(new Date)
+ each(this.suites, function(suite) { JSpec.runSuite(suite) })
+ return this
+ },
+
+ /**
+ * Run a suite.
+ *
+ * @param {Suite} suite
+ * @api public
+ */
+
+ runSuite : function(suite) {
+ this.currentSuite = suite
+ this.evalBody(suite.body)
+ suite.ran = true
+ hook('beforeSuite', suite), suite.hook('before')
+ each(suite.specs, function(spec) {
+ hook('beforeSpec', spec)
+ suite.hook('before_each')
+ JSpec.runSpec(spec)
+ hook('afterSpec', spec)
+ suite.hook('after_each')
+ })
+ if (suite.hasSuites()) {
+ each(suite.suites, function(suite) {
+ JSpec.runSuite(suite)
+ })
+ }
+ hook('afterSuite', suite), suite.hook('after')
+ this.stats.suitesFinished++
+ },
+
+ /**
+ * Report a failure for the current spec.
+ *
+ * @param {string} message
+ * @api public
+ */
+
+ fail : function(message) {
+ JSpec.currentSpec.fail(message)
+ },
+
+ /**
+ * Report a passing assertion for the current spec.
+ *
+ * @param {string} message
+ * @api public
+ */
+
+ pass : function(message) {
+ JSpec.currentSpec.pass(message)
+ },
+
+ /**
+ * Run a spec.
+ *
+ * @param {Spec} spec
+ * @api public
+ */
+
+ runSpec : function(spec) {
+ this.currentSpec = spec
+ try { this.evalBody(spec.body) }
+ catch (e) { fail(e) }
+ spec.runDeferredAssertions()
+ destub()
+ this.stats.specsFinished++
+ this.stats.assertions += spec.assertions.length
+ },
+
+ /**
+ * Require a dependency, with optional message.
+ *
+ * @param {string} dependency
+ * @param {string} message (optional)
+ * @return {JSpec}
+ * @api public
+ */
+
+ requires : function(dependency, message) {
+ hook('requiring', dependency, message)
+ try { eval(dependency) }
+ catch (e) { throw 'JSpec depends on ' + dependency + ' ' + message }
+ return this
+ },
+
+ /**
+ * Query against the current query string's keys
+ * or the queryString specified.
+ *
+ * @param {string} key
+ * @param {string} queryString
+ * @return {string, null}
+ * @api private
+ */
+
+ query : function(key, queryString) {
+ var queryString = (queryString || (main.location ? main.location.search : null) || '').substring(1)
+ return inject(queryString.split('&'), null, function(value, pair){
+ parts = pair.split('=')
+ return parts[0] == key ? parts[1].replace(/%20|\+/gmi, ' ') : value
+ })
+ },
+
+ /**
+ * Throw a JSpec related error.
+ *
+ * @param {string} message
+ * @param {Exception} e
+ * @api public
+ */
+
+ error : function(message, e) {
+ throw (message ? message : '') + e.toString() +
+ (e.line ? ' near line ' + e.line : '')
+ },
+
+ /**
+ * Ad-hoc POST request for JSpec server usage.
+ *
+ * @param {string} uri
+ * @param {string} data
+ * @api private
+ */
+
+ post : function(uri, data) {
+ if (any(hook('posting', uri, data), haveStopped)) return
+ var request = this.xhr()
+ request.open('POST', uri, false)
+ request.setRequestHeader('Content-Type', 'application/json')
+ request.send(JSpec.JSON.encode(data))
+ },
+
+ /**
+ * Instantiate an XMLHttpRequest.
+ *
+ * Here we utilize IE's lame ActiveXObjects first, which
+ * allow IE to serve files via the file: protocol; otherwise
+ * we default to XMLHttpRequest.
+ *
+ * @return {XMLHttpRequest, ActiveXObject}
+ * @api private
+ */
+
+ xhr : function() {
+ return this.ieXhr() || new JSpec.request
+ },
+
+ /**
+ * Return Microsoft's piece-of-crap ActiveXObject.
+ *
+ * @return {ActiveXObject}
+ * @api public
+ */
+
+ ieXhr : function() {
+ function object(str) {
+ try { return new ActiveXObject(str) } catch(e) {}
+ }
+ return object('Msxml2.XMLHTTP.6.0') ||
+ object('Msxml2.XMLHTTP.3.0') ||
+ object('Msxml2.XMLHTTP') ||
+ object('Microsoft.XMLHTTP')
+ },
+
+ /**
+ * Check for HTTP request support.
+ *
+ * @return {bool}
+ * @api private
+ */
+
+ hasXhr : function() {
+ return JSpec.request || 'ActiveXObject' in main
+ },
+
+ /**
+ * Try loading _file_, returning the contents
+ * string or null. Chain calls (with ||) to locate / read a file.
+ *
+ * @param {string} file
+ * @return {string}
+ * @api public
+ */
+
+ tryLoading : function(file) {
+ try { return JSpec.load(file) } catch (e) {}
+ },
+
+ /**
+ * Load a _file_'s contents.
+ *
+ * @param {string} file
+ * @param {function} callback
+ * @return {string}
+ * @api public
+ */
+
+ load : function(file, callback) {
+ if (any(hook('loading', file), haveStopped)) return
+ if ('readFile' in main)
+ return readFile(file)
+ else if (this.hasXhr()) {
+ var request = this.xhr()
+ request.open('GET', file, false)
+ request.send(null)
+ if (request.readyState == 4 &&
+ (request.status == 0 ||
+ request.status.toString().charAt(0) == 2))
+ return request.responseText
+ }
+ else
+ error("failed to load `" + file + "'")
+ },
+
+ /**
+ * Load, pre-process, and evaluate a file.
+ *
+ * @param {string} file
+ * @return {JSpec}
+ * @api public
+ */
+
+ exec : function(file) {
+ if (any(hook('executing', file), haveStopped)) return this
+ eval('with (JSpec){' + this.preprocess(this.load(file)) + '}')
+ return this
+ }
+ }
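+
+ // Illustrative boot sequence (path hypothetical); exec() and run()
+ // both return JSpec, so runners can chain:
+ //
+ // JSpec
+ // .exec('spec/spec.core.js')
+ // .run({ failuresOnly: true })
+ // .report()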
+
+ // --- Node.js support
+
+ if (typeof GLOBAL === 'object' && typeof exports === 'object')
+ quit = process.exit,
+ print = require('sys').puts,
+ readFile = require('fs').readFileSync
+
+ // --- Utility functions
+
+ var main = this,
+ find = JSpec.any,
+ utils = 'haveStopped stub hookImmutable hook destub map any last pass fail range each option inject select \
+ error escape extend puts query strip color does addMatchers callIterator toArray equal'.split(/\s+/)
+ while (utils.length) eval('var ' + utils[0] + ' = JSpec.' + utils.shift())
+ if (!main.setTimeout) main.setTimeout = function(callback){ callback() }
+
+ // --- Matchers
+
+ addMatchers({
+ equal : "===",
+ eql : "equal(actual, expected)",
+ be : "alias equal",
+ be_greater_than : ">",
+ be_less_than : "<",
+ be_at_least : ">=",
+ be_at_most : "<=",
+ be_a : "actual.constructor == expected",
+ be_an : "alias be_a",
+ be_an_instance_of : "actual instanceof expected",
+ be_null : "actual == null",
+ be_true : "actual == true",
+ be_false : "actual == false",
+ be_undefined : "typeof actual == 'undefined'",
+ be_type : "typeof actual == expected",
+ match : "typeof actual == 'string' ? actual.match(expected) : false",
+ respond_to : "typeof actual[expected] == 'function'",
+ have_length : "actual.length == expected",
+ be_within : "actual >= expected[0] && actual <= last(expected)",
+ have_length_within : "actual.length >= expected[0] && actual.length <= last(expected)",
+
+ receive : { defer : true, match : function(actual, method, times) {
+ proxy = new JSpec.ProxyAssertion(actual, method, times, this.negate)
+ JSpec.currentSpec.assertions.push(proxy)
+ return proxy
+ }},
+
+ be_empty : function(actual) {
+ if (actual.constructor == Object && actual.length == undefined)
+ for (var key in actual)
+ return false;
+ return !actual.length
+ },
+
+ include : function(actual) {
+ for (state = true, i = 1; i < arguments.length; i++) {
+ arg = arguments[i]
+ switch (actual.constructor) {
+ case String:
+ case Number:
+ case RegExp:
+ case Function:
+ state = actual.toString().indexOf(arg) !== -1
+ break
+
+ case Object:
+ state = arg in actual
+ break
+
+ case Array:
+ state = any(actual, function(value){ return equal(value, arg) })
+ break
+ }
+ if (!state) return false
+ }
+ return true
+ },
+
+ throw_error : { match : function(actual, expected, message) {
+ try { actual() }
+ catch (e) {
+ this.e = e
+ var assert = function(arg) {
+ switch (arg.constructor) {
+ case RegExp : return arg.test(e.message || e.toString())
+ case String : return arg == (e.message || e.toString())
+ case Function : return e instanceof arg || e.name == arg.name
+ }
+ }
+ return message ? assert(expected) && assert(message) :
+ expected ? assert(expected) :
+ true
+ }
+ }, message : function(actual, expected, negate) {
+ // TODO: refactor when actual is not in expected [0]
+ var message_for = function(i) {
+ if (expected[i] == undefined) return 'exception'
+ switch (expected[i].constructor) {
+ case RegExp : return 'exception matching ' + puts(expected[i])
+ case String : return 'exception of ' + puts(expected[i])
+ case Function : return expected[i].name || 'Error'
+ }
+ }
+ exception = message_for(1) + (expected[2] ? ' and ' + message_for(2) : '')
+ return 'expected ' + exception + (negate ? ' not ' : '' ) +
+ ' to be thrown, but ' + (this.e ? 'got ' + puts(this.e) : 'nothing was')
+ }},
+
+ have : function(actual, length, property) {
+ return actual[property].length == length
+ },
+
+ have_at_least : function(actual, length, property) {
+ return actual[property].length >= length
+ },
+
+ have_at_most :function(actual, length, property) {
+ return actual[property].length <= length
+ },
+
+ have_within : function(actual, range, property) {
+ length = actual[property].length
+ return length >= range.shift() && length <= range.pop()
+ },
+
+ have_prop : function(actual, property, value) {
+ return actual[property] == null ||
+ actual[property] instanceof Function ? false:
+ value == null ? true:
+ does(actual[property], 'eql', value)
+ },
+
+ have_property : function(actual, property, value) {
+ return actual[property] == null ||
+ actual[property] instanceof Function ? false:
+ value == null ? true:
+ value === actual[property]
+ }
+ })
+
+})()
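
The matcher table above mixes three shorthand forms: a bare operator string ("===", ">="), a full expression over `actual`/`expected`, and an "alias <name>" redirect. A minimal sketch of how such string shorthands can be compiled into callable matchers (illustrative only, not JSpec's actual expansion logic):

function expandMatcher(body) {
  if (/^\W+$/.test(body))                 // bare operator, e.g. "===" or ">="
    body = 'actual ' + body + ' expected'
  return new Function('actual', 'expected', 'return (' + body + ')')
}

var equal = expandMatcher('===')
var have_length = expandMatcher('actual.length == expected')
equal(1, 1)            // => true
have_length('abc', 3)  // => true
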
diff --git a/share/www/script/jspec/jspec.xhr.js b/share/www/script/jspec/jspec.xhr.js
new file mode 100644
index 00000000..61648795
--- /dev/null
+++ b/share/www/script/jspec/jspec.xhr.js
@@ -0,0 +1,195 @@
+
+// JSpec - XHR - Copyright TJ Holowaychuk <tj@vision-media.ca> (MIT Licensed)
+
+(function(){
+
+ var lastRequest
+
+ // --- Original XMLHttpRequest
+
+ var OriginalXMLHttpRequest = 'XMLHttpRequest' in this ?
+ XMLHttpRequest :
+ function(){}
+ var OriginalActiveXObject = 'ActiveXObject' in this ?
+ ActiveXObject :
+ undefined
+
+ // --- MockXMLHttpRequest
+
+ var MockXMLHttpRequest = function() {
+ this.requestHeaders = {}
+ }
+
+ MockXMLHttpRequest.prototype = {
+ status: 0,
+ async: true,
+ readyState: 0,
+ responseText: '',
+ abort: function(){},
+ onreadystatechange: function(){},
+
+ /**
+ * Return response headers hash.
+ */
+
+ getAllResponseHeaders : function(){
+ return this.responseHeaders
+ },
+
+ /**
+ * Return case-insensitive value for header _name_.
+ */
+
+ getResponseHeader : function(name) {
+ return this.responseHeaders[name.toLowerCase()]
+ },
+
+ /**
+ * Set case-insensitive _value_ for header _name_.
+ */
+
+ setRequestHeader : function(name, value) {
+ this.requestHeaders[name.toLowerCase()] = value
+ },
+
+ /**
+ * Open mock request.
+ */
+
+ open : function(method, url, async, user, password) {
+ this.user = user
+ this.password = password
+ this.url = url
+ this.readyState = 1
+ this.method = method.toUpperCase()
+ if (async != undefined) this.async = async
+ if (this.async) this.onreadystatechange()
+ },
+
+ /**
+ * Send request _data_.
+ */
+
+ send : function(data) {
+ var self = this
+ this.data = data
+ this.readyState = 4
+ if (this.method == 'HEAD') this.responseText = null
+ this.responseHeaders['content-length'] = (this.responseText || '').length
+ if(this.async) this.onreadystatechange()
+ lastRequest = function(){
+ return self
+ }
+ }
+ }
+
+ // --- Response status codes
+
+ JSpec.statusCodes = {
+ 100: 'Continue',
+ 101: 'Switching Protocols',
+ 200: 'OK',
+ 201: 'Created',
+ 202: 'Accepted',
+ 203: 'Non-Authoritative Information',
+ 204: 'No Content',
+ 205: 'Reset Content',
+ 206: 'Partial Content',
+ 300: 'Multiple Choices',
+ 301: 'Moved Permanently',
+ 302: 'Found',
+ 303: 'See Other',
+ 304: 'Not Modified',
+ 305: 'Use Proxy',
+ 307: 'Temporary Redirect',
+ 400: 'Bad Request',
+ 401: 'Unauthorized',
+ 402: 'Payment Required',
+ 403: 'Forbidden',
+ 404: 'Not Found',
+ 405: 'Method Not Allowed',
+ 406: 'Not Acceptable',
+ 407: 'Proxy Authentication Required',
+ 408: 'Request Timeout',
+ 409: 'Conflict',
+ 410: 'Gone',
+ 411: 'Length Required',
+ 412: 'Precondition Failed',
+ 413: 'Request Entity Too Large',
+ 414: 'Request-URI Too Long',
+ 415: 'Unsupported Media Type',
+ 416: 'Requested Range Not Satisfiable',
+ 417: 'Expectation Failed',
+ 422: 'Unprocessable Entity',
+ 500: 'Internal Server Error',
+ 501: 'Not Implemented',
+ 502: 'Bad Gateway',
+ 503: 'Service Unavailable',
+ 504: 'Gateway Timeout',
+ 505: 'HTTP Version Not Supported'
+ }
+
+ /**
+ * Mock XMLHttpRequest requests.
+ *
+ * mockRequest().and_return('some data', 'text/plain', 200, { 'X-SomeHeader' : 'somevalue' })
+ *
+ * @return {hash}
+ * @api public
+ */
+
+ function mockRequest() {
+ return { and_return : function(body, type, status, headers) {
+ XMLHttpRequest = MockXMLHttpRequest
+ ActiveXObject = false
+ status = status || 200
+ headers = headers || {}
+ headers['content-type'] = type
+ JSpec.extend(XMLHttpRequest.prototype, {
+ responseText: body,
+ responseHeaders: headers,
+ status: status,
+ statusText: JSpec.statusCodes[status]
+ })
+ }}
+ }
+
+ /**
+ * Unmock XMLHttpRequest requests.
+ *
+ * @api public
+ */
+
+ function unmockRequest() {
+ XMLHttpRequest = OriginalXMLHttpRequest
+ ActiveXObject = OriginalActiveXObject
+ }
+
+ JSpec.include({
+ name: 'Mock XHR',
+
+ // --- Utilities
+
+ utilities : {
+ mockRequest: mockRequest,
+ unmockRequest: unmockRequest
+ },
+
+ // --- Hooks
+
+ afterSpec : function() {
+ unmockRequest()
+ },
+
+ // --- DSLs
+
+ DSLs : {
+ snake : {
+ mock_request: mockRequest,
+ unmock_request: unmockRequest,
+ last_request: function(){ return lastRequest() }
+ }
+ }
+
+ })
+})()
\ No newline at end of file
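
Taken together, `mock_request().and_return(...)` swaps the canned `MockXMLHttpRequest` in for the global constructor, so any client code issuing XHR during the spec sees the stubbed response, and the `afterSpec` hook restores the original afterwards. A hedged usage sketch, assuming the `mockRequest`/`unmockRequest` utilities exposed above are in scope:

mockRequest().and_return('{"ok":true}', 'application/json', 200)

var xhr = new XMLHttpRequest()           // actually MockXMLHttpRequest now
xhr.open('GET', '/test_suite_db', true)
xhr.send(null)

xhr.status                               // => 200
xhr.getResponseHeader('content-type')    // => 'application/json'
xhr.responseText                         // => '{"ok":true}'

unmockRequest()                          // restore the real constructor
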
diff --git a/share/www/script/oauth.js b/share/www/script/oauth.js
index aa0019d5..ada00a27 100644
--- a/share/www/script/oauth.js
+++ b/share/www/script/oauth.js
@@ -78,7 +78,7 @@ OAuth.setProperties(OAuth, // utility functions
}
if (s instanceof Array) {
var e = "";
- for (var i = 0; i < s.length; ++s) {
+ for (var i = 0; i < s.length; ++i) {
if (e != "") e += '&';
e += percentEncode(s[i]);
}
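
The one-character oauth.js fix above matters more than it looks: `++s` incremented the array itself (coercing it to NaN), so the `i < s.length` test failed after the first pass and only the first element was ever percent-encoded. A standalone illustration of the corrected loop, using `encodeURIComponent` as a stand-in for `OAuth.percentEncode`:

function percentEncodeAll(parts) {
  var e = "";
  for (var i = 0; i < parts.length; ++i) { // was ++s: loop died after one pass
    if (e != "") e += '&';
    e += encodeURIComponent(parts[i]);
  }
  return e;
}
percentEncodeAll(["a b", "c"]); // => "a%20b&c"
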
diff --git a/share/www/script/test/attachment_names.js b/share/www/script/test/attachment_names.js
index d90c24c4..988dd2d2 100644
--- a/share/www/script/test/attachment_names.js
+++ b/share/www/script/test/attachment_names.js
@@ -24,7 +24,7 @@ couchTests.attachment_names = function(debug) {
data: "VGhpcyBpcyBhIGJhc2U2NCBlbmNvZGVkIHRleHQ="
}
}
- }
+ };
// inline attachments
try {
@@ -72,7 +72,7 @@ couchTests.attachment_names = function(debug) {
data: "VGhpcyBpcyBhIGJhc2U2NCBlbmNvZGVkIHRleHQ="
}
}
- }
+ };
try {
db.save(binAttDoc);
diff --git a/share/www/script/test/attachment_paths.js b/share/www/script/test/attachment_paths.js
index a2a0f69c..3f6ffb7c 100644
--- a/share/www/script/test/attachment_paths.js
+++ b/share/www/script/test/attachment_paths.js
@@ -33,7 +33,7 @@ couchTests.attachment_paths = function(debug) {
data: "V2UgbGlrZSBwZXJjZW50IHR3byBGLg=="
}
}
- }
+ };
T(db.save(binAttDoc).ok);
@@ -73,7 +73,10 @@ couchTests.attachment_paths = function(debug) {
T(binAttDoc._attachments["foo/bar.txt"] !== undefined);
T(binAttDoc._attachments["foo%2Fbaz.txt"] !== undefined);
T(binAttDoc._attachments["foo/bar2.txt"] !== undefined);
- T(binAttDoc._attachments["foo/bar2.txt"].content_type == "text/plain;charset=utf-8");
+ TEquals("text/plain;charset=utf-8", // thank you Safari
+ binAttDoc._attachments["foo/bar2.txt"].content_type.toLowerCase(),
+ "correct content-type"
+ );
T(binAttDoc._attachments["foo/bar2.txt"].length == 30);
//// now repeat the whole thing with a design doc
@@ -92,7 +95,7 @@ couchTests.attachment_paths = function(debug) {
data: "V2UgbGlrZSBwZXJjZW50IHR3byBGLg=="
}
}
- }
+ };
T(db.save(binAttDoc).ok);
@@ -141,7 +144,10 @@ couchTests.attachment_paths = function(debug) {
T(binAttDoc._attachments["foo/bar.txt"] !== undefined);
T(binAttDoc._attachments["foo/bar2.txt"] !== undefined);
- T(binAttDoc._attachments["foo/bar2.txt"].content_type == "text/plain;charset=utf-8");
+ TEquals("text/plain;charset=utf-8", // thank you Safari
+ binAttDoc._attachments["foo/bar2.txt"].content_type.toLowerCase(),
+ "correct content-type"
+ );
T(binAttDoc._attachments["foo/bar2.txt"].length == 30);
}
};
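
The switch from `T(... == ...)` to `TEquals` with `toLowerCase()` is there because some browsers (the inline comment blames Safari) normalize header and MIME-type casing. Later hunks use a `TEqualsIgnoreCase` helper for the same purpose; its obvious shape, sketched here on top of `TEquals` (hypothetical — the suite defines its own):

function TEqualsIgnoreCase(expected, actual, message) {
  TEquals(String(expected).toLowerCase(),
          String(actual).toLowerCase(),
          message);
}
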
diff --git a/share/www/script/test/attachment_ranges.js b/share/www/script/test/attachment_ranges.js
new file mode 100644
index 00000000..e1d40eae
--- /dev/null
+++ b/share/www/script/test/attachment_ranges.js
@@ -0,0 +1,134 @@
+// Licensed under the Apache License, Version 2.0 (the "License"); you may not
+// use this file except in compliance with the License. You may obtain a copy of
+// the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations under
+// the License.
+couchTests.attachment_ranges = function(debug) {
+ var db = new CouchDB("test_suite_db", {
+ "X-Couch-Full-Commit": "false"
+ });
+ db.deleteDb();
+ db.createDb();
+
+ if (debug) debugger;
+
+ var binAttDoc = {
+ _id: "bin_doc",
+ _attachments: {
+ "foo.txt": {
+ content_type: "application/octet-stream",
+ data: "VGhpcyBpcyBhIGJhc2U2NCBlbmNvZGVkIHRleHQ="
+ }
+ }
+ };
+
+ var save_response = db.save(binAttDoc);
+ T(save_response.ok);
+
+ // Fetching the whole entity is a 206.
+ var xhr = CouchDB.request("GET", "/test_suite_db/bin_doc/foo.txt", {
+ headers: {
+ "Range": "bytes=0-28"
+ }
+ });
+ TEquals(206, xhr.status, "fetch 0-28");
+ TEquals("This is a base64 encoded text", xhr.responseText);
+ TEquals("bytes 0-28/29", xhr.getResponseHeader("Content-Range"));
+ TEquals("29", xhr.getResponseHeader("Content-Length"));
+
+ // Fetching the whole entity without an end offset is a 206.
+ var xhr = CouchDB.request("GET", "/test_suite_db/bin_doc/foo.txt", {
+ headers: {
+ "Range": "bytes=0-"
+ }
+ });
+ TEquals(206, xhr.status, "fetch 0-");
+ TEquals("This is a base64 encoded text", xhr.responseText);
+ TEquals("bytes 0-28/29", xhr.getResponseHeader("Content-Range"));
+ TEquals("29", xhr.getResponseHeader("Content-Length"));
+
+ // A badly formed Range header is a 200.
+ var xhr = CouchDB.request("GET", "/test_suite_db/bin_doc/foo.txt", {
+ headers: {
+ "Range": "bytes:0-"
+ }
+ });
+ TEquals(200, xhr.status, "fetch with bad range header");
+
+ // Fetching the end of an entity without an end offset is a 206.
+ var xhr = CouchDB.request("GET", "/test_suite_db/bin_doc/foo.txt", {
+ headers: {
+ "Range": "bytes=2-"
+ }
+ });
+ TEquals(206, xhr.status, "fetch 2-");
+ TEquals("is is a base64 encoded text", xhr.responseText);
+ TEquals("bytes 2-28/29", xhr.getResponseHeader("Content-Range"));
+ TEquals("27", xhr.getResponseHeader("Content-Length"));
+
+ // Fetching past the end of the entity is a 206
+ var xhr = CouchDB.request("GET", "/test_suite_db/bin_doc/foo.txt", {
+ headers: {
+ "Range": "bytes=0-29"
+ }
+ });
+ TEquals(206, xhr.status, "fetch 0-29");
+ TEquals("bytes 0-28/29", xhr.getResponseHeader("Content-Range"));
+ TEquals("29", xhr.getResponseHeader("Content-Length"));
+
+ // Fetching the first part of the entity is a 206
+ var xhr = CouchDB.request("GET", "/test_suite_db/bin_doc/foo.txt", {
+ headers: {
+ "Range": "bytes=0-3"
+ }
+ });
+ TEquals(206, xhr.status, "fetch 0-3");
+ TEquals("This", xhr.responseText);
+ TEquals("4", xhr.getResponseHeader("Content-Length"));
+ TEquals("bytes 0-3/29", xhr.getResponseHeader("Content-Range"));
+
+ // Fetching the middle of the entity is also a 206
+ var xhr = CouchDB.request("GET", "/test_suite_db/bin_doc/foo.txt", {
+ headers: {
+ "Range": "bytes=10-15"
+ }
+ });
+ TEquals(206, xhr.status, "fetch 10-15");
+ TEquals("base64", xhr.responseText);
+ TEquals("6", xhr.getResponseHeader("Content-Length"));
+ TEquals("bytes 10-15/29", xhr.getResponseHeader("Content-Range"));
+
+ // Fetching the end of the entity is also a 206
+ var xhr = CouchDB.request("GET", "/test_suite_db/bin_doc/foo.txt", {
+ headers: {
+ "Range": "bytes=-3"
+ }
+ });
+ TEquals(206, xhr.status, "fetch -3");
+ TEquals("ext", xhr.responseText);
+ TEquals("3", xhr.getResponseHeader("Content-Length"));
+ TEquals("bytes 26-28/29", xhr.getResponseHeader("Content-Range"));
+
+ // a backward range is a 416
+ var xhr = CouchDB.request("GET", "/test_suite_db/bin_doc/foo.txt", {
+ headers: {
+ "Range": "bytes=5-3"
+ }
+ });
+ TEquals(416, xhr.status, "fetch 5-3");
+
+ // a range completely outside the entity is a 416
+ var xhr = CouchDB.request("GET", "/test_suite_db/bin_doc/foo.txt", {
+ headers: {
+ "Range": "bytes=300-310"
+ }
+ });
+ TEquals(416, xhr.status, "fetch 300-310");
+
+};
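
The expectations above pin down the byte-range arithmetic: inclusive `from-to` against a 29-byte body, a suffix form `-N`, open-ended `from-`, clamping past the end, and 416 for backward or fully out-of-range requests. A minimal sketch of that resolution (not CouchDB's implementation; the return shape is illustrative):

function resolveRange(spec, len) {
  var m = /^bytes=(\d*)-(\d*)$/.exec(spec);
  if (!m) return { status: 200 };                       // malformed: full body
  var from = m[1] === "" ? len - Number(m[2])           // suffix form "-N"
                         : Number(m[1]);
  var to = (m[1] === "" || m[2] === "") ? len - 1       // suffix or open-ended
                                        : Math.min(Number(m[2]), len - 1);
  if (from > to || from >= len) return { status: 416 }; // unsatisfiable
  return {
    status: 206,
    contentRange: "bytes " + from + "-" + to + "/" + len,
    contentLength: to - from + 1
  };
}
resolveRange("bytes=0-28", 29); // => 206, "bytes 0-28/29", length 29
resolveRange("bytes=-3", 29);   // => 206, "bytes 26-28/29", length 3
resolveRange("bytes=5-3", 29);  // => 416
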
diff --git a/share/www/script/test/attachment_views.js b/share/www/script/test/attachment_views.js
index fd30dcfc..a92a8ad0 100644
--- a/share/www/script/test/attachment_views.js
+++ b/share/www/script/test/attachment_views.js
@@ -68,11 +68,11 @@ couchTests.attachment_views= function(debug) {
}
emit(parseInt(doc._id), count);
- }
+ };
var reduceFunction = function(key, values) {
return sum(values);
- }
+ };
var result = db.query(mapFunction, reduceFunction);
diff --git a/share/www/script/test/attachments.js b/share/www/script/test/attachments.js
index 36f5a5ad..b0cfd2c5 100644
--- a/share/www/script/test/attachments.js
+++ b/share/www/script/test/attachments.js
@@ -24,7 +24,7 @@ couchTests.attachments= function(debug) {
data: "VGhpcyBpcyBhIGJhc2U2NCBlbmNvZGVkIHRleHQ="
}
}
- }
+ };
var save_response = db.save(binAttDoc);
T(save_response.ok);
@@ -68,12 +68,12 @@ couchTests.attachments= function(debug) {
T(binAttDoc2._attachments["foo.txt"] !== undefined);
T(binAttDoc2._attachments["foo2.txt"] !== undefined);
- T(binAttDoc2._attachments["foo2.txt"].content_type == "text/plain;charset=utf-8");
+ TEqualsIgnoreCase("text/plain;charset=utf-8", binAttDoc2._attachments["foo2.txt"].content_type);
T(binAttDoc2._attachments["foo2.txt"].length == 30);
var xhr = CouchDB.request("GET", "/test_suite_db/bin_doc2/foo2.txt");
T(xhr.responseText == "This is no base64 encoded text");
- T(xhr.getResponseHeader("Content-Type") == "text/plain;charset=utf-8");
+ TEqualsIgnoreCase("text/plain;charset=utf-8", xhr.getResponseHeader("Content-Type"));
// test without rev, should fail
var xhr = CouchDB.request("DELETE", "/test_suite_db/bin_doc2/foo2.txt");
@@ -93,10 +93,11 @@ couchTests.attachments= function(debug) {
});
T(xhr.status == 201);
var rev = JSON.parse(xhr.responseText).rev;
+ TEquals('"' + rev + '"', xhr.getResponseHeader("Etag"));
var xhr = CouchDB.request("GET", "/test_suite_db/bin_doc3/attachment.txt");
T(xhr.responseText == bin_data);
- T(xhr.getResponseHeader("Content-Type") == "text/plain;charset=utf-8");
+ TEqualsIgnoreCase("text/plain;charset=utf-8", xhr.getResponseHeader("Content-Type"));
var xhr = CouchDB.request("PUT", "/test_suite_db/bin_doc3/attachment.txt", {
headers:{"Content-Type":"text/plain;charset=utf-8"},
@@ -110,14 +111,15 @@ couchTests.attachments= function(debug) {
});
T(xhr.status == 201);
var rev = JSON.parse(xhr.responseText).rev;
+ TEquals('"' + rev + '"', xhr.getResponseHeader("Etag"));
var xhr = CouchDB.request("GET", "/test_suite_db/bin_doc3/attachment.txt");
T(xhr.responseText == bin_data);
- T(xhr.getResponseHeader("Content-Type") == "text/plain;charset=utf-8");
+ TEqualsIgnoreCase("text/plain;charset=utf-8", xhr.getResponseHeader("Content-Type"));
var xhr = CouchDB.request("GET", "/test_suite_db/bin_doc3/attachment.txt?rev=" + rev);
T(xhr.responseText == bin_data);
- T(xhr.getResponseHeader("Content-Type") == "text/plain;charset=utf-8");
+ TEqualsIgnoreCase("text/plain;charset=utf-8", xhr.getResponseHeader("Content-Type"));
var xhr = CouchDB.request("DELETE", "/test_suite_db/bin_doc3/attachment.txt?rev=" + rev);
T(xhr.status == 200);
@@ -129,7 +131,7 @@ couchTests.attachments= function(debug) {
var xhr = CouchDB.request("GET", "/test_suite_db/bin_doc3/attachment.txt?rev=" + rev);
T(xhr.status == 200);
T(xhr.responseText == bin_data);
- T(xhr.getResponseHeader("Content-Type") == "text/plain;charset=utf-8");
+ TEqualsIgnoreCase("text/plain;charset=utf-8", xhr.getResponseHeader("Content-Type"));
// empty attachments
var xhr = CouchDB.request("PUT", "/test_suite_db/bin_doc4/attachment.txt", {
@@ -156,7 +158,7 @@ couchTests.attachments= function(debug) {
// Attachment sparseness COUCHDB-220
- var docs = []
+ var docs = [];
for (var i = 0; i < 5; i++) {
var doc = {
_id: (i).toString(),
@@ -166,8 +168,8 @@ couchTests.attachments= function(debug) {
data: "VGhpcyBpcyBhIGJhc2U2NCBlbmNvZGVkIHRleHQ="
}
}
- }
- docs.push(doc)
+ };
+ docs.push(doc);
}
var saved = db.bulkSave(docs);
@@ -210,7 +212,7 @@ couchTests.attachments= function(debug) {
var xhr = CouchDB.request("GET", "/test_suite_db/bin_doc5/lorem.txt");
T(xhr.responseText == lorem);
- T(xhr.getResponseHeader("Content-Type") == "text/plain;charset=utf-8");
+ TEqualsIgnoreCase("text/plain;charset=utf-8", xhr.getResponseHeader("Content-Type"));
// test large inline attachment too
var lorem_b64 = CouchDB.request("GET", "/_utils/script/test/lorem_b64.txt").responseText;
@@ -244,4 +246,30 @@ couchTests.attachments= function(debug) {
body: "THIS IS AN ATTACHMENT. BOOYA!"
});
TEquals(400, xhr.status, "should return error code 400 Bad Request");
+
+ // test COUCHDB-809 - stubs should only require the 'stub' field
+ var bin_doc6 = {
+ _id: "bin_doc6",
+ _attachments:{
+ "foo.txt": {
+ content_type:"text/plain",
+ data: "VGhpcyBpcyBhIGJhc2U2NCBlbmNvZGVkIHRleHQ="
+ }
+ }
+ };
+ T(db.save(bin_doc6).ok);
+ // stub out the attachment
+ bin_doc6._attachments["foo.txt"] = { stub: true };
+ T(db.save(bin_doc6).ok == true);
+
+ // wrong rev pos specified
+
+ // stub out the attachment with the wrong revpos
+ bin_doc6._attachments["foo.txt"] = { stub: true, revpos: 10};
+ try {
+ T(db.save(bin_doc6).ok == true);
+ T(false && "Shouldn't get here!");
+ } catch (e) {
+ T(e.error == "missing_stub");
+ }
};
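
The COUCHDB-809 block demonstrates the attachment-stub contract: on update, a client may replace an attachment's body with `{stub: true}` to keep the stored data, and a `revpos` that doesn't match the stored one is rejected as `missing_stub`. A usage sketch, reusing the suite's `db` helper:

var doc = db.open("bin_doc6");
doc._attachments["foo.txt"] = { stub: true }; // keep stored body as-is
doc.note = "updated without resending the attachment";
T(db.save(doc).ok);
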
diff --git a/share/www/script/test/attachments_multipart.js b/share/www/script/test/attachments_multipart.js
index 2b79e559..96fe344f 100644
--- a/share/www/script/test/attachments_multipart.js
+++ b/share/www/script/test/attachments_multipart.js
@@ -58,7 +58,7 @@ couchTests.attachments_multipart= function(debug) {
var result = JSON.parse(xhr.responseText);
- T(result.ok)
+ T(result.ok);
@@ -115,8 +115,11 @@ couchTests.attachments_multipart= function(debug) {
// now test receiving multipart docs
function getBoundary(xhr) {
- var ctype = xhr.getResponseHeader("Content-Type");
-
+ if (xhr instanceof XMLHttpRequest) {
+ var ctype = xhr.getResponseHeader("Content-Type");
+ } else {
+ var ctype = xhr.headers['Content-Type'];
+ }
var ctypeArgs = ctype.split("; ").slice(1);
var boundary = null;
for(var i=0; i<ctypeArgs.length; i++) {
@@ -134,7 +137,11 @@ couchTests.attachments_multipart= function(debug) {
function parseMultipart(xhr) {
var boundary = getBoundary(xhr);
- var mimetext = xhr.responseText;
+ if (xhr instanceof XMLHttpRequest) {
+ var mimetext = xhr.responseText;
+ } else {
+ var mimetext = xhr.body;
+ }
// strip off leading boundary
var leading = "--" + boundary + "\r\n";
var last = "\r\n--" + boundary + "--";
@@ -193,7 +200,7 @@ couchTests.attachments_multipart= function(debug) {
// a certain rev).
xhr = CouchDB.request("GET", "/test_suite_db/multipart?atts_since=[\"" + firstrev + "\"]",
- {headers:{"accept": "multipart/related,*/*;"}});
+ {headers:{"accept": "multipart/related, */*"}});
T(xhr.status == 200);
@@ -207,7 +214,34 @@ couchTests.attachments_multipart= function(debug) {
T(doc._attachments['bar.txt'].follows == true);
T(sections[1].body == "this is 18 chars l");
-
+
+ // try the atts_since parameter together with the open_revs parameter
+ xhr = CouchDB.request(
+ "GET",
+ '/test_suite_db/multipart?open_revs=["' +
+ doc._rev + '"]&atts_since=["' + firstrev + '"]',
+ {headers: {"accept": "multipart/mixed"}}
+ );
+
+ T(xhr.status === 200);
+
+ sections = parseMultipart(xhr);
+ // 1 section, with a multipart/related Content-Type
+ T(sections.length === 1);
+ T(sections[0].headers['Content-Type'].indexOf('multipart/related;') === 0);
+
+ var innerSections = parseMultipart(sections[0]);
+ // 2 inner sections: a document body section plus an attachment data section
+ T(innerSections.length === 2);
+ T(innerSections[0].headers['content-type'] === 'application/json');
+
+ doc = JSON.parse(innerSections[0].body);
+
+ T(doc._attachments['foo.txt'].stub === true);
+ T(doc._attachments['bar.txt'].follows === true);
+
+ T(innerSections[1].body === "this is 18 chars l");
+
// try it with a rev that doesn't exist (should get all attachments)
xhr = CouchDB.request("GET", "/test_suite_db/multipart?atts_since=[\"1-2897589\"]",
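
The `getBoundary`/`parseMultipart` changes above generalize both helpers to accept either a live XHR or an already-parsed section, so that inner multipart/related parts can be re-parsed recursively. The boundary extraction itself reduces to one regular expression; a compact standalone version (illustrative only):

function boundaryOf(ctype) {
  // e.g. 'multipart/related; boundary="abc123"'
  var m = /boundary="?([^";]+)"?/.exec(ctype);
  return m && m[1];
}
boundaryOf('multipart/related; boundary="abc123"'); // => "abc123"
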
diff --git a/share/www/script/test/auth_cache.js b/share/www/script/test/auth_cache.js
new file mode 100644
index 00000000..e48f7370
--- /dev/null
+++ b/share/www/script/test/auth_cache.js
@@ -0,0 +1,280 @@
+// Licensed under the Apache License, Version 2.0 (the "License"); you may not
+// use this file except in compliance with the License. You may obtain a copy
+// of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations under
+// the License.
+
+couchTests.auth_cache = function(debug) {
+
+ if (debug) debugger;
+
+ // Simple secret key generator
+ function generateSecret(length) {
+ var tab = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz" +
+ "0123456789+/";
+ var secret = '';
+ for (var i = 0; i < length; i++) {
+ secret += tab.charAt(Math.floor(Math.random() * 64));
+ }
+ return secret;
+ }
+
+ var authDb = new CouchDB("test_suite_users", {"X-Couch-Full-Commit":"false"});
+ var server_config = [
+ {
+ section: "couch_httpd_auth",
+ key: "authentication_db",
+ value: authDb.name
+ },
+ {
+ section: "couch_httpd_auth",
+ key: "auth_cache_size",
+ value: "3"
+ },
+ {
+ section: "httpd",
+ key: "authentication_handlers",
+ value: "{couch_httpd_auth, default_authentication_handler}"
+ },
+ {
+ section: "couch_httpd_auth",
+ key: "secret",
+ value: generateSecret(64)
+ }
+ ];
+
+
+ function hits() {
+ var hits = CouchDB.requestStats("couchdb", "auth_cache_hits", true);
+ return hits.current || 0;
+ }
+
+
+ function misses() {
+ var misses = CouchDB.requestStats("couchdb", "auth_cache_misses", true);
+ return misses.current || 0;
+ }
+
+
+ function testFun() {
+ var hits_before,
+ misses_before,
+ hits_after,
+ misses_after;
+
+ var fdmanana = CouchDB.prepareUserDoc({
+ name: "fdmanana",
+ roles: ["dev"]
+ }, "qwerty");
+
+ T(authDb.save(fdmanana).ok);
+
+ var chris = CouchDB.prepareUserDoc({
+ name: "chris",
+ roles: ["dev", "mafia", "white_costume"]
+ }, "the_god_father");
+
+ T(authDb.save(chris).ok);
+
+ var joe = CouchDB.prepareUserDoc({
+ name: "joe",
+ roles: ["erlnager"]
+ }, "functional");
+
+ T(authDb.save(joe).ok);
+
+ var johndoe = CouchDB.prepareUserDoc({
+ name: "johndoe",
+ roles: ["user"]
+ }, "123456");
+
+ T(authDb.save(johndoe).ok);
+
+ hits_before = hits();
+ misses_before = misses();
+
+ T(CouchDB.login("fdmanana", "qwerty").ok);
+
+ hits_after = hits();
+ misses_after = misses();
+
+ T(misses_after === (misses_before + 1));
+ T(hits_after === hits_before);
+
+ hits_before = hits_after;
+ misses_before = misses_after;
+
+ T(CouchDB.logout().ok);
+ T(CouchDB.login("fdmanana", "qwerty").ok);
+
+ hits_after = hits();
+ misses_after = misses();
+
+ T(misses_after === misses_before);
+ T(hits_after === (hits_before + 1));
+
+ hits_before = hits_after;
+ misses_before = misses_after;
+
+ T(CouchDB.logout().ok);
+ T(CouchDB.login("chris", "the_god_father").ok);
+
+ hits_after = hits();
+ misses_after = misses();
+
+ T(misses_after === (misses_before + 1));
+ T(hits_after === hits_before);
+
+ hits_before = hits_after;
+ misses_before = misses_after;
+
+ T(CouchDB.logout().ok);
+ T(CouchDB.login("joe", "functional").ok);
+
+ hits_after = hits();
+ misses_after = misses();
+
+ T(misses_after === (misses_before + 1));
+ T(hits_after === hits_before);
+
+ hits_before = hits_after;
+ misses_before = misses_after;
+
+ T(CouchDB.logout().ok);
+ T(CouchDB.login("johndoe", "123456").ok);
+
+ hits_after = hits();
+ misses_after = misses();
+
+ T(misses_after === (misses_before + 1));
+ T(hits_after === hits_before);
+
+ hits_before = hits_after;
+ misses_before = misses_after;
+
+ T(CouchDB.logout().ok);
+ T(CouchDB.login("joe", "functional").ok);
+
+ hits_after = hits();
+ misses_after = misses();
+
+ // it's an MRU cache, joe was removed from cache to add johndoe
+ T(misses_after === (misses_before + 1));
+ T(hits_after === hits_before);
+
+ hits_before = hits_after;
+ misses_before = misses_after;
+
+ T(CouchDB.logout().ok);
+ T(CouchDB.login("fdmanana", "qwerty").ok);
+
+ hits_after = hits();
+ misses_after = misses();
+
+ T(misses_after === misses_before);
+ T(hits_after === (hits_before + 1));
+
+ hits_before = hits_after;
+ misses_before = misses_after;
+
+ var new_salt = CouchDB.newUuids(1)[0];
+ var new_passwd = hex_sha1("foobar" + new_salt);
+ fdmanana.salt = new_salt;
+ fdmanana.password_sha = new_passwd;
+
+ T(authDb.save(fdmanana).ok);
+ T(CouchDB.logout().ok);
+
+ // cache was refreshed
+ T(CouchDB.login("fdmanana", "qwerty").error === "unauthorized");
+ T(CouchDB.login("fdmanana", "foobar").ok);
+
+ hits_after = hits();
+ misses_after = misses();
+
+ T(misses_after === misses_before);
+ T(hits_after === (hits_before + 2));
+
+ T(CouchDB.logout().ok);
+
+ hits_before = hits_after;
+ misses_before = misses_after;
+
+ // and yet another update
+ new_salt = CouchDB.newUuids(1)[0];
+ new_passwd = hex_sha1("javascript" + new_salt);
+ fdmanana.salt = new_salt;
+ fdmanana.password_sha = new_passwd;
+
+ T(authDb.save(fdmanana).ok);
+ T(CouchDB.logout().ok);
+
+ // cache was refreshed
+ T(CouchDB.login("fdmanana", "foobar").error === "unauthorized");
+ T(CouchDB.login("fdmanana", "javascript").ok);
+
+ hits_after = hits();
+ misses_after = misses();
+
+ T(misses_after === misses_before);
+ T(hits_after === (hits_before + 2));
+
+ T(authDb.deleteDoc(fdmanana).ok);
+ T(CouchDB.logout().ok);
+
+ hits_before = hits_after;
+ misses_before = misses_after;
+
+ T(CouchDB.login("fdmanana", "javascript").error === "unauthorized");
+
+ hits_after = hits();
+ misses_after = misses();
+
+ T(misses_after === misses_before);
+ T(hits_after === (hits_before + 1));
+
+ // login, compact authentication DB, login again and verify that
+ // there was a cache hit
+ hits_before = hits_after;
+ misses_before = misses_after;
+
+ T(CouchDB.login("johndoe", "123456").ok);
+
+ hits_after = hits();
+ misses_after = misses();
+
+ T(misses_after === (misses_before + 1));
+ T(hits_after === hits_before);
+
+ T(CouchDB.logout().ok);
+ T(authDb.compact().ok);
+
+ while (authDb.info().compact_running);
+
+ hits_before = hits_after;
+ misses_before = misses_after;
+
+ T(CouchDB.login("johndoe", "123456").ok);
+
+ hits_after = hits();
+ misses_after = misses();
+
+ T(misses_after === misses_before);
+ T(hits_after === (hits_before + 1));
+
+ T(CouchDB.logout().ok);
+ }
+
+
+ authDb.deleteDb();
+ run_on_modified_server(server_config, testFun);
+
+ // cleanup
+ authDb.deleteDb();
+}
\ No newline at end of file
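
The `testFun` above repeats the same snapshot/act/compare dance around the `auth_cache_hits` and `auth_cache_misses` stats. A hypothetical helper capturing that pattern, built on the `hits()`/`misses()` functions defined in the test:

function expectCache(deltaHits, deltaMisses, action) {
  var h = hits(), m = misses();
  action();
  T(hits() === h + deltaHits, "expected " + deltaHits + " cache hit(s)");
  T(misses() === m + deltaMisses, "expected " + deltaMisses + " cache miss(es)");
}

// first login is a miss, a repeated login is a hit:
expectCache(0, 1, function() { T(CouchDB.login("fdmanana", "qwerty").ok); });
T(CouchDB.logout().ok);
expectCache(1, 0, function() { T(CouchDB.login("fdmanana", "qwerty").ok); });
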
diff --git a/share/www/script/test/basics.js b/share/www/script/test/basics.js
index 0f9ac44f..30c27c11 100644
--- a/share/www/script/test/basics.js
+++ b/share/www/script/test/basics.js
@@ -37,15 +37,14 @@ couchTests.basics = function(debug) {
TEquals(dbname,
xhr.getResponseHeader("Location").substr(-dbname.length),
"should return Location header to newly created document");
-
- TEquals("http://",
- xhr.getResponseHeader("Location").substr(0, 7),
+ TEquals(CouchDB.protocol,
+ xhr.getResponseHeader("Location").substr(0, CouchDB.protocol.length),
"should return absolute Location header to newly created document");
});
// Get the database info, check the db_name
T(db.info().db_name == "test_suite_db");
- T(CouchDB.allDbs().indexOf("test_suite_db") != -1)
+ T(CouchDB.allDbs().indexOf("test_suite_db") != -1);
// Get the database info, check the doc_count
T(db.info().doc_count == 0);
@@ -91,13 +90,13 @@ couchTests.basics = function(debug) {
emit(null, doc.b);
};
- results = db.query(mapFunction);
+ var results = db.query(mapFunction);
// verify only one document found and the result value (doc.b).
T(results.total_rows == 1 && results.rows[0].value == 16);
// reopen document we saved earlier
- existingDoc = db.open(id);
+ var existingDoc = db.open(id);
T(existingDoc.a==1);
@@ -152,18 +151,20 @@ couchTests.basics = function(debug) {
// test that the POST response has a Location header
var xhr = CouchDB.request("POST", "/test_suite_db", {
- body: JSON.stringify({"foo":"bar"})
+ body: JSON.stringify({"foo":"bar"}),
+ headers: {"Content-Type": "application/json"}
});
var resp = JSON.parse(xhr.responseText);
T(resp.ok);
var loc = xhr.getResponseHeader("Location");
T(loc, "should have a Location header");
var locs = loc.split('/');
- T(locs[4] == resp.id);
- T(locs[3] == "test_suite_db");
+ T(locs[locs.length-1] == resp.id);
+ T(locs[locs.length-2] == "test_suite_db");
// test that POSTs with an _id aren't overridden with a UUID.
var xhr = CouchDB.request("POST", "/test_suite_db", {
+ headers: {"Content-Type": "application/json"},
body: JSON.stringify({"_id": "oppossum", "yar": "matey"})
});
var resp = JSON.parse(xhr.responseText);
@@ -179,9 +180,8 @@ couchTests.basics = function(debug) {
TEquals("/test_suite_db/newdoc",
xhr.getResponseHeader("Location").substr(-21),
"should return Location header to newly created document");
-
- TEquals("http://",
- xhr.getResponseHeader("Location").substr(0, 7),
+ TEquals(CouchDB.protocol,
+ xhr.getResponseHeader("Location").substr(0, CouchDB.protocol.length),
"should return absolute Location header to newly created document");
// deleting a non-existent doc should be 404
@@ -189,12 +189,12 @@ couchTests.basics = function(debug) {
T(xhr.status == 404);
// Check for invalid document members
- bad_docs = [
+ var bad_docs = [
["goldfish", {"_zing": 4}],
["zebrafish", {"_zoom": "hello"}],
["mudfish", {"zane": "goldfish", "_fan": "something smells delicious"}],
["tastyfish", {"_bing": {"wha?": "soda can"}}]
- ]
+ ];
var test_doc = function(info) {
var data = JSON.stringify(info[1]);
xhr = CouchDB.request("PUT", "/test_suite_db/" + info[0], {body: data});
@@ -202,7 +202,10 @@ couchTests.basics = function(debug) {
result = JSON.parse(xhr.responseText);
T(result.error == "doc_validation");
- xhr = CouchDB.request("POST", "/test_suite_db/", {body: data});
+ xhr = CouchDB.request("POST", "/test_suite_db/", {
+ headers: {"Content-Type": "application/json"},
+ body: data
+ });
T(xhr.status == 500);
result = JSON.parse(xhr.responseText);
T(result.error == "doc_validation");
diff --git a/share/www/script/test/batch_save.js b/share/www/script/test/batch_save.js
index 1c8a2be9..a1b00192 100644
--- a/share/www/script/test/batch_save.js
+++ b/share/www/script/test/batch_save.js
@@ -36,7 +36,10 @@ couchTests.batch_save = function(debug) {
// repeat the tests for POST
for(i=0; i < 100; i++) {
- var resp = db.request("POST", db.uri + "?batch=ok", {body: JSON.stringify({a:1})});
+ var resp = db.request("POST", db.uri + "?batch=ok", {
+ headers: {"Content-Type": "application/json"},
+ body: JSON.stringify({a:1})
+ });
T(JSON.parse(resp.responseText).ok);
}
diff --git a/share/www/script/test/bulk_docs.js b/share/www/script/test/bulk_docs.js
index 346aea83..9095e6b3 100644
--- a/share/www/script/test/bulk_docs.js
+++ b/share/www/script/test/bulk_docs.js
@@ -51,12 +51,12 @@ couchTests.bulk_docs = function(debug) {
T(results.length == 5);
T(results[0].id == "0");
T(results[0].error == "conflict");
- T(results[0].rev === undefined); // no rev member when a conflict
+ T(typeof results[0].rev === "undefined"); // no rev member when a conflict
// but the rest are not
for (i = 1; i < 5; i++) {
T(results[i].id == i.toString());
- T(results[i].rev)
+ T(results[i].rev);
T(db.open(docs[i]._id) == null);
}
@@ -64,7 +64,7 @@ couchTests.bulk_docs = function(debug) {
// save doc 0, this will cause a conflict when we save docs[0]
var doc = db.open("0");
- docs[0] = db.open("0")
+ docs[0] = db.open("0");
db.save(doc);
docs[0].shooby = "dooby";
@@ -93,8 +93,8 @@ couchTests.bulk_docs = function(debug) {
// Regression test for failure on update/delete
var newdoc = {"_id": "foobar", "body": "baz"};
T(db.save(newdoc).ok);
- update = {"_id": newdoc._id, "_rev": newdoc._rev, "body": "blam"};
- torem = {"_id": newdoc._id, "_rev": newdoc._rev, "_deleted": true};
+ var update = {"_id": newdoc._id, "_rev": newdoc._rev, "body": "blam"};
+ var torem = {"_id": newdoc._id, "_rev": newdoc._rev, "_deleted": true};
results = db.bulkSave([update, torem]);
T(results[0].error == "conflict" || results[1].error == "conflict");
};
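
The final assertion shows `_bulk_docs` reporting per-document conflicts rather than failing the whole batch. The usual client-side response is to re-read the winners and rebase the losers; a sketch of that retry loop against the same `db` helper and `docs` array:

var results = db.bulkSave(docs);
var retries = [];
for (var i = 0; i < results.length; i++) {
  if (results[i].error == "conflict") {
    var current = db.open(results[i].id); // fetch the winning revision
    docs[i]._rev = current._rev;          // rebase our update onto it
    retries.push(docs[i]);
  }
}
if (retries.length > 0) results = db.bulkSave(retries);
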
diff --git a/share/www/script/test/changes.js b/share/www/script/test/changes.js
index 0cbf3bd6..7ce3baaa 100644
--- a/share/www/script/test/changes.js
+++ b/share/www/script/test/changes.js
@@ -12,12 +12,12 @@
function jsonp(obj) {
T(jsonp_flag == 0);
- T(obj.results.length == 1 && obj.last_seq==1, "jsonp")
+ T(obj.results.length == 1 && obj.last_seq == 1, "jsonp");
jsonp_flag = 1;
}
couchTests.changes = function(debug) {
- var db = new CouchDB("test_suite_db", {"X-Couch-Full-Commit":"false"});
+ var db = new CouchDB("test_suite_db", {"X-Couch-Full-Commit":"true"});
db.deleteDb();
db.createDb();
if (debug) debugger;
@@ -25,24 +25,32 @@ couchTests.changes = function(debug) {
var req = CouchDB.request("GET", "/test_suite_db/_changes");
var resp = JSON.parse(req.responseText);
- T(resp.results.length == 0 && resp.last_seq==0, "empty db")
+ T(resp.results.length == 0 && resp.last_seq == 0, "empty db");
var docFoo = {_id:"foo", bar:1};
T(db.save(docFoo).ok);
T(db.ensureFullCommit().ok);
+ T(db.open(docFoo._id)._id == docFoo._id);
req = CouchDB.request("GET", "/test_suite_db/_changes");
var resp = JSON.parse(req.responseText);
- T(resp.results.length == 1 && resp.last_seq==1, "one doc db")
- T(resp.results[0].changes[0].rev == docFoo._rev)
+ T(resp.last_seq == 1);
+ T(resp.results.length == 1, "one doc db");
+ T(resp.results[0].changes[0].rev == docFoo._rev);
// test with callback
- var xhr = CouchDB.request("GET", "/test_suite_db/_changes?callback=jsonp");
- T(xhr.status == 200);
- jsonp_flag = 0;
- eval(xhr.responseText);
- T(jsonp_flag == 1);
-
+
+ run_on_modified_server(
+ [{section: "httpd",
+ key: "allow_jsonp",
+ value: "true"}],
+ function() {
+ var xhr = CouchDB.request("GET", "/test_suite_db/_changes?callback=jsonp");
+ T(xhr.status == 200);
+ jsonp_flag = 0;
+ eval(xhr.responseText);
+ T(jsonp_flag == 1);
+ });
req = CouchDB.request("GET", "/test_suite_db/_changes?feed=continuous&timeout=10");
var lines = req.responseText.split("\n");
@@ -70,98 +78,111 @@ couchTests.changes = function(debug) {
// WebKit (last checked on nightly #47686) fails to process the
// async request properly while JavaScript is executing.
- var sleep = function(msecs) {
- // by making a slow sync request, we allow the waiting XHR request data
- // to be received.
- var req = CouchDB.request("GET", "/_sleep?time=" + msecs);
- T(JSON.parse(req.responseText).ok == true);
- }
-
- xhr.open("GET", "/test_suite_db/_changes?feed=continuous", true);
+ xhr.open("GET", "/test_suite_db/_changes?feed=continuous&timeout=500", true);
xhr.send("");
var docBar = {_id:"bar", bar:1};
db.save(docBar);
+
+ var lines, change1, change2;
+ waitForSuccess(function() {
+ lines = xhr.responseText.split("\n");
+ change1 = JSON.parse(lines[0]);
+ change2 = JSON.parse(lines[1]);
+ if (change2.seq != 2) {
+ throw "bad seq, try again";
+ }
+ }, "bar-only");
- sleep(100);
- var lines = xhr.responseText.split("\n");
-
- var change = JSON.parse(lines[0]);
-
- T(change.seq == 1)
- T(change.id == "foo")
-
- change = JSON.parse(lines[1]);
-
- T(change.seq == 2)
- T(change.id == "bar")
- T(change.changes[0].rev == docBar._rev)
-
+ T(change1.seq == 1);
+ T(change1.id == "foo");
+
+ T(change2.seq == 2);
+ T(change2.id == "bar");
+ T(change2.changes[0].rev == docBar._rev);
+
+
var docBaz = {_id:"baz", baz:1};
db.save(docBaz);
- sleep(100);
- var lines = xhr.responseText.split("\n");
-
- change = JSON.parse(lines[2]);
-
- T(change.seq == 3);
- T(change.id == "baz");
- T(change.changes[0].rev == docBaz._rev);
+ var change3;
+ waitForSuccess(function() {
+ lines = xhr.responseText.split("\n");
+ change3 = JSON.parse(lines[2]);
+ if (change3.seq != 3) {
+ throw "bad seq, try again";
+ }
+ });
+
+ T(change3.seq == 3);
+ T(change3.id == "baz");
+ T(change3.changes[0].rev == docBaz._rev);
xhr = CouchDB.newXhr();
// verify the heartbeat newlines are sent
- xhr.open("GET", "/test_suite_db/_changes?feed=continuous&heartbeat=10", true);
+ xhr.open("GET", "/test_suite_db/_changes?feed=continuous&heartbeat=10&timeout=500", true);
xhr.send("");
+
+ var str;
+ waitForSuccess(function() {
+ str = xhr.responseText;
+ if (str.charAt(str.length - 1) != "\n" || str.charAt(str.length - 2) != "\n") {
+ throw("keep waiting");
+ }
+ }, "heartbeat");
- sleep(100);
-
- var str = xhr.responseText;
-
- T(str.charAt(str.length - 1) == "\n")
- T(str.charAt(str.length - 2) == "\n")
+ T(str.charAt(str.length - 1) == "\n");
+ T(str.charAt(str.length - 2) == "\n");
+ // otherwise we'll continue to receive heartbeats forever
+ xhr.abort();
// test longpolling
xhr = CouchDB.newXhr();
xhr.open("GET", "/test_suite_db/_changes?feed=longpoll", true);
xhr.send("");
-
- sleep(100);
- var lines = xhr.responseText.split("\n");
- T(lines[5]=='"last_seq":3}');
-
+
+ waitForSuccess(function() {
+ lines = xhr.responseText.split("\n");
+ if (lines[5] != '"last_seq":3}') {
+ throw("still waiting");
+ }
+ }, "last_seq");
+
xhr = CouchDB.newXhr();
xhr.open("GET", "/test_suite_db/_changes?feed=longpoll&since=3", true);
xhr.send("");
- sleep(100);
-
var docBarz = {_id:"barz", bar:1};
db.save(docBarz);
-
- sleep(100);
-
- var lines = xhr.responseText.split("\n");
-
+
var parse_changes_line = function(line) {
if (line.charAt(line.length-1) == ",") {
- line = line.substring(0, line.length-1);
+ var linetrimmed = line.substring(0, line.length-1);
+ } else {
+ var linetrimmed = line;
}
- return JSON.parse(line);
- }
-
- change = parse_changes_line(lines[1]);
-
+ return JSON.parse(linetrimmed);
+ };
+
+ waitForSuccess(function() {
+ lines = xhr.responseText.split("\n");
+ if (lines[3] != '"last_seq":4}') {
+ throw("still waiting");
+ }
+ }, "change_lines");
+
+ var change = parse_changes_line(lines[1]);
T(change.seq == 4);
T(change.id == "barz");
T(change.changes[0].rev == docBarz._rev);
T(lines[3]=='"last_seq":4}');
-
+
+
}
// test the filtered changes
@@ -175,9 +196,18 @@ couchTests.changes = function(debug) {
}),
"userCtx" : stringFun(function(doc, req) {
return doc.user && (doc.user == req.userCtx.name);
- })
+ }),
+ "conflicted" : "function(doc, req) { return (doc._conflicts);}"
+ },
+ options : {
+ local_seq : true
+ },
+ views : {
+ local_seq : {
+ map : "function(doc) {emit(doc._local_seq, null)}"
+ }
}
- }
+ };
db.save(ddoc);
@@ -190,7 +220,7 @@ couchTests.changes = function(debug) {
var req = CouchDB.request("GET", "/test_suite_db/_changes?filter=changes_filter/bop");
var resp = JSON.parse(req.responseText);
- T(resp.results.length == 1);
+ T(resp.results.length == 1, "filtered/bop");
req = CouchDB.request("GET", "/test_suite_db/_changes?filter=changes_filter/dynamic&field=woox");
resp = JSON.parse(req.responseText);
@@ -198,15 +228,14 @@ couchTests.changes = function(debug) {
req = CouchDB.request("GET", "/test_suite_db/_changes?filter=changes_filter/dynamic&field=bop");
resp = JSON.parse(req.responseText);
- T(resp.results.length == 1);
+ T(resp.results.length == 1, "changes_filter/dynamic&field=bop");
if (!is_safari && xhr) { // full test requires parallel connections
// filter with longpoll
// longpoll filters full history when run without a since seq
xhr = CouchDB.newXhr();
- xhr.open("GET", "/test_suite_db/_changes?feed=longpoll&filter=changes_filter/bop", true);
+ xhr.open("GET", "/test_suite_db/_changes?feed=longpoll&filter=changes_filter/bop", false);
xhr.send("");
- sleep(100);
var resp = JSON.parse(xhr.responseText);
T(resp.last_seq == 7);
// longpoll waits until a matching change before returning
@@ -215,22 +244,53 @@ couchTests.changes = function(debug) {
xhr.send("");
db.save({"_id":"falsy", "bop" : ""}); // empty string is falsy
db.save({"_id":"bingo","bop" : "bingo"});
- sleep(100);
- var resp = JSON.parse(xhr.responseText);
+
+ waitForSuccess(function() {
+ resp = JSON.parse(xhr.responseText);
+ }, "longpoll-since");
+
T(resp.last_seq == 9);
T(resp.results && resp.results.length > 0 && resp.results[0]["id"] == "bingo", "filter the correct update");
-
- // filter with continuous
- xhr = CouchDB.newXhr();
- xhr.open("GET", "/test_suite_db/_changes?feed=continuous&filter=changes_filter/bop&timeout=200", true);
- xhr.send("");
- db.save({"_id":"rusty", "bop" : "plankton"});
- T(db.ensureFullCommit().ok);
- sleep(300);
- var lines = xhr.responseText.split("\n");
- T(JSON.parse(lines[1]).id == "bingo", lines[1]);
- T(JSON.parse(lines[2]).id == "rusty", lines[2]);
- T(JSON.parse(lines[3]).last_seq == 10, lines[3]);
+ xhr.abort();
+
+ var timeout = 500;
+ var last_seq = 10;
+ while (true) {
+
+ // filter with continuous
+ xhr = CouchDB.newXhr();
+ xhr.open("GET", "/test_suite_db/_changes?feed=continuous&filter=changes_filter/bop&timeout="+timeout, true);
+ xhr.send("");
+
+ db.save({"_id":"rusty", "bop" : "plankton"});
+ T(xhr.readyState != 4, "test client too slow");
+ var rusty = db.open("rusty", {cache_bust : new Date()});
+ T(rusty._id == "rusty");
+
+ waitForSuccess(function() { // throws an error after 5 seconds
+ if (xhr.readyState != 4) {
+ throw("still waiting");
+ }
+ }, "continuous-rusty");
+ lines = xhr.responseText.split("\n");
+ var good = false;
+ try {
+ JSON.parse(lines[3]);
+ good = true;
+ } catch(e) {
+ }
+ if (good) {
+ T(JSON.parse(lines[1]).id == "bingo", lines[1]);
+ T(JSON.parse(lines[2]).id == "rusty", lines[2]);
+ T(JSON.parse(lines[3]).last_seq == last_seq, lines[3]);
+ break;
+ } else {
+ xhr.abort();
+ db.deleteDoc(rusty);
+ timeout = timeout * 2;
+ last_seq = last_seq + 2;
+ }
+ }
}
// error conditions
@@ -257,7 +317,8 @@ couchTests.changes = function(debug) {
var req = CouchDB.request("GET",
"/test_suite_db/_changes?filter=changes_filter/bop&style=all_docs");
var resp = JSON.parse(req.responseText);
- TEquals(3, resp.results.length, "should return matching rows");
+ var expect = (!is_safari && xhr) ? 3: 1;
+ TEquals(expect, resp.results.length, "should return matching rows");
// test for userCtx
run_on_modified_server(
@@ -286,9 +347,119 @@ couchTests.changes = function(debug) {
resp = JSON.parse(req.responseText);
T(resp.results.length == 1, "userCtx");
T(resp.results[0].id == docResp.id);
- });
-
- req = CouchDB.request("GET", "/test_suite_db/_changes?limit=1");
- resp = JSON.parse(req.responseText);
- TEquals(1, resp.results.length)
+ }
+ );
+
+ req = CouchDB.request("GET", "/test_suite_db/_changes?limit=1");
+ resp = JSON.parse(req.responseText);
+ TEquals(1, resp.results.length);
+
+ //filter includes _conflicts
+ var id = db.save({'food' : 'pizza'}).id;
+ db.bulkSave([{_id: id, 'food' : 'pasta'}], {all_or_nothing:true});
+
+ req = CouchDB.request("GET", "/test_suite_db/_changes?filter=changes_filter/conflicted");
+ resp = JSON.parse(req.responseText);
+ T(resp.results.length == 1, "filter=changes_filter/conflicted");
+
+ // test with erlang filter function
+ run_on_modified_server([{
+ section: "native_query_servers",
+ key: "erlang",
+ value: "{couch_native_process, start_link, []}"
+ }], function() {
+ var erl_ddoc = {
+ _id: "_design/erlang",
+ language: "erlang",
+ filters: {
+ foo:
+ 'fun({Doc}, Req) -> ' +
+ ' Value = couch_util:get_value(<<"value">>, Doc),' +
+ ' (Value rem 2) =:= 0' +
+ 'end.'
+ }
+ };
+
+ db.deleteDb();
+ db.createDb();
+ T(db.save(erl_ddoc).ok);
+
+ var req = CouchDB.request("GET", "/test_suite_db/_changes?filter=erlang/foo");
+ var resp = JSON.parse(req.responseText);
+ T(resp.results.length === 0);
+
+ T(db.save({_id: "doc1", value : 1}).ok);
+ T(db.save({_id: "doc2", value : 2}).ok);
+ T(db.save({_id: "doc3", value : 3}).ok);
+ T(db.save({_id: "doc4", value : 4}).ok);
+
+ var req = CouchDB.request("GET", "/test_suite_db/_changes?filter=erlang/foo");
+ var resp = JSON.parse(req.responseText);
+ T(resp.results.length === 2);
+ T(resp.results[0].id === "doc2");
+ T(resp.results[1].id === "doc4");
+
+ // test filtering on docids
+ //
+
+ var options = {
+ headers: {"Content-Type": "application/json"},
+ body: JSON.stringify({"doc_ids": ["something", "anotherthing", "andmore"]})
+ };
+
+ var req = CouchDB.request("POST", "/test_suite_db/_changes?filter=_doc_ids", options);
+ var resp = JSON.parse(req.responseText);
+ T(resp.results.length === 0);
+
+ T(db.save({"_id":"something", "bop" : "plankton"}).ok);
+ var req = CouchDB.request("POST", "/test_suite_db/_changes?filter=_doc_ids", options);
+ var resp = JSON.parse(req.responseText);
+ T(resp.results.length === 1);
+ T(resp.results[0].id === "something");
+
+ T(db.save({"_id":"anotherthing", "bop" : "plankton"}).ok);
+ var req = CouchDB.request("POST", "/test_suite_db/_changes?filter=_doc_ids", options);
+ var resp = JSON.parse(req.responseText);
+ T(resp.results.length === 2);
+ T(resp.results[0].id === "something");
+ T(resp.results[1].id === "anotherthing");
+
+ var docids = JSON.stringify(["something", "anotherthing", "andmore"]),
+ req = CouchDB.request("GET", "/test_suite_db/_changes?filter=_doc_ids&doc_ids="+docids, options);
+ var resp = JSON.parse(req.responseText);
+ T(resp.results.length === 2);
+ T(resp.results[0].id === "something");
+ T(resp.results[1].id === "anotherthing");
+
+ var req = CouchDB.request("GET", "/test_suite_db/_changes?filter=_design");
+ var resp = JSON.parse(req.responseText);
+ T(resp.results.length === 1);
+ T(resp.results[0].id === "_design/erlang");
+
+
+ if (!is_safari && xhr) {
+ // filter docids with continuous
+ xhr = CouchDB.newXhr();
+ xhr.open("POST", "/test_suite_db/_changes?feed=continuous&timeout=500&since=7&filter=_doc_ids", true);
+ xhr.setRequestHeader("Content-Type", "application/json");
+
+ xhr.send(options.body);
+
+ T(db.save({"_id":"andmore", "bop" : "plankton"}).ok);
+
+
+ waitForSuccess(function() {
+ if (xhr.readyState != 4) {
+ throw("still waiting");
+ }
+ }, "andmore-only");
+
+ var line = JSON.parse(xhr.responseText.split("\n")[0]);
+ T(line.seq == 8);
+ T(line.id == "andmore");
+ }
+
+ });
+
};
+
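
Every fixed `sleep()` call in the old changes.js is replaced above by `waitForSuccess(fn, name)`, which retries a probe that throws until it succeeds. The helper lives in the test runner; a plausible synchronous reimplementation for reference (hypothetical, with an assumed 5-second deadline):

function waitForSuccess(probe, name) {
  var deadline = new Date().getTime() + 5000;
  while (true) {
    try {
      return probe();                 // done as soon as it stops throwing
    } catch (e) {
      if (new Date().getTime() > deadline) {
        throw "timeout waiting for " + (name || "condition");
      }
    }
  }
}
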
diff --git a/share/www/script/test/compact.js b/share/www/script/test/compact.js
index f63bfc57..805a3b08 100644
--- a/share/www/script/test/compact.js
+++ b/share/www/script/test/compact.js
@@ -26,11 +26,12 @@ couchTests.compact = function(debug) {
data: "VGhpcyBpcyBhIGJhc2U2NCBlbmNvZGVkIHRleHQ="
}
}
- }
+ };
T(db.save(binAttDoc).ok);
var originalsize = db.info().disk_size;
+ var start_time = db.info().instance_start_time;
for(var i in docs) {
db.deleteDoc(docs[i]);
@@ -38,17 +39,20 @@ couchTests.compact = function(debug) {
T(db.ensureFullCommit().ok);
var deletesize = db.info().disk_size;
T(deletesize > originalsize);
+ T(db.setDbProperty("_revs_limit", 666).ok);
T(db.compact().ok);
T(db.last_req.status == 202);
// compaction isn't instantaneous, loop until done
while (db.info().compact_running) {};
+ T(db.info().instance_start_time == start_time);
+ T(db.getDbProperty("_revs_limit") === 666);
T(db.ensureFullCommit().ok);
restartServer();
var xhr = CouchDB.request("GET", "/test_suite_db/bin_doc/foo.txt");
- T(xhr.responseText == "This is a base64 encoded text")
- T(xhr.getResponseHeader("Content-Type") == "text/plain")
+ T(xhr.responseText == "This is a base64 encoded text");
+ T(xhr.getResponseHeader("Content-Type") == "text/plain");
T(db.info().doc_count == 1);
T(db.info().disk_size < deletesize);
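
The bare `while (db.info().compact_running) {};` spin above hammers the server with info requests. The `/_sleep` trick the suite used elsewhere (a slow synchronous request that yields to pending work) suggests a gentler poll; a sketch assuming that harness endpoint is available:

while (db.info().compact_running) {
  // pause ~50ms between polls via the test harness /_sleep endpoint
  CouchDB.request("GET", "/_sleep?time=50");
}
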
diff --git a/share/www/script/test/config.js b/share/www/script/test/config.js
index ef74934b..e83ecfd9 100644
--- a/share/www/script/test/config.js
+++ b/share/www/script/test/config.js
@@ -29,19 +29,25 @@ couchTests.config = function(debug) {
*/
var server_port = CouchDB.host.split(':');
if(server_port.length == 1 && CouchDB.inBrowser) {
- var proto = window.location.protocol;
- if(proto == "http:") {
+ if(CouchDB.protocol == "http://") {
port = 80;
}
- if(proto == "https:") {
+ if(CouchDB.protocol == "https://") {
port = 443;
}
} else {
port = server_port.pop();
}
+ if(CouchDB.protocol == "http://") {
+ config_port = config.httpd.port;
+ }
+ if(CouchDB.protocol == "https://") {
+ config_port = config.ssl.port;
+ }
+
if(port) {
- T(config.httpd.port == port);
+ TEquals(config_port, port, "ports should match");
}
T(config.couchdb.database_dir);
@@ -50,7 +56,8 @@ couchTests.config = function(debug) {
T(config.log.level);
T(config.query_servers.javascript);
- // test that settings can be altered
+ // test that settings can be altered, and that an undefined whitelist allows any change
+ TEquals(undefined, config.httpd.config_whitelist, "Default whitelist is empty");
xhr = CouchDB.request("PUT", "/_config/test/foo",{
body : JSON.stringify("bar"),
headers: {"X-Couch-Persist": "false"}
@@ -64,4 +71,93 @@ couchTests.config = function(debug) {
xhr = CouchDB.request("GET", "/_config/test/foo");
config = JSON.parse(xhr.responseText);
T(config == "bar");
+
+ // Non-term whitelist values allow further modification of the whitelist.
+ xhr = CouchDB.request("PUT", "/_config/httpd/config_whitelist",{
+ body : JSON.stringify("!This is an invalid Erlang term!"),
+ headers: {"X-Couch-Persist": "false"}
+ });
+ TEquals(200, xhr.status, "Set config whitelist to an invalid Erlang term");
+ xhr = CouchDB.request("DELETE", "/_config/httpd/config_whitelist",{
+ headers: {"X-Couch-Persist": "false"}
+ });
+ TEquals(200, xhr.status, "Modify whitelist despite it being invalid syntax");
+
+ // Non-list whitelist values allow further modification of the whitelist.
+ xhr = CouchDB.request("PUT", "/_config/httpd/config_whitelist",{
+ body : JSON.stringify("{[yes, a_valid_erlang_term, but_unfortunately, not_a_list]}"),
+ headers: {"X-Couch-Persist": "false"}
+ });
+ TEquals(200, xhr.status, "Set config whitelist to an non-list term");
+ xhr = CouchDB.request("DELETE", "/_config/httpd/config_whitelist",{
+ headers: {"X-Couch-Persist": "false"}
+ });
+ TEquals(200, xhr.status, "Modify whitelist despite it not being a list");
+
+ // Keys not in the whitelist may not be modified.
+ xhr = CouchDB.request("PUT", "/_config/httpd/config_whitelist",{
+ body : JSON.stringify("[{httpd,config_whitelist}, {test,foo}]"),
+ headers: {"X-Couch-Persist": "false"}
+ });
+ TEquals(200, xhr.status, "Set config whitelist to something valid");
+
+ ["PUT", "DELETE"].forEach(function(method) {
+ ["test/not_foo", "not_test/foo", "neither_test/nor_foo"].forEach(function(pair) {
+ var path = "/_config/" + pair;
+ var test_name = method + " to " + path + " disallowed: not whitelisted";
+
+ xhr = CouchDB.request(method, path, {
+ body : JSON.stringify("Bummer! " + test_name),
+ headers: {"X-Couch-Persist": "false"}
+ });
+ TEquals(400, xhr.status, test_name);
+ });
+ });
+
+ // Keys in the whitelist may be modified.
+ ["PUT", "DELETE"].forEach(function(method) {
+ xhr = CouchDB.request(method, "/_config/test/foo",{
+ body : JSON.stringify(method + " to whitelisted config variable"),
+ headers: {"X-Couch-Persist": "false"}
+ });
+ TEquals(200, xhr.status, "Keys in the whitelist may be modified");
+ });
+
+ // Non-2-tuples in the whitelist are ignored
+ xhr = CouchDB.request("PUT", "/_config/httpd/config_whitelist",{
+ body : JSON.stringify("[{httpd,config_whitelist}, these, {are}, {nOt, 2, tuples}," +
+ " [so], [they, will], [all, become, noops], {test,foo}]"),
+ headers: {"X-Couch-Persist": "false"}
+ });
+ TEquals(200, xhr.status, "Set config whitelist with some inert values");
+ ["PUT", "DELETE"].forEach(function(method) {
+ xhr = CouchDB.request(method, "/_config/test/foo",{
+ body : JSON.stringify(method + " to whitelisted config variable"),
+ headers: {"X-Couch-Persist": "false"}
+ });
+ TEquals(200, xhr.status, "Update whitelisted variable despite invalid entries");
+ });
+
+ // Atoms, binaries, and strings suffice as whitelist sections and keys.
+ ["{test,foo}", '{"test","foo"}', '{<<"test">>,<<"foo">>}'].forEach(function(pair) {
+ xhr = CouchDB.request("PUT", "/_config/httpd/config_whitelist",{
+ body : JSON.stringify("[{httpd,config_whitelist}, " + pair + "]"),
+ headers: {"X-Couch-Persist": "false"}
+ });
+ TEquals(200, xhr.status, "Set config whitelist to include " + pair);
+
+ var pair_format = {"t":"tuple", '"':"string", "<":"binary"}[pair[1]];
+ ["PUT", "DELETE"].forEach(function(method) {
+ xhr = CouchDB.request(method, "/_config/test/foo",{
+ body : JSON.stringify(method + " with " + pair_format),
+ headers: {"X-Couch-Persist": "false"}
+ });
+ TEquals(200, xhr.status, "Whitelist works with " + pair_format);
+ });
+ });
+
+ xhr = CouchDB.request("DELETE", "/_config/httpd/config_whitelist",{
+ headers: {"X-Couch-Persist": "false"}
+ });
+ TEquals(200, xhr.status, "Reset config whitelist to undefined");
};
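
To summarize the whitelist contract exercised above: once a list of `{section,key}` 2-tuples is installed, writes to any pair not in it return 400, while malformed or non-list whitelists fail open. A condensed usage sketch (the `log/level` and `database_dir` keys here are illustrative choices):

var xhr = CouchDB.request("PUT", "/_config/httpd/config_whitelist", {
  body: JSON.stringify("[{httpd,config_whitelist}, {log,level}]"),
  headers: {"X-Couch-Persist": "false"}
});
TEquals(200, xhr.status, "whitelist installed");

xhr = CouchDB.request("PUT", "/_config/log/level", {
  body: JSON.stringify("debug"),
  headers: {"X-Couch-Persist": "false"}
});
TEquals(200, xhr.status, "whitelisted key accepts writes");

xhr = CouchDB.request("PUT", "/_config/couchdb/database_dir", {
  body: JSON.stringify("/tmp"),
  headers: {"X-Couch-Persist": "false"}
});
TEquals(400, xhr.status, "non-whitelisted key is rejected");
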
diff --git a/share/www/script/test/conflicts.js b/share/www/script/test/conflicts.js
index b8b93946..7258bc31 100644
--- a/share/www/script/test/conflicts.js
+++ b/share/www/script/test/conflicts.js
@@ -44,7 +44,7 @@ couchTests.conflicts = function(debug) {
var changes = db.changes();
- T( changes.results.length == 1)
+ T(changes.results.length == 1);
// Now clear out the _rev member and save. This indicates this document is
// new, not based on an existing revision.
diff --git a/share/www/script/test/content_negotiation.js b/share/www/script/test/content_negotiation.js
index 171dbb3d..c79df948 100644
--- a/share/www/script/test/content_negotiation.js
+++ b/share/www/script/test/content_negotiation.js
@@ -17,11 +17,14 @@ couchTests.content_negotiation = function(debug) {
if (debug) debugger;
var xhr;
- xhr = CouchDB.request("GET", "/test_suite_db/");
- TEquals("text/plain;charset=utf-8", xhr.getResponseHeader("Content-Type"));
+ // with no accept header
+ var req = CouchDB.newXhr();
+ req.open("GET", "/test_suite_db/", false);
+ req.send("");
+ TEquals("text/plain;charset=utf-8", req.getResponseHeader("Content-Type"));
// make sure JSON responses end in a newline
- var text = xhr.responseText;
+ var text = req.responseText;
TEquals("\n", text[text.length-1]);
xhr = CouchDB.request("GET", "/test_suite_db/", {
diff --git a/share/www/script/test/cookie_auth.js b/share/www/script/test/cookie_auth.js
index 9eadfee0..8ad993cc 100644
--- a/share/www/script/test/cookie_auth.js
+++ b/share/www/script/test/cookie_auth.js
@@ -46,39 +46,39 @@ couchTests.cookie_auth = function(debug) {
// Create a user
var jasonUserDoc = CouchDB.prepareUserDoc({
- username: "Jason Davies",
+ name: "Jason Davies",
roles: ["dev"]
}, password);
T(usersDb.save(jasonUserDoc).ok);
var checkDoc = usersDb.open(jasonUserDoc._id);
- T(checkDoc.username == "Jason Davies");
+ T(checkDoc.name == "Jason Davies");
var jchrisUserDoc = CouchDB.prepareUserDoc({
- username: "jchris@apache.org"
+ name: "jchris@apache.org"
}, "funnybone");
T(usersDb.save(jchrisUserDoc).ok);
// make sure we cant create duplicate users
var duplicateJchrisDoc = CouchDB.prepareUserDoc({
- username: "jchris@apache.org"
+ name: "jchris@apache.org"
}, "eh, Boo-Boo?");
try {
- usersDb.save(duplicateJchrisDoc)
+ usersDb.save(duplicateJchrisDoc);
T(false && "Can't create duplicate user names. Should have thrown an error.");
} catch (e) {
T(e.error == "conflict");
T(usersDb.last_req.status == 409);
}
- // we can't create _usernames
+ // we can't create _names
var underscoreUserDoc = CouchDB.prepareUserDoc({
- username: "_why"
+ name: "_why"
}, "copperfield");
try {
- usersDb.save(underscoreUserDoc)
+ usersDb.save(underscoreUserDoc);
T(false && "Can't create underscore user names. Should have thrown an error.");
} catch (e) {
T(e.error == "forbidden");
@@ -87,46 +87,59 @@ couchTests.cookie_auth = function(debug) {
// we can't create docs with malformed ids
var badIdDoc = CouchDB.prepareUserDoc({
- username: "foo"
+ name: "foo"
}, "bar");
badIdDoc._id = "org.apache.couchdb:w00x";
try {
- usersDb.save(badIdDoc)
+ usersDb.save(badIdDoc);
T(false && "Can't create malformed docids. Should have thrown an error.");
} catch (e) {
T(e.error == "forbidden");
T(usersDb.last_req.status == 403);
}
-
- try {
- usersDb.save(underscoreUserDoc)
- T(false && "Can't create underscore user names. Should have thrown an error.");
- } catch (e) {
- T(e.error == "forbidden");
- T(usersDb.last_req.status == 403);
- }
// login works
T(CouchDB.login('Jason Davies', password).ok);
- T(CouchDB.session().name == 'Jason Davies');
+ T(CouchDB.session().userCtx.name == 'Jason Davies');
+ // JSON login works
+ var xhr = CouchDB.request("POST", "/_session", {
+ headers: {"Content-Type": "application/json"},
+ body: JSON.stringify({
+ name: 'Jason Davies',
+ password: password
+ })
+ });
+
+ T(JSON.parse(xhr.responseText).ok);
+ T(CouchDB.session().userCtx.name == 'Jason Davies');
+
// update one's own credentials document
jasonUserDoc.foo=2;
T(usersDb.save(jasonUserDoc).ok);
+ T(CouchDB.session().userCtx.roles.indexOf("_admin") == -1);
+ // can't delete another users doc unless you are admin
+ try {
+ usersDb.deleteDoc(jchrisUserDoc);
+ T(false && "Can't delete other users docs. Should have thrown an error.");
+ } catch (e) {
+ T(e.error == "forbidden");
+ T(usersDb.last_req.status == 403);
+ }
// TODO should login() throw an exception here?
T(!CouchDB.login('Jason Davies', "2.71828").ok);
T(!CouchDB.login('Robert Allen Zimmerman', 'd00d').ok);
// a failed login attempt should log you out
- T(CouchDB.session().name != 'Jason Davies');
+ T(CouchDB.session().userCtx.name != 'Jason Davies');
// test redirect
xhr = CouchDB.request("POST", "/_session?next=/", {
headers: {"Content-Type": "application/x-www-form-urlencoded"},
- body: "username=Jason%20Davies&password="+encodeURIComponent(password)
+ body: "name=Jason%20Davies&password="+encodeURIComponent(password)
});
// should this be a redirect code instead of 200?
// The cURL adapter is returning the expected 302 here.
@@ -134,23 +147,23 @@ couchTests.cookie_auth = function(debug) {
// to follow the redirect, ie, the browser follows and does a
// GET on the returned Location
if (xhr.status == 200) {
- T(/Welcome/.test(xhr.responseText))
+ T(/Welcome/.test(xhr.responseText));
} else {
- T(xhr.status == 302)
- T(xhr.getResponseHeader("Location"))
+ T(xhr.status == 302);
+ T(xhr.getResponseHeader("Location"));
}
// test users db validations
//
// test that you can't update docs unless you are logged in as the user (or are admin)
T(CouchDB.login("jchris@apache.org", "funnybone").ok);
- T(CouchDB.session().name == "jchris@apache.org");
- T(CouchDB.session().roles.length == 0);
+ T(CouchDB.session().userCtx.name == "jchris@apache.org");
+ T(CouchDB.session().userCtx.roles.length == 0);
jasonUserDoc.foo=3;
try {
- usersDb.save(jasonUserDoc)
+ usersDb.save(jasonUserDoc);
T(false && "Can't update someone else's user doc. Should have thrown an error.");
} catch (e) {
T(e.error == "forbidden");
@@ -161,7 +174,7 @@ couchTests.cookie_auth = function(debug) {
jchrisUserDoc.roles = ["foo"];
try {
- usersDb.save(jchrisUserDoc)
+ usersDb.save(jchrisUserDoc);
T(false && "Can't set roles unless you are admin. Should have thrown an error.");
} catch (e) {
T(e.error == "forbidden");
@@ -169,7 +182,7 @@ couchTests.cookie_auth = function(debug) {
}
T(CouchDB.logout().ok);
- T(CouchDB.session().roles[0] == "_admin");
+ T(CouchDB.session().userCtx.roles[0] == "_admin");
jchrisUserDoc.foo = ["foo"];
T(usersDb.save(jchrisUserDoc).ok);
@@ -178,7 +191,7 @@ couchTests.cookie_auth = function(debug) {
jchrisUserDoc.roles = ["_bar"];
try {
- usersDb.save(jchrisUserDoc)
+ usersDb.save(jchrisUserDoc);
T(false && "Can't add system roles to user's db. Should have thrown an error.");
} catch (e) {
T(e.error == "forbidden");
@@ -187,24 +200,24 @@ couchTests.cookie_auth = function(debug) {
// make sure the foo role has been applied
T(CouchDB.login("jchris@apache.org", "funnybone").ok);
- T(CouchDB.session().name == "jchris@apache.org");
- T(CouchDB.session().roles.indexOf("_admin") == -1);
- T(CouchDB.session().roles.indexOf("foo") != -1);
+ T(CouchDB.session().userCtx.name == "jchris@apache.org");
+ T(CouchDB.session().userCtx.roles.indexOf("_admin") == -1);
+ T(CouchDB.session().userCtx.roles.indexOf("foo") != -1);
// now let's make jchris a server admin
T(CouchDB.logout().ok);
- T(CouchDB.session().roles[0] == "_admin");
- T(CouchDB.session().name == null);
+ T(CouchDB.session().userCtx.roles[0] == "_admin");
+ T(CouchDB.session().userCtx.name == null);
// set the -hashed- password so the salt matches
// todo ask on the ML about this
run_on_modified_server([{section: "admins",
key: "jchris@apache.org", value: "funnybone"}], function() {
T(CouchDB.login("jchris@apache.org", "funnybone").ok);
- T(CouchDB.session().name == "jchris@apache.org");
- T(CouchDB.session().roles.indexOf("_admin") != -1);
+ T(CouchDB.session().userCtx.name == "jchris@apache.org");
+ T(CouchDB.session().userCtx.roles.indexOf("_admin") != -1);
// test that jchris still has the foo role
- T(CouchDB.session().roles.indexOf("foo") != -1);
+ T(CouchDB.session().userCtx.roles.indexOf("foo") != -1);
// should work even when user doc has no password
jchrisUserDoc = usersDb.open(jchrisUserDoc._id);
@@ -214,13 +227,13 @@ couchTests.cookie_auth = function(debug) {
T(CouchDB.logout().ok);
T(CouchDB.login("jchris@apache.org", "funnybone").ok);
var s = CouchDB.session();
- T(s.name == "jchris@apache.org");
- T(s.roles.indexOf("_admin") != -1);
+ T(s.userCtx.name == "jchris@apache.org");
+ T(s.userCtx.roles.indexOf("_admin") != -1);
// test session info
- T(s.info.authenticated == "{couch_httpd_auth, cookie_authentication_handler}");
- T(s.info.user_db == "test_suite_users");
+ T(s.info.authenticated == "cookie");
+ T(s.info.authentication_db == "test_suite_users");
// test that jchris still has the foo role
- T(CouchDB.session().roles.indexOf("foo") != -1);
+ T(CouchDB.session().userCtx.roles.indexOf("foo") != -1);
});
} finally {
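
The hunks above track the new session API: what used to hang directly off CouchDB.session() (name, roles) now lives under a nested userCtx object, and /_session additionally accepts a JSON login body. A minimal sketch of the new shape, assuming the suite's couch.js helpers (CouchDB, T) are loaded and "secret" stands in for a real password:

    // sketch: JSON login, then inspect the nested userCtx
    var xhr = CouchDB.request("POST", "/_session", {
      headers: {"Content-Type": "application/json"},
      body: JSON.stringify({name: "Jason Davies", password: "secret"})
    });
    T(JSON.parse(xhr.responseText).ok);
    var s = CouchDB.session();
    T(s.userCtx.name == "Jason Davies");        // was: s.name
    T(s.userCtx.roles.indexOf("_admin") == -1); // was: s.roles
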
diff --git a/share/www/script/test/copy_doc.js b/share/www/script/test/copy_doc.js
index a6de1892..99e3c7fe 100644
--- a/share/www/script/test/copy_doc.js
+++ b/share/www/script/test/copy_doc.js
@@ -36,6 +36,9 @@ couchTests.copy_doc = function(debug) {
});
T(xhr.status == 409); // conflict
+ var xhr = CouchDB.request("COPY", "/test_suite_db/doc_to_be_copied2");
+ T(xhr.status == 400); // bad request (no Destination header)
+
var rev = db.open("doc_to_be_overwritten")._rev;
var xhr = CouchDB.request("COPY", "/test_suite_db/doc_to_be_copied2", {
headers: {"Destination":"doc_to_be_overwritten?rev=" + rev}
diff --git a/share/www/script/test/delayed_commits.js b/share/www/script/test/delayed_commits.js
index d0c87182..dbb072fb 100644
--- a/share/www/script/test/delayed_commits.js
+++ b/share/www/script/test/delayed_commits.js
@@ -122,4 +122,33 @@ couchTests.delayed_commits = function(debug) {
}
});
+
+ // Test that a conflict can't cause delayed commits to fail
+ run_on_modified_server(
+ [{section: "couchdb",
+ key: "delayed_commits",
+ value: "true"}],
+
+ function() {
+    // First save a document and commit it
+    T(db.save({_id:"6",a:2,b:4}).ok);
+    T(db.ensureFullCommit().ok);
+    // Generate a conflict
+    try {
+      db.save({_id:"6",a:2,b:4});
+    } catch (e) {
+      T(e.error == "conflict");
+    }
+    // Wait for the delayed commit interval to pass
+    var time = new Date();
+    while (new Date() - time < 2000);
+    // Save a new doc
+    T(db.save({_id:"7",a:2,b:4}).ok);
+    // Wait for the delayed commit interval to pass
+    var time = new Date();
+    while (new Date() - time < 2000);
+    // Crash the server and make sure the last doc was written
+ restartServer();
+ T(db.open("7") != null);
+ });
};
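
The added test busy-waits past the delayed-commit flush window (assumed to default to one second) and then restarts the server, proving that an earlier conflict did not wedge the committer. The wait-then-crash idiom, as a sketch against the harness helpers:

    // sketch: outwait the delayed_commits interval, then crash-test
    function sleepBusy(ms) {            // the suite has no async sleep, so it spins
      var start = new Date();
      while (new Date() - start < ms);
    }
    T(db.save({_id: "x", value: 1}).ok);
    sleepBusy(2000);                    // > assumed 1s commit interval
    restartServer();                    // unclean restart via the test harness
    T(db.open("x") != null);            // the write must have reached disk
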
diff --git a/share/www/script/test/design_docs.js b/share/www/script/test/design_docs.js
index 9318d2bc..f2f63841 100644
--- a/share/www/script/test/design_docs.js
+++ b/share/www/script/test/design_docs.js
@@ -13,139 +13,309 @@
couchTests.design_docs = function(debug) {
var db = new CouchDB("test_suite_db", {"X-Couch-Full-Commit":"false"});
var db2 = new CouchDB("test_suite_db_a", {"X-Couch-Full-Commit":"false"});
+
+ if (debug) debugger;
+
db.deleteDb();
db.createDb();
db2.deleteDb();
db2.createDb();
- if (debug) debugger;
- run_on_modified_server(
- [{section: "query_server_config",
+ var server_config = [
+ {
+ section: "query_server_config",
key: "reduce_limit",
- value: "false"}],
-function() {
+ value: "false"
+ }
+ ];
- var numDocs = 500;
+ var testFun = function() {
+ var numDocs = 500;
- function makebigstring(power) {
- var str = "a";
- while(power-- > 0) {
- str = str + str;
+ function makebigstring(power) {
+ var str = "a";
+ while(power-- > 0) {
+ str = str + str;
+ }
+ return str;
}
- return str;
- }
-
- var designDoc = {
- _id:"_design/test", // turn off couch.js id escaping?
- language: "javascript",
- views: {
- all_docs_twice: {map: "function(doc) { emit(doc.integer, null); emit(doc.integer, null) }"},
- no_docs: {map: "function(doc) {}"},
- single_doc: {map: "function(doc) { if (doc._id == \"1\") { emit(1, null) }}"},
- summate: {map:"function (doc) {emit(doc.integer, doc.integer)};",
- reduce:"function (keys, values) { return sum(values); };"},
- summate2: {map:"function (doc) {emit(doc.integer, doc.integer)};",
- reduce:"function (keys, values) { return sum(values); };"},
- huge_src_and_results: {map: "function(doc) { if (doc._id == \"1\") { emit(\"" + makebigstring(16) + "\", null) }}",
- reduce:"function (keys, values) { return \"" + makebigstring(16) + "\"; };"}
- },
- shows: {
- simple: "function() {return 'ok'};"
- }
- }
- var xhr = CouchDB.request("PUT", "/test_suite_db_a/_design/test", {body: JSON.stringify(designDoc)});
- var resp = JSON.parse(xhr.responseText);
-
- TEquals(resp.rev, db.save(designDoc).rev);
-
- // test that editing a show fun on the ddoc results in a change in output
- var xhr = CouchDB.request("GET", "/test_suite_db/_design/test/_show/simple");
- T(xhr.status == 200);
- TEquals(xhr.responseText, "ok");
-
- designDoc.shows.simple = "function() {return 'ko'};"
- T(db.save(designDoc).ok);
-
- var xhr = CouchDB.request("GET", "/test_suite_db/_design/test/_show/simple");
- T(xhr.status == 200);
- TEquals(xhr.responseText, "ko");
-
- var xhr = CouchDB.request("GET", "/test_suite_db_a/_design/test/_show/simple?cache=buster");
- T(xhr.status == 200);
- TEquals("ok", xhr.responseText, 'query server used wrong ddoc');
-
- // test that we get design doc info back
- var dinfo = db.designInfo("_design/test");
- TEquals("test", dinfo.name);
- var vinfo = dinfo.view_index;
- TEquals(51, vinfo.disk_size);
- TEquals(false, vinfo.compact_running);
- TEquals("3f88e53b303e2342e49a66c538c30679", vinfo.signature);
-
- db.bulkSave(makeDocs(1, numDocs + 1));
-
- // test that the _all_docs view returns correctly with keys
- var results = db.allDocs({startkey:"_design", endkey:"_design0"});
- T(results.rows.length == 1);
-
- for (var loop = 0; loop < 2; loop++) {
- var rows = db.view("test/all_docs_twice").rows;
- for (var i = 0; i < numDocs; i++) {
- T(rows[2*i].key == i+1);
- T(rows[(2*i)+1].key == i+1);
- }
- T(db.view("test/no_docs").total_rows == 0)
- T(db.view("test/single_doc").total_rows == 1)
+
+ var designDoc = {
+ _id: "_design/test",
+ language: "javascript",
+ whatever : {
+ stringzone : "exports.string = 'plankton';",
+ commonjs : {
+ whynot : "exports.test = require('../stringzone'); " +
+ "exports.foo = require('whatever/stringzone');",
+ upper : "exports.testing = require('./whynot').test.string.toUpperCase()+" +
+ "module.id+require('./whynot').foo.string"
+ }
+ },
+ views: {
+ all_docs_twice: {
+ map:
+ (function(doc) {
+ emit(doc.integer, null);
+ emit(doc.integer, null);
+ }).toString()
+ },
+ no_docs: {
+ map:
+ (function(doc) {
+ }).toString()
+ },
+ single_doc: {
+ map:
+ (function(doc) {
+ if (doc._id === "1") {
+ emit(1, null);
+ }
+ }).toString()
+ },
+ summate: {
+ map:
+ (function(doc) {
+ emit(doc.integer, doc.integer);
+ }).toString(),
+ reduce:
+ (function(keys, values) {
+ return sum(values);
+ }).toString()
+ },
+ summate2: {
+ map:
+ (function(doc) {
+ emit(doc.integer, doc.integer);
+ }).toString(),
+ reduce:
+ (function(keys, values) {
+ return sum(values);
+ }).toString()
+ },
+ huge_src_and_results: {
+ map:
+ (function(doc) {
+ if (doc._id === "1") {
+ emit(makebigstring(16), null);
+ }
+ }).toString(),
+ reduce:
+ (function(keys, values) {
+ return makebigstring(16);
+ }).toString()
+ },
+ lib : {
+ baz : "exports.baz = 'bam';",
+ foo : {
+ foo : "exports.foo = 'bar';",
+ boom : "exports.boom = 'ok';",
+ zoom : "exports.zoom = 'yeah';"
+ }
+ },
+ commonjs : {
+ map :
+ (function(doc) {
+ emit(null, require('views/lib/foo/boom').boom);
+ }).toString()
+ }
+ },
+ shows: {
+ simple:
+ (function() {
+ return 'ok';
+ }).toString(),
+ requirey:
+ (function() {
+ var lib = require('whatever/commonjs/upper');
+ return lib.testing;
+ }).toString(),
+ circular:
+ (function() {
+ var lib = require('whatever/commonjs/upper');
+ return JSON.stringify(this);
+ }).toString()
+ }
+ }; // designDoc
+
+ var xhr = CouchDB.request(
+ "PUT", "/test_suite_db_a/_design/test", {body: JSON.stringify(designDoc)}
+ );
+ var resp = JSON.parse(xhr.responseText);
+
+ TEquals(resp.rev, db.save(designDoc).rev);
+
+ // test that editing a show fun on the ddoc results in a change in output
+ xhr = CouchDB.request("GET", "/test_suite_db/_design/test/_show/simple");
+ T(xhr.status == 200);
+ TEquals(xhr.responseText, "ok");
+
+ designDoc.shows.simple = (function() {
+ return 'ko';
+ }).toString();
+ T(db.save(designDoc).ok);
+
+ xhr = CouchDB.request("GET", "/test_suite_db/_design/test/_show/simple");
+ T(xhr.status == 200);
+ TEquals(xhr.responseText, "ko");
+
+ xhr = CouchDB.request(
+ "GET", "/test_suite_db_a/_design/test/_show/simple?cache=buster"
+ );
+ T(xhr.status == 200);
+ TEquals("ok", xhr.responseText, 'query server used wrong ddoc');
+
+ // test commonjs require
+ xhr = CouchDB.request("GET", "/test_suite_db/_design/test/_show/requirey");
+ T(xhr.status == 200);
+ TEquals("PLANKTONwhatever/commonjs/upperplankton", xhr.responseText);
+
+ xhr = CouchDB.request("GET", "/test_suite_db/_design/test/_show/circular");
+ T(xhr.status == 200);
+ TEquals("javascript", JSON.parse(xhr.responseText).language);
+
+ var prev_view_sig = db.designInfo("_design/test").view_index.signature;
+ var prev_view_size = db.designInfo("_design/test").view_index.disk_size;
+
+ db.bulkSave(makeDocs(1, numDocs + 1));
T(db.ensureFullCommit().ok);
- restartServer();
- };
-
- // test when language not specified, Javascript is implied
- var designDoc2 = {
- _id:"_design/test2",
- // language: "javascript",
- views: {
- single_doc: {map: "function(doc) { if (doc._id == \"1\") { emit(1, null) }}"}
- }
- };
- T(db.save(designDoc2).ok);
- T(db.view("test2/single_doc").total_rows == 1);
+ // test that we get correct design doc info back,
+ // and also that GET /db/_design/test/_info
+ // hasn't triggered an update of the views
+ db.view("test/summate", {stale: "ok"}); // make sure view group's open
+ for (var i = 0; i < 2; i++) {
+ var dinfo = db.designInfo("_design/test");
+ TEquals("test", dinfo.name);
+ var vinfo = dinfo.view_index;
+ TEquals(prev_view_size, vinfo.disk_size, "view group disk size didn't change");
+ TEquals(false, vinfo.compact_running);
+ TEquals(prev_view_sig, vinfo.signature, 'ddoc sig');
+ // wait some time (there were issues where an update
+ // of the views had been triggered in the background)
+ var start = new Date().getTime();
+ while (new Date().getTime() < start + 2000);
+ TEquals(0, db.view("test/all_docs_twice", {stale: "ok"}).total_rows, 'view info');
+ TEquals(0, db.view("test/single_doc", {stale: "ok"}).total_rows, 'view info');
+ TEquals(0, db.view("test/summate", {stale: "ok"}).rows.length, 'view info');
+ T(db.ensureFullCommit().ok);
+ restartServer();
+ };
+
+ db.bulkSave(makeDocs(numDocs + 1, numDocs * 2 + 1));
+ T(db.ensureFullCommit().ok);
+
+ // open view group
+ db.view("test/summate", {stale: "ok"});
+ // wait so the views can get initialized
+ var start = new Date().getTime();
+ while (new Date().getTime() < start + 2000);
+
+ // test that POST /db/_view_cleanup
+ // doesn't trigger an update of the views
+ var len1 = db.view("test/all_docs_twice", {stale: "ok"}).total_rows;
+ var len2 = db.view("test/single_doc", {stale: "ok"}).total_rows;
+ var len3 = db.view("test/summate", {stale: "ok"}).rows.length;
+ for (i = 0; i < 2; i++) {
+ T(db.viewCleanup().ok);
+ // wait some time (there were issues where an update
+ // of the views had been triggered in the background)
+ start = new Date().getTime();
+ while (new Date().getTime() < start + 2000);
+ TEquals(len1, db.view("test/all_docs_twice", {stale: "ok"}).total_rows, 'view cleanup');
+ TEquals(len2, db.view("test/single_doc", {stale: "ok"}).total_rows, 'view cleanup');
+ TEquals(len3, db.view("test/summate", {stale: "ok"}).rows.length, 'view cleanup');
+ T(db.ensureFullCommit().ok);
+ restartServer();
+ // we'll test whether the view group stays closed
+ // and the views stay uninitialized (they should!)
+ len1 = len2 = len3 = 0;
+ };
+
+ // test commonjs in map functions
+ resp = db.view("test/commonjs", {limit:1});
+ T(resp.rows[0].value == 'ok');
+
+ // test that the _all_docs view returns correctly with keys
+ var results = db.allDocs({startkey:"_design", endkey:"_design0"});
+ T(results.rows.length == 1);
+
+ for (i = 0; i < 2; i++) {
+ var rows = db.view("test/all_docs_twice").rows;
+ for (var j = 0; j < numDocs; j++) {
+ T(rows[2 * j].key == (j + 1));
+ T(rows[(2 * j) + 1].key == (j + 1));
+ };
+ T(db.view("test/no_docs").total_rows == 0);
+ T(db.view("test/single_doc").total_rows == 1);
+ T(db.ensureFullCommit().ok);
+ restartServer();
+ };
- var summate = function(N) {return (N+1)*N/2;};
- var result = db.view("test/summate");
- T(result.rows[0].value == summate(numDocs));
+  // test when language not specified, JavaScript is implied
+ var designDoc2 = {
+ _id: "_design/test2",
+ // language: "javascript",
+ views: {
+ single_doc: {
+ map:
+ (function(doc) {
+ if (doc._id === "1") {
+ emit(1, null);
+ }
+ }).toString()
+ }
+ }
+ };
- result = db.view("test/summate", {startkey:4,endkey:4});
- T(result.rows[0].value == 4);
+ T(db.save(designDoc2).ok);
+ T(db.view("test2/single_doc").total_rows == 1);
- result = db.view("test/summate", {startkey:4,endkey:5});
- T(result.rows[0].value == 9);
+ var summate = function(N) {
+ return (N + 1) * (N / 2);
+ };
+ var result = db.view("test/summate");
+ T(result.rows[0].value == summate(numDocs * 2));
- result = db.view("test/summate", {startkey:4,endkey:6});
- T(result.rows[0].value == 15);
+ result = db.view("test/summate", {startkey: 4, endkey: 4});
+ T(result.rows[0].value == 4);
- // Verify that a shared index (view def is an exact copy of "summate")
- // does not confuse the reduce stage
- result = db.view("test/summate2", {startkey:4,endkey:6});
- T(result.rows[0].value == 15);
+ result = db.view("test/summate", {startkey: 4, endkey: 5});
+ T(result.rows[0].value == 9);
- for(var i=1; i<numDocs/2; i+=30) {
- result = db.view("test/summate", {startkey:i,endkey:numDocs-i});
- T(result.rows[0].value == summate(numDocs-i) - summate(i-1));
- }
+ result = db.view("test/summate", {startkey: 4, endkey: 6});
+ T(result.rows[0].value == 15);
- T(db.deleteDoc(designDoc).ok);
- T(db.open(designDoc._id) == null);
- T(db.view("test/no_docs") == null);
+ // test start_key and end_key aliases
+ result = db.view("test/summate", {start_key: 4, end_key: 6});
+ T(result.rows[0].value == 15);
- T(db.ensureFullCommit().ok);
- restartServer();
- T(db.open(designDoc._id) == null);
- T(db.view("test/no_docs") == null);
+ // Verify that a shared index (view def is an exact copy of "summate")
+ // does not confuse the reduce stage
+ result = db.view("test/summate2", {startkey: 4, endkey: 6});
+ T(result.rows[0].value == 15);
- // trigger ddoc cleanup
- T(db.viewCleanup().ok);
+ for(i = 1; i < (numDocs / 2); i += 30) {
+ result = db.view("test/summate", {startkey: i, endkey: (numDocs - i)});
+ T(result.rows[0].value == summate(numDocs - i) - summate(i - 1));
+ }
+
+ T(db.deleteDoc(designDoc).ok);
+ T(db.open(designDoc._id) == null);
+ T(db.view("test/no_docs") == null);
+
+ T(db.ensureFullCommit().ok);
+ restartServer();
+ T(db.open(designDoc._id) == null);
+ T(db.view("test/no_docs") == null);
-});
+ // trigger ddoc cleanup
+ T(db.viewCleanup().ok);
+  }; // end of testFun
+
+ run_on_modified_server(server_config, testFun);
+
+ // cleanup
+ db.deleteDb();
+ db2.deleteDb();
};
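
The rewritten design_docs test folds in CommonJS support: require() paths resolve relative to the design doc, and map functions may only require modules under views/lib. A compact sketch of the layout, with hypothetical module names:

    // sketch: CommonJS modules inside a ddoc
    var ddoc = {
      _id: "_design/cjs",
      whatever: {mod: "exports.greeting = 'hi';"},
      views: {
        lib: {foo: "exports.bar = 'baz';"},  // the only tree visible to map funs
        demo: {map: "function(doc) { emit(null, require('views/lib/foo').bar); }"}
      },
      shows: {
        hello: "function() { return require('whatever/mod').greeting; }"
      }
    };
    T(db.save(ddoc).ok);
    TEquals("hi", CouchDB.request("GET",
      "/test_suite_db/_design/cjs/_show/hello").responseText);
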
diff --git a/share/www/script/test/erlang_views.js b/share/www/script/test/erlang_views.js
index e9e0363f..7eddab40 100644
--- a/share/www/script/test/erlang_views.js
+++ b/share/www/script/test/erlang_views.js
@@ -29,8 +29,8 @@ couchTests.erlang_views = function(debug) {
T(db.save(doc).ok);
var mfun = 'fun({Doc}) -> ' +
- ' K = proplists:get_value(<<"integer">>, Doc, null), ' +
- ' V = proplists:get_value(<<"string">>, Doc, null), ' +
+ ' K = couch_util:get_value(<<"integer">>, Doc, null), ' +
+ ' V = couch_util:get_value(<<"string">>, Doc, null), ' +
' Emit(K, V) ' +
'end.';
@@ -44,7 +44,7 @@ couchTests.erlang_views = function(debug) {
// check simple reduction - another doc with same key.
var doc = {_id: "2", integer: 1, string: "str2"};
T(db.save(doc).ok);
- rfun = "fun(Keys, Values, ReReduce) -> length(Values) end."
+ rfun = "fun(Keys, Values, ReReduce) -> length(Values) end.";
results = db.query(mfun, rfun, null, null, "erlang");
T(results.rows[0].value == 2);
@@ -55,9 +55,9 @@ couchTests.erlang_views = function(debug) {
shows: {
simple:
'fun(Doc, {Req}) -> ' +
- ' {Info} = proplists:get_value(<<"info">>, Req, {[]}), ' +
- ' Purged = proplists:get_value(<<"purge_seq">>, Info, -1), ' +
- ' Verb = proplists:get_value(<<"method">>, Req, <<"not_get">>), ' +
+ ' {Info} = couch_util:get_value(<<"info">>, Req, {[]}), ' +
+ ' Purged = couch_util:get_value(<<"purge_seq">>, Info, -1), ' +
+ ' Verb = couch_util:get_value(<<"method">>, Req, <<"not_get">>), ' +
' R = list_to_binary(io_lib:format("~b - ~s", [Purged, Verb])), ' +
' {[{<<"code">>, 200}, {<<"headers">>, {[]}}, {<<"body">>, R}]} ' +
'end.'
@@ -67,7 +67,7 @@ couchTests.erlang_views = function(debug) {
'fun(Head, {Req}) -> ' +
' Send(<<"head">>), ' +
' Fun = fun({Row}, _) -> ' +
- ' Val = proplists:get_value(<<"value">>, Row, -1), ' +
+ ' Val = couch_util:get_value(<<"value">>, Row, -1), ' +
' Send(list_to_binary(integer_to_list(Val))), ' +
' {ok, nil} ' +
' end, ' +
@@ -117,10 +117,10 @@ couchTests.erlang_views = function(debug) {
T(db.bulkSave(docs).length, 250, "Saved big doc set.");
var mfun = 'fun({Doc}) -> ' +
- 'Words = proplists:get_value(<<"words">>, Doc), ' +
+ 'Words = couch_util:get_value(<<"words">>, Doc), ' +
'lists:foreach(fun({Word}) -> ' +
- 'WordString = proplists:get_value(<<"word">>, Word), ' +
- 'Count = proplists:get_value(<<"count">>, Word), ' +
+ 'WordString = couch_util:get_value(<<"word">>, Word), ' +
+ 'Count = couch_util:get_value(<<"count">>, Word), ' +
'Emit(WordString , Count) ' +
'end, Words) ' +
'end.';
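
Every inline Erlang fun switches from proplists:get_value/3 to couch_util:get_value/3; the JS side of the suite is untouched. For reference, a sketch of how these native views are submitted (this assumes the erlang query server is registered under native_query_servers, as the surrounding test arranges):

    // sketch: run a native Erlang map fun through the suite's query helper;
    // the fifth argument selects the query server language
    var mfun = 'fun({Doc}) -> ' +
               '  K = couch_util:get_value(<<"integer">>, Doc, null), ' +
               '  Emit(K, 1) ' +
               'end.';
    var results = db.query(mfun, null, null, null, "erlang");
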
diff --git a/share/www/script/test/etags_views.js b/share/www/script/test/etags_views.js
index a12734f8..f556d6ac 100644
--- a/share/www/script/test/etags_views.js
+++ b/share/www/script/test/etags_views.js
@@ -11,23 +11,34 @@
// the License.
couchTests.etags_views = function(debug) {
- var db = new CouchDB("test_suite_db", {"X-Couch-Full-Commit":"false"});
+ var db = new CouchDB("test_suite_db", {"X-Couch-Full-Commit":"true"});
db.deleteDb();
db.createDb();
if (debug) debugger;
var designDoc = {
- _id:"_design/etags",
+ _id: "_design/etags",
language: "javascript",
views : {
+ fooView: {
+ map: stringFun(function(doc) {
+ if (doc.foo) {
+ emit("bar", 1);
+ }
+ }),
+ },
basicView : {
map : stringFun(function(doc) {
- emit(doc.integer, doc.string);
+ if(doc.integer && doc.string) {
+ emit(doc.integer, doc.string);
+ }
})
},
withReduce : {
map : stringFun(function(doc) {
- emit(doc.integer, doc.string);
+ if(doc.integer && doc.string) {
+ emit(doc.integer, doc.string);
+ }
}),
reduce : stringFun(function(keys, values, rereduce) {
if (rereduce) {
@@ -38,11 +49,11 @@ couchTests.etags_views = function(debug) {
})
}
}
- }
+ };
T(db.save(designDoc).ok);
+ db.bulkSave(makeDocs(0, 10));
+
var xhr;
- var docs = makeDocs(0, 10);
- db.bulkSave(docs);
// verify get w/Etag on map view
xhr = CouchDB.request("GET", "/test_suite_db/_design/etags/_view/basicView");
@@ -52,17 +63,92 @@ couchTests.etags_views = function(debug) {
headers: {"if-none-match": etag}
});
T(xhr.status == 304);
- // TODO GET with keys (when that is available)
+
+ // verify ETag doesn't change when an update
+ // doesn't change the view group's index
+ T(db.save({"_id":"doc1", "foo":"bar"}).ok);
+ xhr = CouchDB.request("GET", "/test_suite_db/_design/etags/_view/basicView");
+ var etag1 = xhr.getResponseHeader("etag");
+ T(etag1 == etag);
+
+ // Verify that purges affect etags
+ xhr = CouchDB.request("GET", "/test_suite_db/_design/etags/_view/fooView");
+ var foo_etag = xhr.getResponseHeader("etag");
+ var doc1 = db.open("doc1");
+ xhr = CouchDB.request("POST", "/test_suite_db/_purge", {
+ body: JSON.stringify({"doc1":[doc1._rev]})
+ });
+ xhr = CouchDB.request("GET", "/test_suite_db/_design/etags/_view/fooView");
+ var etag1 = xhr.getResponseHeader("etag");
+ T(etag1 != foo_etag);
+
+ // Test that _purge didn't affect the other view etags.
+ xhr = CouchDB.request("GET", "/test_suite_db/_design/etags/_view/basicView");
+ var etag1 = xhr.getResponseHeader("etag");
+ T(etag1 == etag);
+
+ // verify different views in the same view group may have different ETags
+ xhr = CouchDB.request("GET", "/test_suite_db/_design/etags/_view/fooView");
+ var etag1 = xhr.getResponseHeader("etag");
+ xhr = CouchDB.request("GET", "/test_suite_db/_design/etags/_view/basicView");
+ var etag2 = xhr.getResponseHeader("etag");
+ T(etag1 != etag2);
+
+ // verify ETag changes when an update changes the view group's index.
+ db.bulkSave(makeDocs(10, 20));
+ xhr = CouchDB.request("GET", "/test_suite_db/_design/etags/_view/basicView");
+ var etag1 = xhr.getResponseHeader("etag");
+ T(etag1 != etag);
+
+ // verify ETag is the same after a restart
+ restartServer();
+ xhr = CouchDB.request("GET", "/test_suite_db/_design/etags/_view/basicView");
+ var etag2 = xhr.getResponseHeader("etag");
+ T(etag1 == etag2);
// reduce view
xhr = CouchDB.request("GET", "/test_suite_db/_design/etags/_view/withReduce");
T(xhr.status == 200);
var etag = xhr.getResponseHeader("etag");
- xhr = CouchDB.request("GET", "/test_suite_db/_design/etags/_view/withReduce", {
+ xhr = CouchDB.request("GET", "/test_suite_db/_design/etags/_view/withReduce",{
headers: {"if-none-match": etag}
});
T(xhr.status == 304);
+ // verify ETag doesn't change when an update
+ // doesn't change the view group's index
+ T(db.save({"_id":"doc3", "foo":"bar"}).ok);
+ xhr = CouchDB.request("GET", "/test_suite_db/_design/etags/_view/withReduce");
+ var etag1 = xhr.getResponseHeader("etag");
+ T(etag1 == etag);
+ // purge
+ var doc3 = db.open("doc3");
+ xhr = CouchDB.request("POST", "/test_suite_db/_purge", {
+ body: JSON.stringify({"doc3":[doc3._rev]})
+ });
+ xhr = CouchDB.request("GET", "/test_suite_db/_design/etags/_view/withReduce");
+ var etag1 = xhr.getResponseHeader("etag");
+ T(etag1 == etag);
+
+ // verify different views in the same view group may have different ETags
+ xhr = CouchDB.request("GET", "/test_suite_db/_design/etags/_view/fooView");
+ var etag1 = xhr.getResponseHeader("etag");
+ xhr = CouchDB.request("GET", "/test_suite_db/_design/etags/_view/withReduce");
+ var etag2 = xhr.getResponseHeader("etag");
+ T(etag1 != etag2);
+
+ // verify ETag changes when an update changes the view group's index
+ db.bulkSave(makeDocs(20, 30));
+ xhr = CouchDB.request("GET", "/test_suite_db/_design/etags/_view/withReduce");
+ var etag1 = xhr.getResponseHeader("etag");
+ T(etag1 != etag);
+
+ // verify ETag is the same after a restart
+ restartServer();
+ xhr = CouchDB.request("GET", "/test_suite_db/_design/etags/_view/withReduce");
+ var etag2 = xhr.getResponseHeader("etag");
+ T(etag1 == etag2);
+
// confirm ETag changes with different POST bodies
xhr = CouchDB.request("POST", "/test_suite_db/_design/etags/_view/basicView",
{body: JSON.stringify({keys:[1]})}
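
The expanded etags_views test pins down the contract: updates that do not touch a view's index leave its ETag alone (as do purges of unrelated docs), index-changing updates roll it, and it survives a server restart. Clients get cheap revalidation out of this via If-None-Match, sketched here:

    // sketch: conditional GET against a view using its ETag
    var url = "/test_suite_db/_design/etags/_view/basicView";
    var xhr = CouchDB.request("GET", url);
    var etag = xhr.getResponseHeader("etag");
    xhr = CouchDB.request("GET", url, {headers: {"if-none-match": etag}});
    T(xhr.status == 304); // index unchanged -> Not Modified, no body resent
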
diff --git a/share/www/script/test/http.js b/share/www/script/test/http.js
index 8a2e09b8..5f46af52 100644
--- a/share/www/script/test/http.js
+++ b/share/www/script/test/http.js
@@ -25,7 +25,7 @@ couchTests.http = function(debug) {
var xhr = CouchDB.request("PUT", "/test_suite_db/test", {body: "{}"});
var host = CouchDB.host;
- TEquals("http://" + host + "/test_suite_db/test",
+ TEquals(CouchDB.protocol + host + "/test_suite_db/test",
xhr.getResponseHeader("Location"),
"should include ip address");
@@ -34,7 +34,7 @@ couchTests.http = function(debug) {
headers: {"X-Forwarded-Host": "mysite.com"}
});
- TEquals("http://mysite.com/test_suite_db/test2",
+ TEquals(CouchDB.protocol + "mysite.com/test_suite_db/test2",
xhr.getResponseHeader("Location"),
"should include X-Forwarded-Host");
@@ -47,7 +47,7 @@ couchTests.http = function(debug) {
body: "{}",
headers: {"X-Host": "mysite2.com"}
});
- TEquals("http://mysite2.com/test_suite_db/test3",
+ TEquals(CouchDB.protocol + "mysite2.com/test_suite_db/test3",
xhr.getResponseHeader("Location"),
"should include X-Host");
});
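
Replacing the hard-coded "http://" with CouchDB.protocol lets the same Location assertions hold when the suite runs over SSL. Sketch:

    // sketch: absolute URLs built from the suite's protocol + host
    var base = CouchDB.protocol + CouchDB.host; // e.g. "http://127.0.0.1:5984"
    var xhr = CouchDB.request("PUT", "/test_suite_db/doc", {body: "{}"});
    TEquals(base + "/test_suite_db/doc", xhr.getResponseHeader("Location"));
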
diff --git a/share/www/script/test/jsonp.js b/share/www/script/test/jsonp.js
index dfd6a0df..9aba7189 100644
--- a/share/www/script/test/jsonp.js
+++ b/share/www/script/test/jsonp.js
@@ -32,38 +32,51 @@ couchTests.jsonp = function(debug) {
db.deleteDb();
db.createDb();
if (debug) debugger;
-
+
var doc = {_id:"0",a:0,b:0};
T(db.save(doc).ok);
+
+ // callback param is ignored unless jsonp is configured
+ var xhr = CouchDB.request("GET", "/test_suite_db/0?callback=jsonp_not_configured");
+ JSON.parse(xhr.responseText);
- // Test unchunked callbacks.
- var xhr = CouchDB.request("GET", "/test_suite_db/0?callback=jsonp_no_chunk");
- T(xhr.status == 200);
- jsonp_flag = 0;
- eval(xhr.responseText);
- T(jsonp_flag == 1);
- xhr = CouchDB.request("GET", "/test_suite_db/0?callback=foo\"");
- T(xhr.status == 400);
+ run_on_modified_server(
+ [{section: "httpd",
+ key: "allow_jsonp",
+ value: "true"}],
+ function() {
- // Test chunked responses
- var doc = {_id:"1",a:1,b:1};
- T(db.save(doc).ok);
+ // Test unchunked callbacks.
+ var xhr = CouchDB.request("GET", "/test_suite_db/0?callback=jsonp_no_chunk");
+ T(xhr.status == 200);
+ jsonp_flag = 0;
+ eval(xhr.responseText);
+ T(jsonp_flag == 1);
+ xhr = CouchDB.request("GET", "/test_suite_db/0?callback=foo\"");
+ T(xhr.status == 400);
+
+ // Test chunked responses
+ var doc = {_id:"1",a:1,b:1};
+ T(db.save(doc).ok);
+
+ var designDoc = {
+ _id:"_design/test",
+ language: "javascript",
+ views: {
+ all_docs: {map: "function(doc) {if(doc.a) emit(null, doc.a);}"}
+ }
+ };
+ T(db.save(designDoc).ok);
+
+ var url = "/test_suite_db/_design/test/_view/all_docs?callback=jsonp_chunk";
+ xhr = CouchDB.request("GET", url);
+ T(xhr.status == 200);
+ jsonp_flag = 0;
+ eval(xhr.responseText);
+ T(jsonp_flag == 1);
+ xhr = CouchDB.request("GET", url + "\'");
+ T(xhr.status == 400);
+ });
- var designDoc = {
- _id:"_design/test",
- language: "javascript",
- views: {
- all_docs: {map: "function(doc) {if(doc.a) emit(null, doc.a);}"}
- }
- }
- T(db.save(designDoc).ok);
- var url = "/test_suite_db/_design/test/_view/all_docs?callback=jsonp_chunk";
- xhr = CouchDB.request("GET", url);
- T(xhr.status == 200);
- jsonp_flag = 0;
- eval(xhr.responseText);
- T(jsonp_flag == 1);
- xhr = CouchDB.request("GET", url + "\'");
- T(xhr.status == 400);
};
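
JSONP is now opt-in: unless [httpd] allow_jsonp = true, the callback parameter is ignored and plain JSON comes back. A sketch of both modes, reusing the test's eval-based check and assuming doc "0" exists:

    // sketch: callback only wraps the body once allow_jsonp is enabled
    var jsonp_flag = 0;
    function cb(resp) { jsonp_flag = 1; T(resp._id == "0"); }
    var r = CouchDB.request("GET", "/test_suite_db/0?callback=cb");
    JSON.parse(r.responseText); // would throw if the body were cb({...})
    run_on_modified_server(
      [{section: "httpd", key: "allow_jsonp", value: "true"}],
      function() {
        r = CouchDB.request("GET", "/test_suite_db/0?callback=cb");
        eval(r.responseText);   // now executes cb(...)
        T(jsonp_flag == 1);
      });
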
diff --git a/share/www/script/test/list_views.js b/share/www/script/test/list_views.js
index 54214c2c..e98a212e 100644
--- a/share/www/script/test/list_views.js
+++ b/share/www/script/test/list_views.js
@@ -156,6 +156,9 @@ couchTests.list_views = function(debug) {
var row = getRow();
send(row.doc.integer);
return "tail";
+ }),
+ secObj: stringFun(function(head, req) {
+ return toJSON(req.secObj);
})
}
};
@@ -178,7 +181,7 @@ couchTests.list_views = function(debug) {
'fun(Head, {Req}) -> ' +
' Send(<<"[">>), ' +
' Fun = fun({Row}, Sep) -> ' +
- ' Val = proplists:get_value(<<"key">>, Row, 23), ' +
+ ' Val = couch_util:get_value(<<"key">>, Row, 23), ' +
' Send(list_to_binary(Sep ++ integer_to_list(Val))), ' +
' {ok, ","} ' +
' end, ' +
@@ -201,6 +204,7 @@ couchTests.list_views = function(debug) {
T(xhr.status == 200, "standard get should be 200");
T(/head0123456789tail/.test(xhr.responseText));
+
// test that etags are available
var etag = xhr.getResponseHeader("etag");
xhr = CouchDB.request("GET", "/test_suite_db/_design/lists/_list/basicBasic/basicView", {
@@ -220,10 +224,13 @@ couchTests.list_views = function(debug) {
   T(etag1 != etag2, "POST to map _list generates key-dependent ETags");
// test the richness of the arguments
- xhr = CouchDB.request("GET", "/test_suite_db/_design/lists/_list/basicJSON/basicView");
+ xhr = CouchDB.request("GET", "/test_suite_db/_design/lists/_list/basicJSON/basicView?update_seq=true");
T(xhr.status == 200, "standard get should be 200");
var resp = JSON.parse(xhr.responseText);
- TEquals(resp.head, {total_rows:10, offset:0});
+ TEquals(10, resp.head.total_rows);
+ TEquals(0, resp.head.offset);
+ TEquals(11, resp.head.update_seq);
+
T(resp.rows.length == 10);
TEquals(resp.rows[0], {"id": "0","key": 0,"value": "0"});
@@ -320,6 +327,16 @@ couchTests.list_views = function(debug) {
T(/FirstKey: 2/.test(xhr.responseText));
T(/LastKey: 7/.test(xhr.responseText));
+ // multi-key fetch with GET
+ var xhr = CouchDB.request("GET", "/test_suite_db/_design/lists/_list/simpleForm/basicView" +
+ "?keys=[2,4,5,7]");
+
+ T(xhr.status == 200, "multi key");
+ T(!(/Key: 1 /.test(xhr.responseText)));
+ T(/Key: 2/.test(xhr.responseText));
+ T(/FirstKey: 2/.test(xhr.responseText));
+ T(/LastKey: 7/.test(xhr.responseText));
+
// no multi-key fetch allowed when group=false
xhr = CouchDB.request("POST", "/test_suite_db/_design/lists/_list/simpleForm/withReduce?group=false", {
body: '{"keys":[2,4,5,7]}'
@@ -343,13 +360,21 @@ couchTests.list_views = function(debug) {
// T(xhr.getResponseHeader("Content-Type") == "text/plain");
T(xhr.responseText.match(/^head 0 1 2 tail$/) && "basic stop");
- xhr = CouchDB.request("GET", "/test_suite_db/_design/lists/_list/stopIter2/basicView");
+ xhr = CouchDB.request("GET", "/test_suite_db/_design/lists/_list/stopIter2/basicView", {
+ headers : {
+ "Accept" : "text/html"
+ }
+ });
T(xhr.responseText.match(/^head 0 1 2 tail$/) && "stop 2");
// aborting iteration with reduce
var xhr = CouchDB.request("GET", "/test_suite_db/_design/lists/_list/stopIter/withReduce?group=true");
T(xhr.responseText.match(/^head 0 1 2 tail$/) && "reduce stop");
- xhr = CouchDB.request("GET", "/test_suite_db/_design/lists/_list/stopIter2/withReduce?group=true");
+ xhr = CouchDB.request("GET", "/test_suite_db/_design/lists/_list/stopIter2/withReduce?group=true", {
+ headers : {
+ "Accept" : "text/html"
+ }
+ });
T(xhr.responseText.match(/^head 0 1 2 tail$/) && "reduce stop 2");
// with accept headers for HTML
@@ -383,7 +408,7 @@ couchTests.list_views = function(debug) {
T(/LastKey: 0/.test(xhr.responseText));
// Test we do multi-key requests on lists and views in separate docs.
- var url = "/test_suite_db/_design/lists/_list/simpleForm/views/basicView"
+ var url = "/test_suite_db/_design/lists/_list/simpleForm/views/basicView";
xhr = CouchDB.request("POST", url, {
body: '{"keys":[-2,-4,-5,-7]}'
});
@@ -394,6 +419,12 @@ couchTests.list_views = function(debug) {
T(/FirstKey: -2/.test(xhr.responseText));
T(/LastKey: -7/.test(xhr.responseText));
+ // Test if secObj is available
+ var xhr = CouchDB.request("GET", "/test_suite_db/_design/lists/_list/secObj/basicView");
+ T(xhr.status == 200, "standard get should be 200");
+ var resp = JSON.parse(xhr.responseText);
+ T(typeof(resp) == "object");
+
var erlViewTest = function() {
T(db.save(erlListDoc).ok);
var url = "/test_suite_db/_design/erlang/_list/simple/views/basicView" +
@@ -408,6 +439,8 @@ couchTests.list_views = function(debug) {
}
};
+
+
run_on_modified_server([{
section: "native_query_servers",
key: "erlang",
diff --git a/share/www/script/test/method_override.js b/share/www/script/test/method_override.js
new file mode 100644
index 00000000..0bb4c61f
--- /dev/null
+++ b/share/www/script/test/method_override.js
@@ -0,0 +1,40 @@
+// Licensed under the Apache License, Version 2.0 (the "License"); you may not
+// use this file except in compliance with the License. You may obtain a copy of
+// the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations under
+// the License.
+
+// Allow broken HTTP clients to fake a full method vocabulary with an X-HTTP-METHOD-OVERRIDE header
+couchTests.method_override = function(debug) {
+ var result = JSON.parse(CouchDB.request("GET", "/").responseText);
+ T(result.couchdb == "Welcome");
+
+ var db = new CouchDB("test_suite_db", {"X-Couch-Full-Commit":"false"});
+ db.deleteDb();
+
+ db.createDb();
+
+ var doc = {bob : "connie"};
+ xhr = CouchDB.request("POST", "/test_suite_db/fnord", {body: JSON.stringify(doc), headers:{"X-HTTP-Method-Override" : "PUT"}});
+ T(xhr.status == 201);
+
+ doc = db.open("fnord");
+ T(doc.bob == "connie");
+
+ xhr = CouchDB.request("POST", "/test_suite_db/fnord?rev=" + doc._rev, {headers:{"X-HTTP-Method-Override" : "DELETE"}});
+ T(xhr.status == 200);
+
+ xhr = CouchDB.request("GET", "/test_suite_db/fnord2", {body: JSON.stringify(doc), headers:{"X-HTTP-Method-Override" : "PUT"}});
+ // Method Override is ignored when original Method isn't POST
+ T(xhr.status == 404);
+
+ doc = db.open("fnord");
+ T(doc == null);
+
+};
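
The override is honored only when the transport method is POST, so a client restricted to GET/POST can still express the full vocabulary. Client-side sketch, with a hypothetical doc id:

    // sketch: tunnel a PUT through POST for limited HTTP clients
    var xhr = CouchDB.request("POST", "/test_suite_db/some_id", {
      headers: {"X-HTTP-Method-Override": "PUT"},
      body: JSON.stringify({ok: true})
    });
    T(xhr.status == 201); // handled as a PUT
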
diff --git a/share/www/script/test/oauth.js b/share/www/script/test/oauth.js
index d55d13e8..82ebe8a4 100644
--- a/share/www/script/test/oauth.js
+++ b/share/www/script/test/oauth.js
@@ -71,7 +71,7 @@ couchTests.oauth = function(debug) {
var host = CouchDB.host;
var dbPair = {
source: {
- url: "http://" + host + "/test_suite_db_a",
+ url: CouchDB.protocol + host + "/test_suite_db_a",
auth: {
oauth: {
consumer_key: "key",
@@ -82,7 +82,7 @@ couchTests.oauth = function(debug) {
}
},
target: {
- url: "http://" + host + "/test_suite_db_b",
+ url: CouchDB.protocol + host + "/test_suite_db_b",
headers: {"Authorization": adminBasicAuthHeaderValue()}
}
};
@@ -90,16 +90,26 @@ couchTests.oauth = function(debug) {
// this function will be called on the modified server
var testFun = function () {
try {
- CouchDB.request("PUT", "http://" + host + "/_config/admins/testadmin", {
+ CouchDB.request("PUT", CouchDB.protocol + host + "/_config/admins/testadmin", {
headers: {"X-Couch-Persist": "false"},
body: JSON.stringify(testadminPassword)
});
-
- CouchDB.request("GET", "/_sleep?time=50");
+ var i = 0;
+ waitForSuccess(function() {
+ //loop until the couch server has processed the password
+ i += 1;
+ var xhr = CouchDB.request("GET", CouchDB.protocol + host + "/_config/admins/testadmin?foo="+i,{
+ headers: {
+ "Authorization": adminBasicAuthHeaderValue()
+ }});
+ if (xhr.responseText.indexOf("\"-hashed-") != 0) {
+ throw("still waiting");
+ }
+ }, "wait-for-admin");
CouchDB.newUuids(2); // so we have one to make the salt
- CouchDB.request("PUT", "http://" + host + "/_config/couch_httpd_auth/require_valid_user", {
+ CouchDB.request("PUT", CouchDB.protocol + host + "/_config/couch_httpd_auth/require_valid_user", {
headers: {
"X-Couch-Persist": "false",
"Authorization": adminBasicAuthHeaderValue()
@@ -116,7 +126,7 @@ couchTests.oauth = function(debug) {
// Create a user
var jasonUserDoc = CouchDB.prepareUserDoc({
- username: "jason",
+ name: "jason",
roles: ["test"]
}, "testpassword");
T(usersDb.save(jasonUserDoc).ok);
@@ -147,11 +157,11 @@ couchTests.oauth = function(debug) {
};
// Get request token via Authorization header
- xhr = oauthRequest("GET", "http://" + host + "/_oauth/request_token", message, accessor);
+ xhr = oauthRequest("GET", CouchDB.protocol + host + "/_oauth/request_token", message, accessor);
T(xhr.status == expectedCode);
// GET request token via query parameters
- xhr = oauthRequest("GET", "http://" + host + "/_oauth/request_token", message, accessor);
+ xhr = oauthRequest("GET", CouchDB.protocol + host + "/_oauth/request_token", message, accessor);
T(xhr.status == expectedCode);
responseMessage = OAuth.decodeForm(xhr.responseText);
@@ -161,7 +171,7 @@ couchTests.oauth = function(debug) {
//xhr = CouchDB.request("GET", authorization_url + '?oauth_token=' + responseMessage.oauth_token);
//T(xhr.status == expectedCode);
- xhr = oauthRequest("GET", "http://" + host + "/_session", message, accessor);
+ xhr = oauthRequest("GET", CouchDB.protocol + host + "/_session", message, accessor);
T(xhr.status == expectedCode);
if (xhr.status == expectedCode == 200) {
data = JSON.parse(xhr.responseText);
@@ -169,11 +179,11 @@ couchTests.oauth = function(debug) {
T(data.roles[0] == "test");
}
- xhr = oauthRequest("GET", "http://" + host + "/_session?foo=bar", message, accessor);
+ xhr = oauthRequest("GET", CouchDB.protocol + host + "/_session?foo=bar", message, accessor);
T(xhr.status == expectedCode);
// Test HEAD method
- xhr = oauthRequest("HEAD", "http://" + host + "/_session?foo=bar", message, accessor);
+ xhr = oauthRequest("HEAD", CouchDB.protocol + host + "/_session?foo=bar", message, accessor);
T(xhr.status == expectedCode);
// Replication
@@ -197,7 +207,7 @@ couchTests.oauth = function(debug) {
oauth_version: "1.0"
}
};
- xhr = oauthRequest("GET", "http://" + host + "/_session?foo=bar", message, adminAccessor);
+ xhr = oauthRequest("GET", CouchDB.protocol + host + "/_session?foo=bar", message, adminAccessor);
if (xhr.status == expectedCode == 200) {
data = JSON.parse(xhr.responseText);
T(data.name == "testadmin");
@@ -206,13 +216,13 @@ couchTests.oauth = function(debug) {
// Test when the user's token doesn't exist.
message.parameters.oauth_token = "not a token!";
- xhr = oauthRequest("GET", "http://" + host + "/_session?foo=bar",
+ xhr = oauthRequest("GET", CouchDB.protocol + host + "/_session?foo=bar",
message, adminAccessor);
T(xhr.status == 400, "Request should be invalid.");
}
}
} finally {
- var xhr = CouchDB.request("PUT", "http://" + host + "/_config/couch_httpd_auth/require_valid_user", {
+ var xhr = CouchDB.request("PUT", CouchDB.protocol + host + "/_config/couch_httpd_auth/require_valid_user", {
headers: {
"Authorization": adminBasicAuthHeaderValue(),
"X-Couch-Persist": "false"
@@ -221,7 +231,7 @@ couchTests.oauth = function(debug) {
});
T(xhr.status == 200);
- var xhr = CouchDB.request("DELETE", "http://" + host + "/_config/admins/testadmin", {
+ var xhr = CouchDB.request("DELETE", CouchDB.protocol + host + "/_config/admins/testadmin", {
headers: {
"Authorization": adminBasicAuthHeaderValue(),
"X-Couch-Persist": "false"
diff --git a/share/www/script/test/proxyauth.js b/share/www/script/test/proxyauth.js
new file mode 100644
index 00000000..40af0089
--- /dev/null
+++ b/share/www/script/test/proxyauth.js
@@ -0,0 +1,130 @@
+// Licensed under the Apache License, Version 2.0 (the "License"); you may not
+// use this file except in compliance with the License. You may obtain a copy of
+// the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations under
+// the License.
+
+
+
+couchTests.proxyauth = function(debug) {
+  // this tests the proxy authentication handler
+
+ var usersDb = new CouchDB("test_suite_users", {"X-Couch-Full-Commit":"false"});
+ var db = new CouchDB("test_suite_db", {"X-Couch-Full-Commit":"false"});
+
+ if (debug) debugger;
+
+ // Simple secret key generator
+ function generateSecret(length) {
+ var tab = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
+ var secret = '';
+ for (var i=0; i<length; i++) {
+ secret += tab.charAt(Math.floor(Math.random() * 64));
+ }
+ return secret;
+ }
+
+ var secret = generateSecret(64);
+
+ function TestFun() {
+ usersDb.deleteDb();
+ usersDb.createDb();
+ db.deleteDb();
+ db.createDb();
+
+ var benoitcUserDoc = CouchDB.prepareUserDoc({
+ name: "benoitc@apache.org"
+ }, "test");
+ T(usersDb.save(benoitcUserDoc).ok);
+
+ T(CouchDB.session().userCtx.name == null);
+
+    // test that you can use basic auth against the users db
+ var s = CouchDB.session({
+ headers : {
+ "Authorization" : "Basic YmVub2l0Y0BhcGFjaGUub3JnOnRlc3Q="
+ }
+ });
+ T(s.userCtx.name == "benoitc@apache.org");
+ T(s.info.authenticated == "default");
+
+ CouchDB.logout();
+
+ var headers = {
+ "X-Auth-CouchDB-UserName": "benoitc@apache.org",
+ "X-Auth-CouchDB-Roles": "test",
+ "X-Auth-CouchDB-Token": hex_hmac_sha1(secret, "benoitc@apache.org")
+ };
+
+ var designDoc = {
+ _id:"_design/test",
+ language: "javascript",
+
+ shows: {
+ "welcome": stringFun(function(doc,req) {
+ return "Welcome " + req.userCtx["name"];
+ }),
+ "role": stringFun(function(doc, req) {
+ return req.userCtx['roles'][0];
+ })
+ }
+ };
+
+ db.save(designDoc);
+
+ var req = CouchDB.request("GET", "/test_suite_db/_design/test/_show/welcome",
+ {headers: headers});
+ T(req.responseText == "Welcome benoitc@apache.org");
+
+ req = CouchDB.request("GET", "/test_suite_db/_design/test/_show/role",
+ {headers: headers});
+ T(req.responseText == "test");
+
+ var xhr = CouchDB.request("PUT", "/_config/couch_httpd_auth/proxy_use_secret",{
+ body : JSON.stringify("true"),
+ headers: {"X-Couch-Persist": "false"}
+ });
+ T(xhr.status == 200);
+
+ req = CouchDB.request("GET", "/test_suite_db/_design/test/_show/welcome",
+ {headers: headers});
+ T(req.responseText == "Welcome benoitc@apache.org");
+
+ req = CouchDB.request("GET", "/test_suite_db/_design/test/_show/role",
+ {headers: headers});
+ T(req.responseText == "test");
+
+ }
+
+ run_on_modified_server(
+ [{section: "httpd",
+ key: "authentication_handlers",
+ value:"{couch_httpd_auth, proxy_authentification_handler}, {couch_httpd_auth, default_authentication_handler}"},
+ {section: "couch_httpd_auth",
+ key: "authentication_db",
+ value: "test_suite_users"},
+ {section: "couch_httpd_auth",
+ key: "secret",
+ value: secret},
+ {section: "couch_httpd_auth",
+ key: "x_auth_username",
+ value: "X-Auth-CouchDB-UserName"},
+ {section: "couch_httpd_auth",
+ key: "x_auth_roles",
+ value: "X-Auth-CouchDB-Roles"},
+ {section: "couch_httpd_auth",
+ key: "x_auth_token",
+ value: "X-Auth-CouchDB-Token"},
+ {section: "couch_httpd_auth",
+ key: "proxy_use_secret",
+ value: "false"}],
+ TestFun
+ );
+
+};
\ No newline at end of file
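
With proxy_use_secret enabled, CouchDB only trusts the proxy headers when the token proves knowledge of the shared secret: an HMAC-SHA1 of the username keyed with [couch_httpd_auth] secret. Sketch, assuming the suite's hex_hmac_sha1 helper and a placeholder secret:

    // sketch: headers a trusted reverse proxy would send
    var secret = "0123456789abcdef"; // placeholder; must match the server's secret
    var headers = {
      "X-Auth-CouchDB-UserName": "someone@example.org",
      "X-Auth-CouchDB-Roles": "dev,ops", // comma-separated roles
      "X-Auth-CouchDB-Token": hex_hmac_sha1(secret, "someone@example.org")
    };
    var s = CouchDB.session({headers: headers});
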
diff --git a/share/www/script/test/purge.js b/share/www/script/test/purge.js
index ca68b363..af72ea4f 100644
--- a/share/www/script/test/purge.js
+++ b/share/www/script/test/purge.js
@@ -30,7 +30,7 @@ couchTests.purge = function(debug) {
all_docs_twice: {map: "function(doc) { emit(doc.integer, null); emit(doc.integer, null) }"},
single_doc: {map: "function(doc) { if (doc._id == \"1\") { emit(1, null) }}"}
}
- }
+ };
T(db.save(designDoc).ok);
@@ -50,17 +50,19 @@ couchTests.purge = function(debug) {
// purge the documents
var xhr = CouchDB.request("POST", "/test_suite_db/_purge", {
- body: JSON.stringify({"1":[doc1._rev], "2":[doc2._rev]}),
+ body: JSON.stringify({"1":[doc1._rev], "2":[doc2._rev]})
});
T(xhr.status == 200);
+ var result = JSON.parse(xhr.responseText);
var newInfo = db.info();
+
// purging increments the update sequence
T(info.update_seq+1 == newInfo.update_seq);
// and it increments the purge_seq
T(info.purge_seq+1 == newInfo.purge_seq);
+ T(result.purge_seq == newInfo.purge_seq);
- var result = JSON.parse(xhr.responseText);
T(result.purged["1"][0] == doc1._rev);
T(result.purged["2"][0] == doc2._rev);
@@ -81,16 +83,18 @@ couchTests.purge = function(debug) {
var doc4 = db.open("4");
xhr = CouchDB.request("POST", "/test_suite_db/_purge", {
- body: JSON.stringify({"3":[doc3._rev]}),
+ body: JSON.stringify({"3":[doc3._rev]})
});
T(xhr.status == 200);
xhr = CouchDB.request("POST", "/test_suite_db/_purge", {
- body: JSON.stringify({"4":[doc4._rev]}),
+ body: JSON.stringify({"4":[doc4._rev]})
});
T(xhr.status == 200);
+ result = JSON.parse(xhr.responseText);
+ T(result.purge_seq == db.info().purge_seq);
var rows = db.view("test/all_docs_twice").rows;
for (var i = 4; i < numDocs; i++) {
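
_purge now reports the resulting purge_seq in its response, and the test cross-checks it against db.info(). Request/response shape, with a hypothetical doc id:

    // sketch: purge one revision and verify the returned purge_seq
    var doc = db.open("some_doc");
    var xhr = CouchDB.request("POST", "/test_suite_db/_purge", {
      body: JSON.stringify({"some_doc": [doc._rev]})
    });
    var result = JSON.parse(xhr.responseText);
    T(result.purged["some_doc"][0] == doc._rev);
    T(result.purge_seq == db.info().purge_seq); // the new invariant
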
diff --git a/share/www/script/test/reader_acl.js b/share/www/script/test/reader_acl.js
new file mode 100644
index 00000000..cc249ea4
--- /dev/null
+++ b/share/www/script/test/reader_acl.js
@@ -0,0 +1,198 @@
+// Licensed under the Apache License, Version 2.0 (the "License"); you may not
+// use this file except in compliance with the License. You may obtain a copy
+// of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations under
+// the License.
+
+couchTests.reader_acl = function(debug) {
+ // this tests read access control
+
+ var usersDb = new CouchDB("test_suite_users", {"X-Couch-Full-Commit":"false"});
+ var secretDb = new CouchDB("test_suite_db", {"X-Couch-Full-Commit":"false"});
+ function testFun() {
+ try {
+ usersDb.deleteDb();
+ usersDb.createDb();
+ secretDb.deleteDb();
+ secretDb.createDb();
+
+ // create a user with top-secret-clearance
+ var jchrisUserDoc = CouchDB.prepareUserDoc({
+ name: "jchris@apache.org",
+ roles : ["top-secret"]
+ }, "funnybone");
+ T(usersDb.save(jchrisUserDoc).ok);
+ usersDb.ensureFullCommit();
+
+ T(CouchDB.session().userCtx.name == null);
+
+ // set secret db to be read controlled
+ T(secretDb.save({_id:"baz",foo:"bar"}).ok);
+ T(secretDb.open("baz").foo == "bar");
+
+ T(secretDb.setSecObj({
+ "readers" : {
+ roles : ["super-secret-club"],
+ names : ["joe","barb"]
+ }
+ }).ok);
+ } finally {
+ CouchDB.logout();
+ }
+ }
+
+ // split into 2 funs so we can test restart behavior
+ function testFun2() {
+ try {
+ // can't read it as jchris b/c he's missing the needed role
+ T(CouchDB.login("jchris@apache.org", "funnybone").ok);
+ T(CouchDB.session().userCtx.name == "jchris@apache.org");
+
+ try {
+ secretDb.open("baz");
+ T(false && "can't open a doc from a secret db") ;
+ } catch(e) {
+ T(true)
+ }
+
+ CouchDB.logout();
+
+ // make anyone with the top-secret role an admin
+ // db admins are automatically readers
+ T(secretDb.setSecObj({
+ "admins" : {
+ roles : ["top-secret"],
+ names : []
+ },
+ "readers" : {
+ roles : ["super-secret-club"],
+ names : ["joe","barb"]
+ }
+ }).ok);
+
+
+ T(CouchDB.login("jchris@apache.org", "funnybone").ok);
+
+ // db admin can read
+ T(secretDb.open("baz").foo == "bar");
+
+ // and run temp views
+ TEquals(secretDb.query(function(doc) {
+ emit(null, null)
+ }).total_rows, 1);
+
+ CouchDB.logout();
+ T(CouchDB.session().userCtx.roles.indexOf("_admin") != -1);
+
+ // admin now adds the top-secret role to the db's readers
+ // and removes db-admins
+ T(secretDb.setSecObj({
+ "admins" : {
+ roles : [],
+ names : []
+ },
+ "readers" : {
+ roles : ["super-secret-club", "top-secret"],
+ names : ["joe","barb"]
+ }
+ }).ok);
+
+ // server _admin can always read
+ T(secretDb.open("baz").foo == "bar");
+
+ // and run temp views
+ TEquals(secretDb.query(function(doc) {
+ emit(null, null)
+ }).total_rows, 1);
+
+ T(secretDb.save({
+ "_id" : "_design/foo",
+ views : {
+ bar : {
+ map : "function(doc){emit(null, null)}"
+ }
+ }
+    }).ok);
+
+ // now top-secret users can read too
+ T(CouchDB.login("jchris@apache.org", "funnybone").ok);
+ T(CouchDB.session().userCtx.roles.indexOf("_admin") == -1);
+ T(secretDb.open("baz").foo == "bar");
+ // readers can query stored views
+ T(secretDb.view("foo/bar").total_rows == 1);
+
+ // readers can't do temp views
+ try {
+ var results = secretDb.query(function(doc) {
+ emit(null, null);
+ });
+ T(false && "temp view should be admin only");
+ } catch (e) {
+ T(true && "temp view is admin only");
+ }
+
+
+ CouchDB.logout();
+
+ // can't set non string reader names or roles
+ try {
+ secretDb.setSecObj({
+ "readers" : {
+ roles : ["super-secret-club", {"top-secret":"awesome"}],
+ names : ["joe","barb"]
+ }
+      });
+ T(false && "only string roles");
+ } catch (e) {}
+
+ try {
+ secretDb.setSecObj({
+ "readers" : {
+ roles : ["super-secret-club", {"top-secret":"awesome"}],
+ names : ["joe",22]
+ }
+ });
+ T(false && "only string names");
+ } catch (e) {}
+
+ try {
+ secretDb.setSecObj({
+ "readers" : {
+ roles : ["super-secret-club", {"top-secret":"awesome"}],
+ names : "joe"
+ }
+ });
+ T(false && "only lists of names");
+ } catch (e) {}
+ } finally {
+ CouchDB.logout();
+ }
+ };
+
+ run_on_modified_server(
+ [{section: "httpd",
+ key: "authentication_handlers",
+ value: "{couch_httpd_auth, cookie_authentication_handler}, {couch_httpd_auth, default_authentication_handler}"},
+ {section: "couch_httpd_auth",
+ key: "authentication_db", value: "test_suite_users"}],
+ testFun
+ );
+
+ // security changes will always commit synchronously
+ restartServer();
+
+ run_on_modified_server(
+ [{section: "httpd",
+ key: "authentication_handlers",
+ value: "{couch_httpd_auth, cookie_authentication_handler}, {couch_httpd_auth, default_authentication_handler}"},
+ {section: "couch_httpd_auth",
+ key: "authentication_db", value: "test_suite_users"}],
+ testFun2
+ );
+}
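
The reader ACL lives in the database's security object: readers are granted by name or role, db admins read implicitly, server admins always can, and non-string entries are rejected. The shape, as set through the suite's setSecObj helper:

    // sketch: restrict reads to two named users plus one role
    T(secretDb.setSecObj({
      admins:  {names: [], roles: ["top-secret"]},  // db admins can always read
      readers: {names: ["joe", "barb"], roles: ["super-secret-club"]}
    }).ok);
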
diff --git a/share/www/script/test/recreate_doc.js b/share/www/script/test/recreate_doc.js
index a6a64ac0..05843558 100644
--- a/share/www/script/test/recreate_doc.js
+++ b/share/www/script/test/recreate_doc.js
@@ -51,7 +51,7 @@ couchTests.recreate_doc = function(debug) {
data: "VGhpcyBpcyBhIGJhc2U2NCBlbmNvZGVkIHRleHQ="
}
}
- }
+ };
try {
// same as before, but with binary
db.save(binAttDoc);
diff --git a/share/www/script/test/reduce.js b/share/www/script/test/reduce.js
index 9c80fa7f..979a0292 100644
--- a/share/www/script/test/reduce.js
+++ b/share/www/script/test/reduce.js
@@ -15,14 +15,15 @@ couchTests.reduce = function(debug) {
db.deleteDb();
db.createDb();
if (debug) debugger;
- var numDocs = 500
+ var numDocs = 500;
var docs = makeDocs(1,numDocs + 1);
db.bulkSave(docs);
var summate = function(N) {return (N+1)*N/2;};
var map = function (doc) {
emit(doc.integer, doc.integer);
- emit(doc.integer, doc.integer)};
+ emit(doc.integer, doc.integer);
+ };
var reduce = function (keys, values) { return sum(values); };
var result = db.query(map, reduce);
T(result.rows[0].value == 2*summate(numDocs));
@@ -69,7 +70,7 @@ couchTests.reduce = function(debug) {
T(db.info().doc_count == ((i - 1) * 10 * 11) + ((j + 1) * 11));
}
- map = function (doc) {emit(doc.keys, 1)};
+ map = function (doc) { emit(doc.keys, 1); };
reduce = function (keys, values) { return sum(values); };
var results = db.query(map, reduce, {group:true});
@@ -107,7 +108,7 @@ couchTests.reduce = function(debug) {
db.createDb();
- var map = function (doc) {emit(doc.val, doc.val)};
+ var map = function (doc) { emit(doc.val, doc.val); };
var reduceCombine = function (keys, values, rereduce) {
// This computes the standard deviation of the mapped results
var stdDeviation=0.0;
diff --git a/share/www/script/test/reduce_builtin.js b/share/www/script/test/reduce_builtin.js
index 7938a0cf..b3cc3cc7 100644
--- a/share/www/script/test/reduce_builtin.js
+++ b/share/www/script/test/reduce_builtin.js
@@ -16,22 +16,37 @@ couchTests.reduce_builtin = function(debug) {
db.createDb();
if (debug) debugger;
- var numDocs = 500
+ var numDocs = 500;
var docs = makeDocs(1,numDocs + 1);
db.bulkSave(docs);
var summate = function(N) {return (N+1)*N/2;};
+ var sumsqr = function(N) {
+ var acc = 0;
+ for (var i=1; i<=N; ++i) {
+ acc += i*i;
+ }
+ return acc;
+ };
+
// this is the same test as the reduce.js test
// only we'll let CouchDB run reduce in Erlang
var map = function (doc) {
emit(doc.integer, doc.integer);
- emit(doc.integer, doc.integer)};
+ emit(doc.integer, doc.integer);
+ };
var result = db.query(map, "_sum");
T(result.rows[0].value == 2*summate(numDocs));
result = db.query(map, "_count");
T(result.rows[0].value == 1000);
+ result = db.query(map, "_stats");
+ T(result.rows[0].value.sum == 2*summate(numDocs));
+ T(result.rows[0].value.count == 1000);
+ T(result.rows[0].value.min == 1);
+ T(result.rows[0].value.max == 500);
+ T(result.rows[0].value.sumsqr == 2*sumsqr(numDocs));
result = db.query(map, "_sum", {startkey: 4, endkey: 4});
T(result.rows[0].value == 8);
@@ -58,6 +73,26 @@ couchTests.reduce_builtin = function(debug) {
T(result.rows[0].value == 2*(summate(numDocs-i) - summate(i-1)));
}
+  // test for trailing characters after builtin functions; desired behavior
+  // is to disregard any trailing characters
+  // I think the behavior should be a prefix test, so that even "_statsorama"
+  // or "_stats\nare\nawesome" should work just as "_stats" does. - JChris
+
+ var trailing = ["\u000a", "orama", "\nare\nawesome", " ", " \n "];
+
+ for(var i=0; i < trailing.length; i++) {
+ result = db.query(map, "_sum" + trailing[i]);
+ T(result.rows[0].value == 2*summate(numDocs));
+ result = db.query(map, "_count" + trailing[i]);
+ T(result.rows[0].value == 1000);
+ result = db.query(map, "_stats" + trailing[i]);
+ T(result.rows[0].value.sum == 2*summate(numDocs));
+ T(result.rows[0].value.count == 1000);
+ T(result.rows[0].value.min == 1);
+ T(result.rows[0].value.max == 500);
+ T(result.rows[0].value.sumsqr == 2*sumsqr(numDocs));
+ }
+
db.deleteDb();
db.createDb();
@@ -81,7 +116,7 @@ couchTests.reduce_builtin = function(debug) {
T(db.info().doc_count == ((i - 1) * 10 * 11) + ((j + 1) * 11));
}
- map = function (doc) {emit(doc.keys, 1)};
+ map = function (doc) { emit(doc.keys, 1); };
// with emitted values being 1, count should be the same as sum
var builtins = ["_sum", "_count"];
@@ -115,5 +150,30 @@ couchTests.reduce_builtin = function(debug) {
T(equals(results.rows[5], {key:["d","b"],value:10*i}));
T(equals(results.rows[6], {key:["d","c"],value:10*i}));
};
+
+ map = function (doc) { emit(doc.keys, [1, 1]); };
+
+ var results = db.query(map, "_sum", {group:true});
+ T(equals(results.rows[0], {key:["a"],value:[20*i,20*i]}));
+ T(equals(results.rows[1], {key:["a","b"],value:[20*i,20*i]}));
+ T(equals(results.rows[2], {key:["a", "b", "c"],value:[10*i,10*i]}));
+ T(equals(results.rows[3], {key:["a", "b", "d"],value:[10*i,10*i]}));
+
+ var results = db.query(map, "_sum", {group: true, limit: 2});
+ T(equals(results.rows[0], {key: ["a"], value: [20*i,20*i]}));
+ T(equals(results.rows.length, 2));
+
+ var results = db.query(map, "_sum", {group_level:1});
+ T(equals(results.rows[0], {key:["a"],value:[70*i,70*i]}));
+ T(equals(results.rows[1], {key:["d"],value:[40*i,40*i]}));
+
+ var results = db.query(map, "_sum", {group_level:2});
+ T(equals(results.rows[0], {key:["a"],value:[20*i,20*i]}));
+ T(equals(results.rows[1], {key:["a","b"],value:[40*i,40*i]}));
+ T(equals(results.rows[2], {key:["a","c"],value:[10*i,10*i]}));
+ T(equals(results.rows[3], {key:["d"],value:[10*i,10*i]}));
+ T(equals(results.rows[4], {key:["d","a"],value:[10*i,10*i]}));
+ T(equals(results.rows[5], {key:["d","b"],value:[10*i,10*i]}));
+ T(equals(results.rows[6], {key:["d","c"],value:[10*i,10*i]}));
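+
+  // group_level=N truncates each array key to its first N elements before
+  // re-reducing, so at level 2 the ["a","b","c"] and ["a","b","d"] rows
+  // collapse into the single ["a","b"] row above, giving 40*i rather than
+  // the 20*i seen with group=true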
}
}
diff --git a/share/www/script/test/replication.js b/share/www/script/test/replication.js
index 78678937..5acff4ab 100644
--- a/share/www/script/test/replication.js
+++ b/share/www/script/test/replication.js
@@ -17,19 +17,19 @@ couchTests.replication = function(debug) {
{source:"test_suite_db_a",
target:"test_suite_db_b"},
{source:"test_suite_db_a",
- target:"http://" + host + "/test_suite_db_b"},
- {source:"http://" + host + "/test_suite_db_a",
+ target:CouchDB.protocol + host + "/test_suite_db_b"},
+ {source:CouchDB.protocol + host + "/test_suite_db_a",
target:"test_suite_db_b"},
- {source:"http://" + host + "/test_suite_db_a",
- target:"http://" + host + "/test_suite_db_b"}
- ]
+ {source:CouchDB.protocol + host + "/test_suite_db_a",
+ target:CouchDB.protocol + host + "/test_suite_db_b"}
+ ];
var dbA = new CouchDB("test_suite_db_a", {"X-Couch-Full-Commit":"false"});
var dbB = new CouchDB("test_suite_db_b", {"X-Couch-Full-Commit":"false"});
var numDocs = 10;
var xhr;
for (var testPair = 0; testPair < dbPairs.length; testPair++) {
- var A = dbPairs[testPair].source
- var B = dbPairs[testPair].target
+ var A = dbPairs[testPair].source;
+ var B = dbPairs[testPair].target;
dbA.deleteDb();
dbA.createDb();
@@ -41,7 +41,7 @@ couchTests.replication = function(debug) {
test_template: new function () {
this.init = function(dbA, dbB) {
// before anything has happened
- }
+ };
this.afterAB1 = function(dbA, dbB) {
// called after replicating src=A tgt=B first time.
};
@@ -165,20 +165,20 @@ couchTests.replication = function(debug) {
this.afterAB1 = function(dbA, dbB) {
var xhr = CouchDB.request("GET",
"/test_suite_db_a/bin_doc/foo%2Bbar.txt");
- T(xhr.responseText == "This is a base64 encoded text")
+ T(xhr.responseText == "This is a base64 encoded text");
xhr = CouchDB.request("GET",
"/test_suite_db_b/bin_doc/foo%2Bbar.txt");
- T(xhr.responseText == "This is a base64 encoded text")
+ T(xhr.responseText == "This is a base64 encoded text");
// and the design-doc
xhr = CouchDB.request("GET",
"/test_suite_db_a/_design/with_bin/foo%2Bbar.txt");
- T(xhr.responseText == "This is a base64 encoded text")
+ T(xhr.responseText == "This is a base64 encoded text");
xhr = CouchDB.request("GET",
"/test_suite_db_b/_design/with_bin/foo%2Bbar.txt");
- T(xhr.responseText == "This is a base64 encoded text")
+ T(xhr.responseText == "This is a base64 encoded text");
};
},
@@ -209,8 +209,8 @@ couchTests.replication = function(debug) {
var docB = dbB.open("foo", {conflicts: true, deleted_conflicts: true});
// We should have no conflicts this time
- T(docA._conflicts === undefined)
- T(docB._conflicts === undefined);
+ T(typeof docA._conflicts === "undefined");
+ T(typeof docB._conflicts === "undefined");
// They show up as deleted conflicts instead
T(docA._deleted_conflicts[0] == docB._deleted_conflicts[0]);
@@ -229,7 +229,7 @@ couchTests.replication = function(debug) {
var seqA = result.source_last_seq;
T(0 == result.history[0].start_last_seq);
- T(result.history[1] === undefined)
+ T(typeof result.history[1] === "undefined");
for(test in repTests) {
if(repTests[test].afterAB1) repTests[test].afterAB1(dbA, dbB);
@@ -239,7 +239,7 @@ couchTests.replication = function(debug) {
var seqB = result.source_last_seq;
T(0 == result.history[0].start_last_seq);
- T(result.history[1] === undefined)
+ T(typeof result.history[1] === "undefined");
for(test in repTests) {
if(repTests[test].afterBA1) repTests[test].afterBA1(dbA, dbB);
@@ -252,7 +252,7 @@ couchTests.replication = function(debug) {
T(seqA < result2.source_last_seq);
T(seqA == result2.history[0].start_last_seq);
- T(result2.history[1].end_last_seq == seqA)
+ T(result2.history[1].end_last_seq == seqA);
seqA = result2.source_last_seq;
@@ -260,11 +260,11 @@ couchTests.replication = function(debug) {
if(repTests[test].afterAB2) repTests[test].afterAB2(dbA, dbB);
}
- result = CouchDB.replicate(B, A)
+ result = CouchDB.replicate(B, A);
T(seqB < result.source_last_seq);
T(seqB == result.history[0].start_last_seq);
- T(result.history[1].end_last_seq == seqB)
+ T(result.history[1].end_last_seq == seqB);
seqB = result.source_last_seq;
@@ -296,9 +296,436 @@ couchTests.replication = function(debug) {
// remote
dbB.deleteDb();
- CouchDB.replicate(dbA.name, "http://" + CouchDB.host + "/test_suite_db_b", {
+ CouchDB.replicate(dbA.name, CouchDB.protocol + CouchDB.host + "/test_suite_db_b", {
body: {"create_target": true}
});
TEquals("test_suite_db_b", dbB.info().db_name,
"Target database should exist");
+
+ // continuous
+ var continuousResult = CouchDB.replicate(dbA.name, "test_suite_db_b", {
+ body: {"continuous": true}
+ });
+ T(continuousResult.ok);
+ T(continuousResult._local_id);
+
+ var cancelResult = CouchDB.replicate(dbA.name, "test_suite_db_b", {
+ body: {"cancel": true}
+ });
+ T(cancelResult.ok);
+ T(continuousResult._local_id == cancelResult._local_id);
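+  // a continuous replication is identified by the _local_id in its response;
+  // cancelling the same source/target pair resolves to the same running
+  // task, which is what the assertion above verifies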
+
+ try {
+ var cancelResult2 = CouchDB.replicate(dbA.name, "test_suite_db_b", {
+ body: {"cancel": true}
+ });
+ } catch (e) {
+ T(e.error == "not_found");
+ }
+  // test the "doc_ids" replication option
+
+ var dbA = new CouchDB("test_suite_rep_docs_db_a", {"X-Couch-Full-Commit":"false"});
+ var dbB = new CouchDB("test_suite_rep_docs_db_b", {"X-Couch-Full-Commit":"false"});
+
+ dbA.deleteDb();
+ dbA.createDb();
+
+ var all_docs = [
+ {
+ _id: "foo1",
+ value: "a"
+ },
+ {
+ _id: "foo2",
+ value: "b"
+ },
+ {
+ _id: "foo3",
+ value: "c"
+ },
+ {
+ _id: "slashed/foo",
+ value: "s"
+ },
+ {
+ _id: "_design/foobar",
+ language: "javascript",
+ value: "I am a design doc",
+ filters: {
+ idfilter: (function(doc, req) {
+ return doc.value == Number(req.filter_value);
+ }).toString()
+ },
+ views: {
+ countview: (function(doc) {
+ emit(doc.value, 1);
+ }).toString()
+ }
+ }
+ ];
+
+ for (var i = 0; i < all_docs.length; i++) {
+ T(dbA.save(all_docs[i]).ok);
+ }
+
+ var dbPairs = [
+ {source:"test_suite_rep_docs_db_a",
+ target:"test_suite_rep_docs_db_b"},
+ {source:"test_suite_rep_docs_db_a",
+ target:CouchDB.protocol + host + "/test_suite_rep_docs_db_b"},
+ {source:CouchDB.protocol + host + "/test_suite_rep_docs_db_a",
+ target:"test_suite_rep_docs_db_b"},
+ {source:CouchDB.protocol + host + "/test_suite_rep_docs_db_a",
+ target:CouchDB.protocol + host + "/test_suite_rep_docs_db_b"}
+ ];
+
+ var target_doc_ids = [
+ ["foo1", "foo3", "foo666"],
+ ["foo1", "foo666"],
+ ["foo666", "foo2"],
+ ["foo2", "foo9999", "foo1"],
+ ["foo3", "slashed/foo"],
+ ["foo3", "slashed%2Ffoo"],
+ ["foo1", "_design/foobar"],
+ ["foo1", "foo1001", "_design%2Ffoobar"]
+ ];
+
+ for (var i = 0; i < dbPairs.length; i++) {
+ var src_db = dbPairs[i].source;
+ var tgt_db = dbPairs[i].target;
+
+ for (var j = 0; j < target_doc_ids.length; j++) {
+ var doc_ids = target_doc_ids[j];
+ var valid_doc_ids = [];
+ var invalid_doc_ids = [];
+
+ for (var p = 0; p < doc_ids.length; p++) {
+ var id = doc_ids[p];
+ var found = false;
+
+ for (var k = 0; k < all_docs.length; k++) {
+ var doc = all_docs[k];
+
+ if (id === doc._id) {
+ found = true;
+ break;
+ }
+ }
+
+ if (found) {
+ valid_doc_ids.push(id);
+ } else {
+ invalid_doc_ids.push(id);
+ }
+      }
+
+ dbB.deleteDb();
+ dbB.createDb();
+
+ var repResult = CouchDB.replicate(src_db, tgt_db, {
+ body: {"doc_ids": doc_ids}
+ });
+
+ T(repResult.ok);
+ T(repResult.docs_written === valid_doc_ids.length);
+ T(repResult.docs_read === valid_doc_ids.length);
+ T(repResult.doc_write_failures === 0);
+
+ for (var k = 0; k < all_docs.length; k++) {
+ var doc = all_docs[k];
+ var tgt_doc = dbB.open(doc._id);
+
+ if (doc_ids.indexOf(doc._id) >= 0) {
+ T(tgt_doc !== null);
+ T(tgt_doc.value === doc.value);
+ } else {
+ T(tgt_doc === null);
+ }
+ }
+
+ for (var k = 0; k < invalid_doc_ids.length; k++) {
+ var tgt_doc = dbB.open(invalid_doc_ids[k]);
+
+ T(tgt_doc === null);
+ }
+ }
+ }
+
+ // test filtered replication
+ var filterFun1 = (function(doc, req) {
+ if (doc.value < Number(req.query.maxvalue)) {
+ return true;
+ } else {
+ return false;
+ }
+ }).toString();
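+  // the if/else above just spells out the comparison; a filter function only
+  // needs to return a truthy or falsy value, so an equivalent body would be:
+  //   return doc.value < Number(req.query.maxvalue);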
+
+ var filterFun2 = (function(doc, req) {
+ return true;
+ }).toString();
+
+ var dbPairs = [
+ {source:"test_suite_filtered_rep_db_a",
+ target:"test_suite_filtered_rep_db_b"},
+ {source:"test_suite_filtered_rep_db_a",
+ target:CouchDB.protocol + host + "/test_suite_filtered_rep_db_b"},
+ {source:CouchDB.protocol + host + "/test_suite_filtered_rep_db_a",
+ target:"test_suite_filtered_rep_db_b"},
+ {source:CouchDB.protocol + host + "/test_suite_filtered_rep_db_a",
+ target:CouchDB.protocol + host + "/test_suite_filtered_rep_db_b"}
+ ];
+ var sourceDb = new CouchDB("test_suite_filtered_rep_db_a");
+ var targetDb = new CouchDB("test_suite_filtered_rep_db_b");
+
+ for (var i = 0; i < dbPairs.length; i++) {
+ sourceDb.deleteDb();
+ sourceDb.createDb();
+
+ T(sourceDb.save({_id: "foo1", value: 1}).ok);
+ T(sourceDb.save({_id: "foo2", value: 2}).ok);
+ T(sourceDb.save({_id: "foo3", value: 3}).ok);
+ T(sourceDb.save({_id: "foo4", value: 4}).ok);
+
+ var ddoc = {
+ "_id": "_design/mydesign",
+ "language": "javascript",
+ "filters": {
+ "myfilter": filterFun1
+ }
+ };
+
+ T(sourceDb.save(ddoc).ok);
+
+ targetDb.deleteDb();
+ targetDb.createDb();
+
+ var dbA = dbPairs[i].source;
+ var dbB = dbPairs[i].target;
+
+ var repResult = CouchDB.replicate(dbA, dbB, {
+ body: {
+ "filter" : "mydesign/myfilter",
+ "query_params" : {
+ "maxvalue": "3"
+ }
+ }
+ });
+
+ T(repResult.ok);
+ T(repResult.history instanceof Array);
+ T(repResult.history.length === 1);
+ T(repResult.history[0].docs_written === 2);
+ T(repResult.history[0].docs_read === 2);
+ T(repResult.history[0].doc_write_failures === 0);
+
+ var docFoo1 = targetDb.open("foo1");
+ T(docFoo1 !== null);
+ T(docFoo1.value === 1);
+
+ var docFoo2 = targetDb.open("foo2");
+ T(docFoo2 !== null);
+ T(docFoo2.value === 2);
+
+ var docFoo3 = targetDb.open("foo3");
+ T(docFoo3 === null);
+
+ var docFoo4 = targetDb.open("foo4");
+ T(docFoo4 === null);
+
+ // replication should start from scratch after the filter's code changed
+
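+  // the replication id is derived in part from the filter's source code, so
+  // editing the filter invalidates the old checkpoint and the source is
+  // re-read from sequence 0; only the docs still missing on the target
+  // (foo3, foo4 and the design doc) get written, hence the counts of 3 below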
+ ddoc.filters.myfilter = filterFun2;
+ T(sourceDb.save(ddoc).ok);
+
+ repResult = CouchDB.replicate(dbA, dbB, {
+ body: {
+ "filter" : "mydesign/myfilter",
+ "query_params" : {
+ "maxvalue": "3"
+ }
+ }
+ });
+
+ T(repResult.ok);
+ T(repResult.history instanceof Array);
+ T(repResult.history.length === 1);
+ T(repResult.history[0].docs_written === 3);
+ T(repResult.history[0].docs_read === 3);
+ T(repResult.history[0].doc_write_failures === 0);
+
+ docFoo1 = targetDb.open("foo1");
+ T(docFoo1 !== null);
+ T(docFoo1.value === 1);
+
+ docFoo2 = targetDb.open("foo2");
+ T(docFoo2 !== null);
+ T(docFoo2.value === 2);
+
+ docFoo3 = targetDb.open("foo3");
+ T(docFoo3 !== null);
+ T(docFoo3.value === 3);
+
+ docFoo4 = targetDb.open("foo4");
+ T(docFoo4 !== null);
+ T(docFoo4.value === 4);
+
+ T(targetDb.open("_design/mydesign") !== null);
+ }
+
+ // test for COUCHDB-868 - design docs' attachments not getting replicated
+ // when doing a pull replication with HTTP basic auth
+ dbA = new CouchDB("test_suite_db_a");
+ dbB = new CouchDB("test_suite_db_b");
+ var usersDb = new CouchDB("test_suite_auth");
+ var lorem = CouchDB.request(
+ "GET", "/_utils/script/test/lorem.txt").responseText;
+ var lorem_b64 = CouchDB.request(
+ "GET", "/_utils/script/test/lorem_b64.txt").responseText;
+
+ usersDb.deleteDb();
+ usersDb.createDb();
+ dbA.deleteDb();
+ dbA.createDb();
+ dbB.deleteDb();
+ dbB.createDb();
+
+ var atts_ddoc = {
+ _id: "_design/i_have_atts",
+ language: "javascript"
+ };
+ T(dbA.save(atts_ddoc).ok);
+
+ var rev = atts_ddoc._rev;
+ var att_1_name = "lorem.txt";
+ var att_2_name = "lorem.dat";
+ var xhr = CouchDB.request(
+ "PUT", "/" + dbA.name + "/" + atts_ddoc._id + "/" + att_1_name + "?rev=" + rev, {
+ headers: {"Content-Type": "text/plain;charset=utf-8"},
+ body: lorem
+ });
+ rev = JSON.parse(xhr.responseText).rev;
+ T(xhr.status === 201);
+ xhr = CouchDB.request(
+ "PUT", "/" + dbA.name + "/" + atts_ddoc._id + "/" + att_2_name + "?rev=" + rev, {
+ headers: {"Content-Type": "application/data"},
+ body: lorem_b64
+ });
+ T(xhr.status === 201);
+
+ var fdmananaUserDoc = CouchDB.prepareUserDoc({
+ name: "fdmanana",
+ roles: ["reader"]
+ }, "qwerty");
+ T(usersDb.save(fdmananaUserDoc).ok);
+
+ T(dbA.setSecObj({
+ admins: {
+ names: [],
+ roles: ["admin"]
+ },
+ readers: {
+ names: [],
+ roles: ["reader"]
+ }
+ }).ok);
+ T(dbB.setSecObj({
+ admins: {
+ names: ["fdmanana"],
+ roles: []
+ }
+ }).ok);
+
+ var server_config = [
+ {
+ section: "couch_httpd_auth",
+ key: "authentication_db",
+ value: usersDb.name
+ },
+ // to prevent admin party mode
+ {
+ section: "admins",
+ key: "joe",
+ value: "erlang"
+ }
+ ];
+
+ var test_fun = function() {
+ T(CouchDB.login("fdmanana", "qwerty").ok);
+ T(CouchDB.session().userCtx.name === "fdmanana");
+ T(CouchDB.session().userCtx.roles.indexOf("_admin") === -1);
+
+ var repResult = CouchDB.replicate(
+ CouchDB.protocol + "fdmanana:qwerty@" + host + "/" + dbA.name,
+ dbB.name
+ );
+ T(repResult.ok === true);
+ T(repResult.history instanceof Array);
+ T(repResult.history.length === 1);
+ T(repResult.history[0].docs_written === 1);
+ T(repResult.history[0].docs_read === 1);
+ T(repResult.history[0].doc_write_failures === 0);
+
+ var atts_ddoc_copy = dbB.open(atts_ddoc._id);
+ T(atts_ddoc_copy !== null);
+ T(typeof atts_ddoc_copy._attachments === "object");
+ T(atts_ddoc_copy._attachments !== null);
+ T(att_1_name in atts_ddoc_copy._attachments);
+ T(att_2_name in atts_ddoc_copy._attachments);
+
+ var xhr = CouchDB.request("GET", "/" + dbB.name + "/" + atts_ddoc._id + "/" + att_1_name);
+ T(xhr.status === 200);
+ T(xhr.responseText === lorem);
+
+ xhr = CouchDB.request("GET", "/" + dbB.name + "/" + atts_ddoc._id + "/" + att_2_name);
+ T(xhr.status === 200);
+ T(xhr.responseText === lorem_b64);
+
+ CouchDB.logout();
+ T(CouchDB.login("joe", "erlang").ok);
+ T(dbA.setSecObj({
+ admins: {
+ names: [],
+ roles: ["bar"]
+ },
+ readers: {
+ names: [],
+ roles: ["foo"]
+ }
+ }).ok);
+ T(dbB.deleteDb().ok === true);
+ T(dbB.createDb().ok === true);
+ T(dbB.setSecObj({
+ admins: {
+ names: ["fdmanana"],
+ roles: []
+ }
+ }).ok);
+ CouchDB.logout();
+
+ T(CouchDB.login("fdmanana", "qwerty").ok);
+ T(CouchDB.session().userCtx.name === "fdmanana");
+ T(CouchDB.session().userCtx.roles.indexOf("_admin") === -1);
+ try {
+ repResult = CouchDB.replicate(
+ CouchDB.protocol + "fdmanana:qwerty@" + host + "/" + dbA.name,
+ dbB.name
+ );
+ T(false, "replication should have failed");
+ } catch(x) {
+ T(x.error === "unauthorized");
+ }
+
+ atts_ddoc_copy = dbB.open(atts_ddoc._id);
+ T(atts_ddoc_copy === null);
+
+ CouchDB.logout();
+ T(CouchDB.login("joe", "erlang").ok);
+ };
+
+ run_on_modified_server(server_config, test_fun);
+
+ // cleanup
+ dbA.deleteDb();
+ dbB.deleteDb();
+ usersDb.deleteDb();
};
diff --git a/share/www/script/test/replicator_db.js b/share/www/script/test/replicator_db.js
new file mode 100644
index 00000000..3c6a5d8e
--- /dev/null
+++ b/share/www/script/test/replicator_db.js
@@ -0,0 +1,818 @@
+// Licensed under the Apache License, Version 2.0 (the "License"); you may not
+// use this file except in compliance with the License. You may obtain a copy of
+// the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations under
+// the License.
+
+couchTests.replicator_db = function(debug) {
+
+ if (debug) debugger;
+
+ var wait_rep_doc = 500; // number of millisecs to wait after saving a Rep Doc
+ var host = CouchDB.host;
+ var dbA = new CouchDB("test_suite_rep_db_a", {"X-Couch-Full-Commit":"false"});
+ var dbB = new CouchDB("test_suite_rep_db_b", {"X-Couch-Full-Commit":"false"});
+ var repDb = new CouchDB("test_suite_rep_db", {"X-Couch-Full-Commit":"false"});
+ var usersDb = new CouchDB("test_suite_auth", {"X-Couch-Full-Commit":"false"});
+
+ var docs1 = [
+ {
+ _id: "foo1",
+ value: 11
+ },
+ {
+ _id: "foo2",
+ value: 22
+ },
+ {
+ _id: "foo3",
+ value: 33
+ }
+ ];
+
+ function waitForRep(repDb, repDoc, state) {
+ var newRep,
+ t0 = new Date(),
+ t1,
+ ms = 1000;
+
+ do {
+ newRep = repDb.open(repDoc._id);
+ t1 = new Date();
+ } while (((t1 - t0) <= ms) && newRep._replication_state !== state);
+ }
+
+ function waitForSeq(sourceDb, targetDb) {
+ var targetSeq,
+ sourceSeq = sourceDb.info().update_seq,
+ t0 = new Date(),
+ t1,
+ ms = 1000;
+
+ do {
+ targetSeq = targetDb.info().update_seq;
+ t1 = new Date();
+ } while (((t1 - t0) <= ms) && targetSeq < sourceSeq);
+ }
+
+ function wait(ms) {
+ var t0 = new Date(), t1;
+ do {
+ CouchDB.request("GET", "/");
+ t1 = new Date();
+ } while ((t1 - t0) <= ms);
+ }
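+  // note: the test harness is synchronous JavaScript with no sleep
+  // primitive, so the three helpers above busy-poll; in wait() the GET "/"
+  // round trips are what actually consume the time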
+
+
+ function populate_db(db, docs) {
+ db.deleteDb();
+ db.createDb();
+ for (var i = 0; i < docs.length; i++) {
+ var d = docs[i];
+ delete d._rev;
+ T(db.save(d).ok);
+ }
+ }
+
+ function simple_replication() {
+ populate_db(dbA, docs1);
+ populate_db(dbB, []);
+
+ var repDoc = {
+ _id: "foo_simple_rep",
+ source: dbA.name,
+ target: dbB.name
+ };
+ T(repDb.save(repDoc).ok);
+
+ waitForRep(repDb, repDoc, "completed");
+ for (var i = 0; i < docs1.length; i++) {
+ var doc = docs1[i];
+ var copy = dbB.open(doc._id);
+ T(copy !== null);
+ T(copy.value === doc.value);
+ }
+
+ var repDoc1 = repDb.open(repDoc._id);
+ T(repDoc1 !== null);
+ T(repDoc1.source === repDoc.source);
+ T(repDoc1.target === repDoc.target);
+ T(repDoc1._replication_state === "completed", "simple");
+ T(typeof repDoc1._replication_state_time === "number");
+ T(typeof repDoc1._replication_id === "string");
+ }
+
+
+ function filtered_replication() {
+ var docs2 = docs1.concat([
+ {
+ _id: "_design/mydesign",
+ language : "javascript",
+ filters : {
+ myfilter : (function(doc, req) {
+ return (doc.value % 2) !== Number(req.query.myparam);
+ }).toString()
+ }
+ }
+ ]);
+
+ populate_db(dbA, docs2);
+ populate_db(dbB, []);
+
+ var repDoc = {
+ _id: "foo_filt_rep_doc",
+ source: "http://" + host + "/" + dbA.name,
+ target: dbB.name,
+ filter: "mydesign/myfilter",
+ query_params: {
+ myparam: 1
+ }
+ };
+ T(repDb.save(repDoc).ok);
+
+ waitForRep(repDb, repDoc, "completed");
+ for (var i = 0; i < docs2.length; i++) {
+ var doc = docs2[i];
+ var copy = dbB.open(doc._id);
+
+ if (typeof doc.value === "number") {
+ if ((doc.value % 2) !== 1) {
+ T(copy !== null);
+ T(copy.value === doc.value);
+ } else {
+ T(copy === null);
+ }
+ }
+ }
+
+ var repDoc1 = repDb.open(repDoc._id);
+ T(repDoc1 !== null);
+ T(repDoc1.source === repDoc.source);
+ T(repDoc1.target === repDoc.target);
+ T(repDoc1._replication_state === "completed", "filtered");
+ T(typeof repDoc1._replication_state_time === "number");
+ T(typeof repDoc1._replication_id === "string");
+ }
+
+
+ function continuous_replication() {
+ populate_db(dbA, docs1);
+ populate_db(dbB, []);
+
+ var repDoc = {
+ _id: "foo_cont_rep_doc",
+ source: "http://" + host + "/" + dbA.name,
+ target: dbB.name,
+ continuous: true
+ };
+
+ T(repDb.save(repDoc).ok);
+
+ waitForSeq(dbA, dbB);
+ for (var i = 0; i < docs1.length; i++) {
+ var doc = docs1[i];
+ var copy = dbB.open(doc._id);
+ T(copy !== null);
+ T(copy.value === doc.value);
+ }
+
+    // add another doc to the source; it will be replicated to the target
+ var docX = {
+ _id: "foo1000",
+ value: 1001
+ };
+
+ T(dbA.save(docX).ok);
+
+ waitForSeq(dbA, dbB);
+ var copy = dbB.open("foo1000");
+ T(copy !== null);
+ T(copy.value === 1001);
+
+ var repDoc1 = repDb.open(repDoc._id);
+ T(repDoc1 !== null);
+ T(repDoc1.source === repDoc.source);
+ T(repDoc1.target === repDoc.target);
+ T(repDoc1._replication_state === "triggered");
+ T(typeof repDoc1._replication_state_time === "number");
+ T(typeof repDoc1._replication_id === "string");
+
+ // add a design doc to source, it will be replicated to target
+ // when the "user_ctx" property is not defined in the replication doc,
+ // the replication will be done under an _admin context, therefore
+ // design docs will be replicated
+ var ddoc = {
+ _id: "_design/foobar",
+ language: "javascript"
+ };
+
+ T(dbA.save(ddoc).ok);
+
+ waitForSeq(dbA, dbB);
+ var ddoc_copy = dbB.open("_design/foobar");
+ T(ddoc_copy !== null);
+    T(ddoc_copy.language === "javascript");
+
+    // update the design doc on the source and test that the new revision is replicated
+ ddoc.language = "erlang";
+ T(dbA.save(ddoc).ok);
+ T(ddoc._rev.indexOf("2-") === 0);
+
+ waitForSeq(dbA, dbB);
+ ddoc_copy = dbB.open("_design/foobar");
+ T(ddoc_copy !== null);
+ T(ddoc_copy._rev === ddoc._rev);
+    T(ddoc_copy.language === "erlang");
+
+ // stop replication by deleting the replication document
+ T(repDb.deleteDoc(repDoc1).ok);
+
+    // add another doc to the source; it will NOT be replicated to the target
+ var docY = {
+ _id: "foo666",
+ value: 999
+ };
+
+ T(dbA.save(docY).ok);
+
+ wait(200); // is there a way to avoid wait here?
+ var copy = dbB.open("foo666");
+ T(copy === null);
+ }
+
+
+ function by_doc_ids_replication() {
+ // to test that we can replicate docs with slashes in their IDs
+ var docs2 = docs1.concat([
+ {
+ _id: "_design/mydesign",
+ language : "javascript"
+ }
+ ]);
+
+ populate_db(dbA, docs2);
+ populate_db(dbB, []);
+
+ var repDoc = {
+ _id: "foo_cont_rep_doc",
+ source: "http://" + host + "/" + dbA.name,
+ target: dbB.name,
+ doc_ids: ["foo666", "foo3", "_design/mydesign", "foo999", "foo1"]
+ };
+ T(repDb.save(repDoc).ok);
+
+ waitForRep(repDb, repDoc, "completed");
+ var copy = dbB.open("foo1");
+ T(copy !== null);
+ T(copy.value === 11);
+
+ copy = dbB.open("foo2");
+ T(copy === null);
+
+ copy = dbB.open("foo3");
+ T(copy !== null);
+ T(copy.value === 33);
+
+ copy = dbB.open("foo666");
+ T(copy === null);
+
+ copy = dbB.open("foo999");
+ T(copy === null);
+
+ copy = dbB.open("_design/mydesign");
+ T(copy !== null);
+ T(copy.language === "javascript");
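+
+    // ids that do not exist in the source ("foo666", "foo999") are simply
+    // skipped; a doc_ids replication does not fail because of unknown ids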
+ }
+
+
+ function successive_identical_replications() {
+ populate_db(dbA, docs1);
+ populate_db(dbB, []);
+
+ var repDoc1 = {
+ _id: "foo_ident_rep_1",
+ source: dbA.name,
+ target: dbB.name
+ };
+ T(repDb.save(repDoc1).ok);
+
+ waitForRep(repDb, repDoc1, "completed");
+ for (var i = 0; i < docs1.length; i++) {
+ var doc = docs1[i];
+ var copy = dbB.open(doc._id);
+ T(copy !== null);
+ T(copy.value === doc.value);
+ }
+
+ var repDoc1_copy = repDb.open(repDoc1._id);
+ T(repDoc1_copy !== null);
+ T(repDoc1_copy.source === repDoc1.source);
+ T(repDoc1_copy.target === repDoc1.target);
+ T(repDoc1_copy._replication_state === "completed");
+ T(typeof repDoc1_copy._replication_state_time === "number");
+ T(typeof repDoc1_copy._replication_id === "string");
+
+ var newDoc = {
+ _id: "doc666",
+ value: 666
+ };
+ T(dbA.save(newDoc).ok);
+
+ wait(200);
+ var newDoc_copy = dbB.open(newDoc._id);
+ // not replicated because first replication is complete (not continuous)
+ T(newDoc_copy === null);
+
+ var repDoc2 = {
+ _id: "foo_ident_rep_2",
+ source: dbA.name,
+ target: dbB.name
+ };
+ T(repDb.save(repDoc2).ok);
+
+ waitForRep(repDb, repDoc2, "completed");
+ var newDoc_copy = dbB.open(newDoc._id);
+ T(newDoc_copy !== null);
+ T(newDoc_copy.value === newDoc.value);
+
+ var repDoc2_copy = repDb.open(repDoc2._id);
+ T(repDoc2_copy !== null);
+ T(repDoc2_copy.source === repDoc1.source);
+ T(repDoc2_copy.target === repDoc1.target);
+ T(repDoc2_copy._replication_state === "completed");
+ T(typeof repDoc2_copy._replication_state_time === "number");
+ T(typeof repDoc2_copy._replication_id === "string");
+ T(repDoc2_copy._replication_id === repDoc1_copy._replication_id);
+ }
+
+
+  // test the case where multiple replication docs (with different IDs)
+  // in fact describe the same replication (source, target, etc.)
+ function identical_rep_docs() {
+ populate_db(dbA, docs1);
+ populate_db(dbB, []);
+
+ var repDoc1 = {
+ _id: "foo_dup_rep_doc_1",
+ source: "http://" + host + "/" + dbA.name,
+ target: dbB.name
+ };
+ var repDoc2 = {
+ _id: "foo_dup_rep_doc_2",
+ source: "http://" + host + "/" + dbA.name,
+ target: dbB.name
+ };
+
+ T(repDb.save(repDoc1).ok);
+ T(repDb.save(repDoc2).ok);
+
+ waitForRep(repDb, repDoc1, "completed");
+ for (var i = 0; i < docs1.length; i++) {
+ var doc = docs1[i];
+ var copy = dbB.open(doc._id);
+ T(copy !== null);
+ T(copy.value === doc.value);
+ }
+
+ repDoc1 = repDb.open("foo_dup_rep_doc_1");
+ T(repDoc1 !== null);
+ T(repDoc1._replication_state === "completed", "identical");
+ T(typeof repDoc1._replication_state_time === "number");
+ T(typeof repDoc1._replication_id === "string");
+
+ repDoc2 = repDb.open("foo_dup_rep_doc_2");
+ T(repDoc2 !== null);
+ T(typeof repDoc2._replication_state === "undefined");
+ T(typeof repDoc2._replication_state_time === "undefined");
+ T(repDoc2._replication_id === repDoc1._replication_id);
+ }
+
+
+  // test the case where multiple replication docs (with different IDs)
+  // in fact describe the same continuous replication (source, target, etc.)
+ function identical_continuous_rep_docs() {
+ populate_db(dbA, docs1);
+ populate_db(dbB, []);
+
+ var repDoc1 = {
+ _id: "foo_dup_cont_rep_doc_1",
+ source: "http://" + host + "/" + dbA.name,
+ target: dbB.name,
+ continuous: true
+ };
+ var repDoc2 = {
+ _id: "foo_dup_cont_rep_doc_2",
+ source: "http://" + host + "/" + dbA.name,
+ target: dbB.name,
+ continuous: true
+ };
+
+ T(repDb.save(repDoc1).ok);
+ T(repDb.save(repDoc2).ok);
+
+ waitForSeq(dbA, dbB);
+ for (var i = 0; i < docs1.length; i++) {
+ var doc = docs1[i];
+ var copy = dbB.open(doc._id);
+ T(copy !== null);
+ T(copy.value === doc.value);
+ }
+
+ repDoc1 = repDb.open("foo_dup_cont_rep_doc_1");
+ T(repDoc1 !== null);
+ T(repDoc1._replication_state === "triggered");
+ T(typeof repDoc1._replication_state_time === "number");
+ T(typeof repDoc1._replication_id === "string");
+
+ repDoc2 = repDb.open("foo_dup_cont_rep_doc_2");
+ T(repDoc2 !== null);
+ T(typeof repDoc2._replication_state === "undefined");
+ T(typeof repDoc2._replication_state_time === "undefined");
+ T(repDoc2._replication_id === repDoc1._replication_id);
+
+ var newDoc = {
+ _id: "foo666",
+ value: 999
+ };
+ T(dbA.save(newDoc).ok);
+
+ waitForSeq(dbA, dbB);
+ var copy = dbB.open("foo666");
+ T(copy !== null);
+ T(copy.value === 999);
+
+    // deleting the second replication doc doesn't affect the first one,
+    // nor does it stop the replication
+ T(repDb.deleteDoc(repDoc2).ok);
+ repDoc1 = repDb.open("foo_dup_cont_rep_doc_1");
+ T(repDoc1 !== null);
+ T(repDoc1._replication_state === "triggered");
+ T(typeof repDoc1._replication_state_time === "number");
+
+ var newDoc2 = {
+ _id: "foo5000",
+ value: 5000
+ };
+ T(dbA.save(newDoc2).ok);
+
+ waitForSeq(dbA, dbB);
+ var copy = dbB.open("foo5000");
+ T(copy !== null);
+ T(copy.value === 5000);
+
+ // deleting the 1st replication document stops the replication
+ T(repDb.deleteDoc(repDoc1).ok);
+ var newDoc3 = {
+ _id: "foo1983",
+ value: 1983
+ };
+ T(dbA.save(newDoc3).ok);
+
+    wait(wait_rep_doc); // is there a way to avoid this wait?
+ var copy = dbB.open("foo1983");
+ T(copy === null);
+ }
+
+
+ function test_replication_credentials_delegation() {
+ populate_db(usersDb, []);
+
+ var joeUserDoc = CouchDB.prepareUserDoc({
+ name: "joe",
+ roles: ["god", "erlanger"]
+ }, "erly");
+ T(usersDb.save(joeUserDoc).ok);
+
+ var ddoc = {
+ _id: "_design/beer",
+ language: "javascript"
+ };
+ populate_db(dbA, docs1.concat([ddoc]));
+ populate_db(dbB, []);
+
+ T(dbB.setSecObj({
+ admins: {
+ names: [],
+ roles: ["god"]
+ }
+ }).ok);
+
+ var server_admins_config = [
+ {
+ section: "admins",
+ key: "fdmanana",
+ value: "qwerty"
+ }
+ ];
+
+ run_on_modified_server(server_admins_config, function() {
+
+ T(CouchDB.login("fdmanana", "qwerty").ok);
+ T(CouchDB.session().userCtx.name === "fdmanana");
+ T(CouchDB.session().userCtx.roles.indexOf("_admin") !== -1);
+
+ var repDoc = {
+ _id: "foo_rep_del_doc_1",
+ source: dbA.name,
+ target: dbB.name,
+ user_ctx: {
+ name: "joe",
+ roles: ["erlanger"]
+ }
+ };
+
+ T(repDb.save(repDoc).ok);
+
+ waitForRep(repDb, repDoc, "completed");
+ for (var i = 0; i < docs1.length; i++) {
+ var doc = docs1[i];
+ var copy = dbB.open(doc._id);
+ T(copy !== null);
+ T(copy.value === doc.value);
+ }
+
+ // design doc was not replicated, because joe is not an admin of db B
+ var doc = dbB.open(ddoc._id);
+ T(doc === null);
+
+ // now test the same replication but putting the role "god" in the
+ // delegation user context property
+ var repDoc2 = {
+ _id: "foo_rep_del_doc_2",
+ source: dbA.name,
+ target: dbB.name,
+ user_ctx: {
+ name: "joe",
+ roles: ["erlanger", "god"]
+ }
+ };
+ T(repDb.save(repDoc2).ok);
+
+ waitForRep(repDb, repDoc2, "completed");
+ for (var i = 0; i < docs1.length; i++) {
+ var doc = docs1[i];
+ var copy = dbB.open(doc._id);
+ T(copy !== null);
+ T(copy.value === doc.value);
+ }
+
+ // because anyone with a 'god' role is an admin of db B, a replication
+ // that is delegated to a 'god' role can write design docs to db B
+ doc = dbB.open(ddoc._id);
+ T(doc !== null);
+ T(doc.language === ddoc.language);
+ });
+ }
+
+
+ function continuous_replication_survives_restart() {
+ var origRepDbName = CouchDB.request(
+ "GET", "/_config/replicator/db").responseText;
+
+ repDb.deleteDb();
+
+ var xhr = CouchDB.request("PUT", "/_config/replicator/db", {
+ body : JSON.stringify(repDb.name),
+ headers: {"X-Couch-Persist": "false"}
+ });
+ T(xhr.status === 200);
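+
+    // the X-Couch-Persist: false header keeps the setting in memory only, so
+    // it is never written to local.ini and cannot leak into later test runs;
+    // that is also why the PUT is repeated after restartServer() below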
+
+ populate_db(dbA, docs1);
+ populate_db(dbB, []);
+
+ var repDoc = {
+ _id: "foo_cont_rep_survives_doc",
+ source: "http://" + host + "/" + dbA.name,
+ target: dbB.name,
+ continuous: true
+ };
+
+ T(repDb.save(repDoc).ok);
+
+ waitForSeq(dbA, dbB);
+ for (var i = 0; i < docs1.length; i++) {
+ var doc = docs1[i];
+ var copy = dbB.open(doc._id);
+ T(copy !== null);
+ T(copy.value === doc.value);
+ }
+
+ repDb.ensureFullCommit();
+ dbA.ensureFullCommit();
+
+ restartServer();
+
+ xhr = CouchDB.request("PUT", "/_config/replicator/db", {
+ body : JSON.stringify(repDb.name),
+ headers: {"X-Couch-Persist": "false"}
+ });
+
+ T(xhr.status === 200);
+
+    // add another doc to the source; it will be replicated to the target
+ var docX = {
+ _id: "foo1000",
+ value: 1001
+ };
+
+ T(dbA.save(docX).ok);
+
+ waitForSeq(dbA, dbB);
+ var copy = dbB.open("foo1000");
+ T(copy !== null);
+ T(copy.value === 1001);
+
+ repDoc = repDb.open("foo_cont_rep_survives_doc");
+ T(repDoc !== null);
+ T(repDoc.continuous === true);
+
+ // stop replication
+ T(repDb.deleteDoc(repDoc).ok);
+
+ xhr = CouchDB.request("PUT", "/_config/replicator/db", {
+ body : origRepDbName,
+ headers: {"X-Couch-Persist": "false"}
+ });
+ T(xhr.status === 200);
+ }
+
+
+ function rep_db_write_authorization() {
+ populate_db(dbA, docs1);
+ populate_db(dbB, []);
+
+ var server_admins_config = [
+ {
+ section: "admins",
+ key: "fdmanana",
+ value: "qwerty"
+ }
+ ];
+
+ run_on_modified_server(server_admins_config, function() {
+ var repDoc = {
+ _id: "foo_rep_doc",
+ source: dbA.name,
+ target: dbB.name
+ };
+
+ T(CouchDB.login("fdmanana", "qwerty").ok);
+ T(CouchDB.session().userCtx.name === "fdmanana");
+ T(CouchDB.session().userCtx.roles.indexOf("_admin") !== -1);
+
+ T(repDb.save(repDoc).ok);
+
+ waitForRep(repDb, repDoc, "completed");
+
+ for (var i = 0; i < docs1.length; i++) {
+ var doc = docs1[i];
+ var copy = dbB.open(doc._id);
+
+ T(copy !== null);
+ T(copy.value === doc.value);
+ }
+
+ repDoc = repDb.open("foo_rep_doc");
+ T(repDoc !== null);
+ repDoc.target = "test_suite_foo_db";
+ repDoc.create_target = true;
+
+ // Only the replicator can update replication documents.
+ // Admins can only add and delete replication documents.
+ try {
+ repDb.save(repDoc);
+ T(false && "Should have thrown an exception");
+ } catch (x) {
+ T(x["error"] === "forbidden");
+ }
+ });
+ }
+
+
+ function rep_doc_with_bad_rep_id() {
+ populate_db(dbA, docs1);
+ populate_db(dbB, []);
+
+ var repDoc = {
+ _id: "foo_rep",
+ source: dbA.name,
+ target: dbB.name,
+ replication_id: "1234abc"
+ };
+ T(repDb.save(repDoc).ok);
+
+ waitForRep(repDb, repDoc, "completed");
+ for (var i = 0; i < docs1.length; i++) {
+ var doc = docs1[i];
+ var copy = dbB.open(doc._id);
+ T(copy !== null);
+ T(copy.value === doc.value);
+ }
+
+ var repDoc1 = repDb.open(repDoc._id);
+ T(repDoc1 !== null);
+ T(repDoc1.source === repDoc.source);
+ T(repDoc1.target === repDoc.target);
+ T(repDoc1._replication_state === "completed",
+ "replication document with bad replication id failed");
+ T(typeof repDoc1._replication_state_time === "number");
+ T(typeof repDoc1._replication_id === "string");
+ T(repDoc1._replication_id !== "1234abc");
+ }
+
+
+ function error_state_replication() {
+ populate_db(dbA, docs1);
+
+ var repDoc = {
+ _id: "foo_error_rep",
+ source: dbA.name,
+ target: "nonexistent_test_db"
+ };
+ T(repDb.save(repDoc).ok);
+
+ waitForRep(repDb, repDoc, "error");
+ var repDoc1 = repDb.open(repDoc._id);
+ T(repDoc1 !== null);
+ T(repDoc1._replication_state === "error");
+ T(typeof repDoc1._replication_state_time === "number");
+ T(typeof repDoc1._replication_id === "string");
+ }
+
+
+ // run all the tests
+ var server_config = [
+ {
+ section: "replicator",
+ key: "db",
+ value: repDb.name
+ }
+ ];
+
+ repDb.deleteDb();
+ run_on_modified_server(server_config, simple_replication);
+
+ repDb.deleteDb();
+ restartServer();
+ run_on_modified_server(server_config, filtered_replication);
+
+ repDb.deleteDb();
+ restartServer();
+ run_on_modified_server(server_config, continuous_replication);
+
+ repDb.deleteDb();
+ restartServer();
+ run_on_modified_server(server_config, by_doc_ids_replication);
+
+ repDb.deleteDb();
+ restartServer();
+ run_on_modified_server(server_config, successive_identical_replications);
+
+ repDb.deleteDb();
+ restartServer();
+ run_on_modified_server(server_config, identical_rep_docs);
+
+ repDb.deleteDb();
+ restartServer();
+ run_on_modified_server(server_config, identical_continuous_rep_docs);
+
+ repDb.deleteDb();
+ restartServer();
+ run_on_modified_server(server_config, rep_db_write_authorization);
+
+ repDb.deleteDb();
+ restartServer();
+ run_on_modified_server(server_config, rep_doc_with_bad_rep_id);
+
+ var server_config_2 = server_config.concat([
+ {
+ section: "couch_httpd_auth",
+ key: "authentication_db",
+ value: usersDb.name
+ }
+ ]);
+ repDb.deleteDb();
+ restartServer();
+ run_on_modified_server(server_config_2, test_replication_credentials_delegation);
+
+ repDb.deleteDb();
+ restartServer();
+ continuous_replication_survives_restart();
+
+ repDb.deleteDb();
+ restartServer();
+ run_on_modified_server(server_config, error_state_replication);
+
+
+ // cleanup
+ repDb.deleteDb();
+ usersDb.deleteDb();
+ dbA.deleteDb();
+ dbB.deleteDb();
+};
diff --git a/share/www/script/test/rewrite.js b/share/www/script/test/rewrite.js
new file mode 100644
index 00000000..474abb5e
--- /dev/null
+++ b/share/www/script/test/rewrite.js
@@ -0,0 +1,443 @@
+// Licensed under the Apache License, Version 2.0 (the "License"); you may not
+// use this file except in compliance with the License. You may obtain a copy of
+// the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations under
+// the License.
+
+
+
+couchTests.rewrite = function(debug) {
+  // this tests the _rewrite handler
+
+
+ var db = new CouchDB("test_suite_db", {"X-Couch-Full-Commit":"false"});
+ db.deleteDb();
+ db.createDb();
+
+
+ if (debug) debugger;
+ run_on_modified_server(
+ [{section: "httpd",
+ key: "authentication_handlers",
+ value: "{couch_httpd_auth, special_test_authentication_handler}"},
+ {section:"httpd",
+ key: "WWW-Authenticate",
+ value: "X-Couch-Test-Auth"}],
+
+ function(){
+ var designDoc = {
+ _id:"_design/test",
+ language: "javascript",
+ _attachments:{
+ "foo.txt": {
+ content_type:"text/plain",
+ data: "VGhpcyBpcyBhIGJhc2U2NCBlbmNvZGVkIHRleHQ="
+ }
+ },
+ rewrites: [
+ {
+ "from": "foo",
+ "to": "foo.txt"
+ },
+ {
+ "from": "foo2",
+ "to": "foo.txt",
+ "method": "GET"
+ },
+ {
+ "from": "hello/:id",
+ "to": "_update/hello/:id",
+ "method": "PUT"
+ },
+ {
+ "from": "/welcome",
+ "to": "_show/welcome"
+ },
+ {
+ "from": "/welcome/:name",
+ "to": "_show/welcome",
+ "query": {
+ "name": ":name"
+ }
+ },
+ {
+ "from": "/welcome2",
+ "to": "_show/welcome",
+ "query": {
+ "name": "user"
+ }
+ },
+ {
+ "from": "/welcome3/:name",
+ "to": "_update/welcome2/:name",
+ "method": "PUT"
+ },
+ {
+ "from": "/welcome3/:name",
+ "to": "_show/welcome2/:name",
+ "method": "GET"
+ },
+ {
+ "from": "/welcome4/*",
+ "to" : "_show/welcome3",
+ "query": {
+ "name": "*"
+ }
+ },
+ {
+ "from": "/type/<type>.json",
+ "to": "_show/type/:type",
+ "query": {
+ "format": "json"
+ }
+ },
+ {
+ "from": "/type/<type>.xml",
+ "to": "_show/type/:type",
+ "query": {
+ "format": "xml"
+ }
+ },
+ {
+ "from": "/type/<type>",
+ "to": "_show/type/:type",
+ "query": {
+ "format": "html"
+ }
+ },
+ {
+ "from": "/welcome5/*",
+ "to" : "_show/*",
+ "query": {
+ "name": "*"
+ }
+ },
+ {
+ "from": "basicView",
+ "to": "_view/basicView",
+ },
+ {
+ "from": "simpleForm/basicView",
+ "to": "_list/simpleForm/basicView",
+ },
+ {
+ "from": "simpleForm/basicViewFixed",
+ "to": "_list/simpleForm/basicView",
+ "query": {
+ "startkey": 3,
+ "endkey": 8
+ }
+ },
+ {
+ "from": "simpleForm/basicViewPath/:start/:end",
+ "to": "_list/simpleForm/basicView",
+ "query": {
+ "startkey": ":start",
+ "endkey": ":end"
+ }
+ },
+ {
+ "from": "simpleForm/complexView",
+ "to": "_list/simpleForm/complexView",
+ "query": {
+ "key": [1, 2]
+ }
+ },
+ {
+ "from": "simpleForm/complexView2",
+ "to": "_list/simpleForm/complexView",
+ "query": {
+ "key": ["test", {}]
+ }
+ },
+ {
+ "from": "simpleForm/complexView3",
+ "to": "_list/simpleForm/complexView",
+ "query": {
+ "key": ["test", ["test", "essai"]]
+ }
+ },
+ {
+ "from": "simpleForm/complexView4",
+ "to": "_list/simpleForm/complexView2",
+ "query": {
+ "key": {"c": 1}
+ }
+ },
+ {
+ "from": "simpleForm/complexView5/:a/:b",
+ "to": "_list/simpleForm/complexView3",
+ "query": {
+ "key": [":a", ":b"]
+ }
+ },
+ {
+ "from": "simpleForm/complexView6",
+ "to": "_list/simpleForm/complexView3",
+ "query": {
+ "key": [":a", ":b"]
+ }
+ },
+ {
+ "from": "/",
+ "to": "_view/basicView",
+ }
+ ],
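+      // pattern syntax exercised by the rules above: ":name" captures one
+      // path segment for reuse in "to" and "query", "*" captures the rest of
+      // the path, and "<name>" captures within a single segment, as in
+      // "/type/<type>.json"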
+ lists: {
+ simpleForm: stringFun(function(head, req) {
+ log("simpleForm");
+ send('<ul>');
+ var row, row_number = 0, prevKey, firstKey = null;
+ while (row = getRow()) {
+ row_number += 1;
+ if (!firstKey) firstKey = row.key;
+ prevKey = row.key;
+ send('\n<li>Key: '+row.key
+ +' Value: '+row.value
+ +' LineNo: '+row_number+'</li>');
+ }
+ return '</ul><p>FirstKey: '+ firstKey + ' LastKey: '+ prevKey+'</p>';
+        })
+ },
+ shows: {
+ "welcome": stringFun(function(doc,req) {
+ return "Welcome " + req.query["name"];
+ }),
+ "welcome2": stringFun(function(doc, req) {
+ return "Welcome " + doc.name;
+ }),
+ "welcome3": stringFun(function(doc,req) {
+ return "Welcome " + req.query["name"];
+ }),
+ "type": stringFun(function(doc, req) {
+ return req.id + " as " + req.query.format;
+ })
+ },
+ updates: {
+ "hello" : stringFun(function(doc, req) {
+ if (!doc) {
+ if (req.id) {
+ return [{
+ _id : req.id
+            }, "New World"];
+ }
+ return [null, "Empty World"];
+ }
+ doc.world = "hello";
+ doc.edited_by = req.userCtx;
+ return [doc, "hello doc"];
+ }),
+ "welcome2": stringFun(function(doc, req) {
+ if (!doc) {
+ if (req.id) {
+ return [{
+ _id: req.id,
+ name: req.id
+            }, "New World"];
+ }
+ return [null, "Empty World"];
+ }
+ return [doc, "hello doc"];
+ })
+ },
+ views : {
+ basicView : {
+ map : stringFun(function(doc) {
+ if (doc.integer) {
+ emit(doc.integer, doc.string);
+ }
+
+ })
+ },
+ complexView: {
+ map: stringFun(function(doc) {
+ if (doc.type == "complex") {
+ emit([doc.a, doc.b], doc.string);
+ }
+ })
+ },
+ complexView2: {
+ map: stringFun(function(doc) {
+ if (doc.type == "complex") {
+ emit(doc.a, doc.string);
+ }
+ })
+ },
+ complexView3: {
+ map: stringFun(function(doc) {
+ if (doc.type == "complex") {
+ emit(doc.b, doc.string);
+ }
+ })
+ }
+ }
+ }
+  };
+ db.save(designDoc);
+
+ var docs = makeDocs(0, 10);
+ db.bulkSave(docs);
+
+ var docs2 = [
+ {"a": 1, "b": 1, "string": "doc 1", "type": "complex"},
+ {"a": 1, "b": 2, "string": "doc 2", "type": "complex"},
+ {"a": "test", "b": {}, "string": "doc 3", "type": "complex"},
+ {"a": "test", "b": ["test", "essai"], "string": "doc 4", "type": "complex"},
+ {"a": {"c": 1}, "b": "", "string": "doc 5", "type": "complex"}
+ ];
+
+ db.bulkSave(docs2);
+
+ // test simple rewriting
+
+  var req = CouchDB.request("GET", "/test_suite_db/_design/test/_rewrite/foo");
+ T(req.responseText == "This is a base64 encoded text");
+ T(req.getResponseHeader("Content-Type") == "text/plain");
+
+ req = CouchDB.request("GET", "/test_suite_db/_design/test/_rewrite/foo2");
+ T(req.responseText == "This is a base64 encoded text");
+ T(req.getResponseHeader("Content-Type") == "text/plain");
+
+
+ // test POST
+ // hello update world
+
+ var doc = {"word":"plankton", "name":"Rusty"}
+ var resp = db.save(doc);
+ T(resp.ok);
+ var docid = resp.id;
+
+ xhr = CouchDB.request("PUT", "/test_suite_db/_design/test/_rewrite/hello/"+docid);
+ T(xhr.status == 201);
+ T(xhr.responseText == "hello doc");
+  T(/charset=utf-8/.test(xhr.getResponseHeader("Content-Type")));
+
+ doc = db.open(docid);
+ T(doc.world == "hello");
+
+ req = CouchDB.request("GET", "/test_suite_db/_design/test/_rewrite/welcome?name=user");
+ T(req.responseText == "Welcome user");
+
+ req = CouchDB.request("GET", "/test_suite_db/_design/test/_rewrite/welcome/user");
+ T(req.responseText == "Welcome user");
+
+ req = CouchDB.request("GET", "/test_suite_db/_design/test/_rewrite/welcome2");
+ T(req.responseText == "Welcome user");
+
+ xhr = CouchDB.request("PUT", "/test_suite_db/_design/test/_rewrite/welcome3/test");
+ T(xhr.status == 201);
+ T(xhr.responseText == "New World");
+ T(/charset=utf-8/.test(xhr.getResponseHeader("Content-Type")));
+
+ xhr = CouchDB.request("GET", "/test_suite_db/_design/test/_rewrite/welcome3/test");
+ T(xhr.responseText == "Welcome test");
+
+ req = CouchDB.request("GET", "/test_suite_db/_design/test/_rewrite/welcome4/user");
+ T(req.responseText == "Welcome user");
+
+ req = CouchDB.request("GET", "/test_suite_db/_design/test/_rewrite/welcome5/welcome3");
+ T(req.responseText == "Welcome welcome3");
+
+ xhr = CouchDB.request("GET", "/test_suite_db/_design/test/_rewrite/basicView");
+ T(xhr.status == 200, "view call");
+ T(/{"total_rows":9/.test(xhr.responseText));
+
+ xhr = CouchDB.request("GET", "/test_suite_db/_design/test/_rewrite/");
+ T(xhr.status == 200, "view call");
+ T(/{"total_rows":9/.test(xhr.responseText));
+
+
+ // get with query params
+ xhr = CouchDB.request("GET", "/test_suite_db/_design/test/_rewrite/simpleForm/basicView?startkey=3&endkey=8");
+ T(xhr.status == 200, "with query params");
+ T(!(/Key: 1/.test(xhr.responseText)));
+ T(/FirstKey: 3/.test(xhr.responseText));
+ T(/LastKey: 8/.test(xhr.responseText));
+
+ xhr = CouchDB.request("GET", "/test_suite_db/_design/test/_rewrite/simpleForm/basicViewFixed");
+ T(xhr.status == 200, "with query params");
+ T(!(/Key: 1/.test(xhr.responseText)));
+ T(/FirstKey: 3/.test(xhr.responseText));
+ T(/LastKey: 8/.test(xhr.responseText));
+
+ // get with query params
+ xhr = CouchDB.request("GET", "/test_suite_db/_design/test/_rewrite/simpleForm/basicViewFixed?startkey=4");
+ T(xhr.status == 200, "with query params");
+ T(!(/Key: 1/.test(xhr.responseText)));
+ T(/FirstKey: 3/.test(xhr.responseText));
+ T(/LastKey: 8/.test(xhr.responseText));
+
+ // get with query params
+ xhr = CouchDB.request("GET", "/test_suite_db/_design/test/_rewrite/simpleForm/basicViewPath/3/8");
+ T(xhr.status == 200, "with query params");
+ T(!(/Key: 1/.test(xhr.responseText)));
+ T(/FirstKey: 3/.test(xhr.responseText));
+ T(/LastKey: 8/.test(xhr.responseText));
+
+ // get with query params
+ xhr = CouchDB.request("GET", "/test_suite_db/_design/test/_rewrite/simpleForm/complexView");
+ T(xhr.status == 200, "with query params");
+ T(/FirstKey: [1, 2]/.test(xhr.responseText));
+
+ xhr = CouchDB.request("GET", "/test_suite_db/_design/test/_rewrite/simpleForm/complexView2");
+ T(xhr.status == 200, "with query params");
+ T(/Value: doc 3/.test(xhr.responseText));
+
+ xhr = CouchDB.request("GET", "/test_suite_db/_design/test/_rewrite/simpleForm/complexView3");
+ T(xhr.status == 200, "with query params");
+ T(/Value: doc 4/.test(xhr.responseText));
+
+ xhr = CouchDB.request("GET", "/test_suite_db/_design/test/_rewrite/simpleForm/complexView4");
+ T(xhr.status == 200, "with query params");
+ T(/Value: doc 5/.test(xhr.responseText));
+
+ xhr = CouchDB.request("GET", "/test_suite_db/_design/test/_rewrite/simpleForm/complexView5/test/essai");
+ T(xhr.status == 200, "with query params");
+ T(/Value: doc 4/.test(xhr.responseText));
+
+ xhr = CouchDB.request("GET", "/test_suite_db/_design/test/_rewrite/simpleForm/complexView6?a=test&b=essai");
+ T(xhr.status == 200, "with query params");
+ T(/Value: doc 4/.test(xhr.responseText));
+
+ req = CouchDB.request("GET", "/test_suite_db/_design/test/_rewrite/type/test.json");
+ T(req.responseText == "test as json");
+
+ req = CouchDB.request("GET", "/test_suite_db/_design/test/_rewrite/type/test.xml");
+ T(req.responseText == "test as xml");
+
+ req = CouchDB.request("GET", "/test_suite_db/_design/test/_rewrite/type/test");
+ T(req.responseText == "test as html");
+
+ // test path relative to server
+ designDoc.rewrites.push({
+ "from": "uuids",
+ "to": "../../../_uuids"
+ });
+ T(db.save(designDoc).ok);
+
+ var xhr = CouchDB.request("GET", "/test_suite_db/_design/test/_rewrite/uuids");
+ T(xhr.status == 500);
+ var result = JSON.parse(xhr.responseText);
+ T(result.error == "insecure_rewrite_rule");
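+
+  // with the default secure_rewrites = true, any rule whose target climbs
+  // out of the database with "../" is refused, which is the 500 asserted
+  // above; the block below disables the restriction and retries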
+
+ run_on_modified_server(
+ [{section: "httpd",
+ key: "secure_rewrites",
+ value: "false"}],
+ function() {
+ var xhr = CouchDB.request("GET", "/test_suite_db/_design/test/_rewrite/uuids?cache=bust");
+ T(xhr.status == 200);
+ var result = JSON.parse(xhr.responseText);
+ T(result.uuids.length == 1);
+ var first = result.uuids[0];
+ });
+
+ });
+
+}
diff --git a/share/www/script/test/security_validation.js b/share/www/script/test/security_validation.js
index d07195e1..42aa11c9 100644
--- a/share/www/script/test/security_validation.js
+++ b/share/www/script/test/security_validation.js
@@ -13,7 +13,7 @@
couchTests.security_validation = function(debug) {
// This tests couchdb's security and validation features. This does
// not test authentication, except to use test authentication code made
- // specifically for this testing. It is a WWWW-Authenticate scheme named
+ // specifically for this testing. It is a WWW-Authenticate scheme named
// X-Couch-Test-Auth, and the user names and passwords are hard coded
// on the server-side.
//
@@ -21,7 +21,7 @@ couchTests.security_validation = function(debug) {
// implementation for Firefox and Safari, and probably other browsers are
// broken (Firefox always prompts the user on 401 failures, Safari gives
// odd security errors when using different name/passwords, perhaps due
- // to cross site scripting prevention). These problems essentially make Basic
+ // to cross site scripting prevention). These problems essentially make Basic
// authentication testing in the browser impossible. But while hard to
  // test in an automated way in the browser, Basic auth may still be useful for real
// world use where these bugs/behaviors don't matter.
@@ -69,7 +69,13 @@ couchTests.security_validation = function(debug) {
var designDoc = {
_id:"_design/test",
language: "javascript",
- validate_doc_update: "(" + (function (newDoc, oldDoc, userCtx) {
+ validate_doc_update: "(" + (function (newDoc, oldDoc, userCtx, secObj) {
+ if (secObj.admin_override) {
+ if (userCtx.roles.indexOf('_admin') != -1) {
+ // user is admin, they can do anything
+ return true;
+ }
+ }
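+      // secObj is the database security object, passed as a new fourth
+      // argument to validate_doc_update; admin_override is not a built-in
+      // flag, just a property this test stores in the security object via
+      // setDbProperty("_security", ...) further down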
// docs should have an author field.
if (!newDoc._deleted && !newDoc.author) {
throw {forbidden:
@@ -99,13 +105,27 @@ couchTests.security_validation = function(debug) {
}
// set user as the admin
- T(db.setDbProperty("_admins", ["Damien Katz"]).ok);
+ T(db.setSecObj({
+ admins : {names : ["Damien Katz"]}
+ }).ok);
T(userDb.save(designDoc).ok);
- // test the _whoami endpoint
+ var user2Db = new CouchDB("test_suite_db",
+ {"WWW-Authenticate": "X-Couch-Test-Auth Jan Lehnardt:apple"}
+ );
+  // Attempt to save the design doc as a non-admin (in a replication scenario)
+ try {
+ user2Db.save(designDoc, {new_edits : false});
+ T(false && "Can't get here. Should have thrown an error on design doc");
+ } catch (e) {
+ T(e.error == "unauthorized");
+ T(user2Db.last_req.status == 401);
+ }
+
+ // test the _session API
var resp = userDb.request("GET", "/_session");
- var user = JSON.parse(resp.responseText)
+ var user = JSON.parse(resp.responseText).userCtx;
T(user.name == "Damien Katz");
// test that the roles are listed properly
TEquals(user.roles, []);
@@ -116,20 +136,23 @@ couchTests.security_validation = function(debug) {
doc.foo=2;
T(userDb.save(doc).ok);
- // Save a document that's missing an author field.
- try {
- userDb.save({foo:1});
- T(false && "Can't get here. Should have thrown an error 2");
- } catch (e) {
- T(e.error == "forbidden");
- T(userDb.last_req.status == 403);
+ // Save a document that's missing an author field (before and after compaction)
+ for (var i=0; i<2; i++) {
+ try {
+ userDb.save({foo:1});
+ T(false && "Can't get here. Should have thrown an error 2");
+ } catch (e) {
+ T(e.error == "forbidden");
+ T(userDb.last_req.status == 403);
+ }
+ // compact.
+ T(db.compact().ok);
+ T(db.last_req.status == 202);
+      // compaction isn't instantaneous; loop until it finishes
+      while (db.info().compact_running) {}
}
// Now attempt to update the document as a different user, Jan
- var user2Db = new CouchDB("test_suite_db",
- {"WWW-Authenticate": "X-Couch-Test-Auth Jan Lehnardt:apple"}
- );
-
var doc = user2Db.open("testdoc");
doc.foo=3;
try {
@@ -158,6 +181,37 @@ couchTests.security_validation = function(debug) {
T(e.error == "unauthorized");
T(userDb.last_req.status == 401);
}
+
+  // an admin must also save with an author field unless admin_override is set
+ var resp = db.request("GET", "/_session");
+ var user = JSON.parse(resp.responseText).userCtx;
+ T(user.name == null);
+ // test that we are admin
+ TEquals(user.roles, ["_admin"]);
+
+ // can't save the doc even though we are admin
+ var doc = db.open("testdoc");
+ doc.foo=3;
+ try {
+ db.save(doc);
+ T(false && "Can't get here. Should have thrown an error 3");
+ } catch (e) {
+ T(e.error == "unauthorized");
+ T(db.last_req.status == 401);
+ }
+
+ // now turn on admin override
+ T(db.setDbProperty("_security", {admin_override : true}).ok);
+ T(db.save(doc).ok);
+
+ // try to do something lame
+ try {
+ db.setDbProperty("_security", ["foo"]);
+ T(false && "can't do this");
+ } catch(e) {}
+
+ // go back to normal
+ T(db.setDbProperty("_security", {admin_override : false}).ok);
// Now delete document
T(user2Db.deleteDoc(doc).ok);
@@ -188,7 +242,6 @@ couchTests.security_validation = function(debug) {
T(db.open("booboo") == null);
T(db.open("foofoo") == null);
-
// Now test replication
var AuthHeaders = {"WWW-Authenticate": "X-Couch-Test-Auth Christopher Lenz:dog food"};
var host = CouchDB.host;
@@ -197,16 +250,16 @@ couchTests.security_validation = function(debug) {
target:"test_suite_db_b"},
{source:"test_suite_db_a",
- target:{url: "http://" + host + "/test_suite_db_b",
+ target:{url: CouchDB.protocol + host + "/test_suite_db_b",
headers: AuthHeaders}},
- {source:{url:"http://" + host + "/test_suite_db_a",
+ {source:{url:CouchDB.protocol + host + "/test_suite_db_a",
headers: AuthHeaders},
target:"test_suite_db_b"},
- {source:{url:"http://" + host + "/test_suite_db_a",
+ {source:{url:CouchDB.protocol + host + "/test_suite_db_a",
headers: AuthHeaders},
- target:{url:"http://" + host + "/test_suite_db_b",
+ target:{url:CouchDB.protocol + host + "/test_suite_db_b",
headers: AuthHeaders}},
]
var adminDbA = new CouchDB("test_suite_db_a", {"X-Couch-Full-Commit":"false"});
diff --git a/share/www/script/test/show_documents.js b/share/www/script/test/show_documents.js
index f484e029..55ed9698 100644
--- a/share/www/script/test/show_documents.js
+++ b/share/www/script/test/show_documents.js
@@ -25,7 +25,11 @@ couchTests.show_documents = function(debug) {
if (doc) {
return "Hello World";
} else {
- return "Empty World";
+ if(req.id) {
+ return "New World";
+ } else {
+ return "Empty World";
+ }
}
}),
"just-name" : stringFun(function(doc, req) {
@@ -50,18 +54,28 @@ couchTests.show_documents = function(debug) {
json : req
}
}),
+ "show-deleted" : stringFun(function(doc, req) {
+ if(doc) {
+ return doc._id;
+ } else {
+ return "No doc " + req.id;
+ }
+ }),
"render-error" : stringFun(function(doc, req) {
return noSuchVariable;
}),
"empty" : stringFun(function(doc, req) {
return "";
}),
+ "fail" : stringFun(function(doc, req) {
+ return doc._id;
+ }),
"xml-type" : stringFun(function(doc, req) {
return {
"headers" : {
"Content-Type" : "application/xml"
},
- "body" : new XML('<xml><node foo="bar"/></xml>')
+ "body" : new XML('<xml><node foo="bar"/></xml>').toXMLString()
}
}),
"no-set-etag" : stringFun(function(doc, req) {
@@ -72,6 +86,25 @@ couchTests.show_documents = function(debug) {
"body" : "something"
}
}),
+ "list-api" : stringFun(function(doc, req) {
+ start({"X-Couch-Test-Header": "Yeah"});
+ send("Hey");
+ }),
+ "list-api-mix" : stringFun(function(doc, req) {
+ start({"X-Couch-Test-Header": "Yeah"});
+ send("Hey ");
+ return "Dude";
+ }),
+ "list-api-mix-with-header" : stringFun(function(doc, req) {
+ start({"X-Couch-Test-Header": "Yeah"});
+ send("Hey ");
+ return {
+ headers: {
+ "X-Couch-Test-Header-Awesome": "Oh Yeah!"
+ },
+ body: "Dude"
+ };
+ }),
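+      // the three "list-api*" show functions above use the list API surface:
+      // start() sets the response headers, send() streams body chunks, and
+      // whatever is returned afterwards is appended to the body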
"accept-switch" : stringFun(function(doc, req) {
if (req.headers["Accept"].match(/image/)) {
return {
@@ -114,12 +147,19 @@ couchTests.show_documents = function(debug) {
// E4X outside of a string. Outside of tests you
// can just use E4X literals.
eval('xml.node.@foo = doc.word');
- return xml;
+ log('xml: '+xml.toSource());
+ return xml.toXMLString();
});
provides("foo", function() {
return "foofoo";
});
+ }),
+ "withSlash": stringFun(function(doc, req) {
+        return { json: doc };
+ }),
+ "secObj": stringFun(function(doc, req) {
+ return { json: req.secObj };
})
}
};
@@ -157,7 +197,7 @@ couchTests.show_documents = function(debug) {
T(xhr.responseText == "");
// // hello template world (non-existing docid)
- xhr = CouchDB.request("GET", "/test_suite_db/_design/template/_show/hello/nonExistingDoc");
+ xhr = CouchDB.request("GET", "/test_suite_db/_design/template/_show/fail/nonExistingDoc");
T(xhr.status == 404);
var resp = JSON.parse(xhr.responseText);
T(resp.error == "not_found");
@@ -169,8 +209,7 @@ couchTests.show_documents = function(debug) {
// show with missing doc
xhr = CouchDB.request("GET", "/test_suite_db/_design/template/_show/just-name/missingdoc");
T(xhr.status == 404);
- var resp = JSON.parse(xhr.responseText);
- T(resp.error == "not_found");
+ TEquals("No such doc", xhr.responseText);
// show with missing func
xhr = CouchDB.request("GET", "/test_suite_db/_design/template/_show/missing/"+docid);
@@ -342,4 +381,56 @@ couchTests.show_documents = function(debug) {
xhr = CouchDB.request("GET", "/test_suite_db/_design/template/_show/json/foo");
TEquals(1, JSON.parse(xhr.responseText)._conflicts.length);
+ var doc3 = {_id:"a/b/c", a:1};
+ db.save(doc3);
+ xhr = CouchDB.request("GET", "/test_suite_db/_design/template/_show/withSlash/a/b/c");
+ T(xhr.status == 200);
+
+ // hello template world (non-existing docid)
+ xhr = CouchDB.request("GET", "/test_suite_db/_design/template/_show/hello/nonExistingDoc");
+ T(xhr.responseText == "New World");
+
+ // test list() compatible API
+ xhr = CouchDB.request("GET", "/test_suite_db/_design/template/_show/list-api/foo");
+ T(xhr.responseText == "Hey");
+ TEquals("Yeah", xhr.getResponseHeader("X-Couch-Test-Header"), "header should be cool");
+
+ xhr = CouchDB.request("GET", "/test_suite_db/_design/template/_show/list-api-mix/foo");
+ T(xhr.responseText == "Hey Dude");
+ TEquals("Yeah", xhr.getResponseHeader("X-Couch-Test-Header"), "header should be cool");
+
+ xhr = CouchDB.request("GET", "/test_suite_db/_design/template/_show/list-api-mix-with-header/foo");
+ T(xhr.responseText == "Hey Dude");
+ TEquals("Yeah", xhr.getResponseHeader("X-Couch-Test-Header"), "header should be cool");
+ TEquals("Oh Yeah!", xhr.getResponseHeader("X-Couch-Test-Header-Awesome"), "header should be cool");
+
+ // test deleted docs
+ var doc = {_id:"testdoc",foo:1};
+ db.save(doc);
+ var xhr = CouchDB.request("GET", "/test_suite_db/_design/template/_show/show-deleted/testdoc");
+ TEquals("testdoc", xhr.responseText, "should return 'testdoc'");
+
+ db.deleteDoc(doc);
+ var xhr = CouchDB.request("GET", "/test_suite_db/_design/template/_show/show-deleted/testdoc");
+ TEquals("No doc testdoc", xhr.responseText, "should return 'no doc testdoc'");
+
+
+ run_on_modified_server(
+ [{section: "httpd",
+ key: "authentication_handlers",
+ value: "{couch_httpd_auth, special_test_authentication_handler}"},
+ {section:"httpd",
+ key: "WWW-Authenticate",
+ value: "X-Couch-Test-Auth"}],
+
+ function() {
+ T(db.setDbProperty("_security", {foo: true}).ok);
+ T(db.save(doc).ok);
+
+ xhr = CouchDB.request("GET", "/test_suite_db/_design/template/_show/secObj");
+ var resp = JSON.parse(xhr.responseText);
+ T(resp.foo == true);
+ }
+ );
+
};
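
Note on the list-api additions above: _show functions can now use the chunked-response API that _list functions get, calling start() once and send() per chunk, with any returned string or response object appended after the sent chunks (so list-api-mix yields "Hey Dude"). A minimal sketch of such a show function, mirroring the call shape in these tests; the header name is illustrative:

    function(doc, req) {
      // start() must come first: it fixes the status and headers,
      // passed in the same shape the tests above use
      start({"X-Example-Header": "Yeah"});
      // send() streams a chunk to the client
      send("Hey ");
      // the return value is appended to what was already sent
      return "Dude";
    }
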
diff --git a/share/www/script/test/stats.js b/share/www/script/test/stats.js
index 793b390d..6fb0fbba 100644
--- a/share/www/script/test/stats.js
+++ b/share/www/script/test/stats.js
@@ -30,7 +30,7 @@ couchTests.stats = function(debug) {
_id:"_design/test", // turn off couch.js id escaping?
language: "javascript",
views: {
- all_docs: {map: "function(doc) {emit(doc.integer, null);}"},
+ all_docs: {map: "function(doc) {emit(doc.integer, null);}"}
}
};
db.save(designDoc);
@@ -81,19 +81,17 @@ couchTests.stats = function(debug) {
var pre_dbs = getStat("couchdb", "open_databases").current || 0;
var pre_files = getStat("couchdb", "open_os_files").current || 0;
- // We have to make sure that as we open the max'th database
- // that we've waited for more than 1 second since opening
- // the first database so that any delayed commits will be
- // flushed.
var triggered = false;
var db = null;
for(var i = 0; i < max*2; i++) {
- try {
- db = newDb("test_suite_db_" + i, true);
- } catch(e) {
- triggered = true;
- CouchDB.request("GET", "/_sleep?time=1500");
- db = newDb("test_suite_db_" + i, true);
+ while (true) {
+ try {
+ db = newDb("test_suite_db_" + i, true);
+ break;
+ } catch(e) {
+ // all_dbs_active error!
+ triggered = true;
+ }
}
// Trigger a delayed commit
@@ -162,12 +160,15 @@ couchTests.stats = function(debug) {
runTest("couchdb", "database_writes", {
run: function(db) {
- CouchDB.request("POST", "/test_suite_db", {body: '{"a": "1"}'})
+ CouchDB.request("POST", "/test_suite_db", {
+ headers: {"Content-Type": "application/json"},
+ body: '{"a": "1"}'
+ });
},
test: function(before, after) {
TEquals(before+1, after, "POST'ing new docs increments doc writes.");
}
- })
+ });
runTest("couchdb", "database_writes", {
setup: function(db) {db.save({"_id": "test"});},
@@ -246,7 +247,7 @@ couchTests.stats = function(debug) {
});
runTest("httpd", "temporary_view_reads", {
- run: function(db) {db.query(function(doc) {emit(doc._id)})},
+ run: function(db) { db.query(function(doc) { emit(doc._id); }); },
test: function(before, after) {
TEquals(before+1, after, "Temporary views have their own counter.");
}
@@ -260,7 +261,7 @@ couchTests.stats = function(debug) {
});
runTest("httpd", "view_reads", {
- run: function(db) {db.query(function(doc) {emit(doc._id)});},
+ run: function(db) { db.query(function(doc) { emit(doc._id); }); },
test: function(before, after) {
TEquals(before, after, "Temporary views don't affect permanent views.");
}
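
The database_writes fix above tracks a stricter policy: these tests now send an explicit JSON Content-Type on document POSTs (the view_errors hunk later in this patch asserts 415 for a non-JSON type on _temp_view). A sketch using the suite's CouchDB.request helper; database name and body are illustrative:

    var xhr = CouchDB.request("POST", "/test_suite_db", {
      headers: {"Content-Type": "application/json"},  // declared, not guessed
      body: JSON.stringify({a: "1"})
    });
    T(xhr.status == 201);  // document created
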
diff --git a/share/www/script/test/update_documents.js b/share/www/script/test/update_documents.js
index 87fc7352..49d3b68a 100644
--- a/share/www/script/test/update_documents.js
+++ b/share/www/script/test/update_documents.js
@@ -68,10 +68,13 @@ couchTests.update_documents = function(debug) {
"headers" : {
"Content-Type" : "application/xml"
},
- "body" : xml
+ "body" : xml.toXMLString()
};
return [doc, resp];
+ }),
+ "get-uuid" : stringFun(function(doc, req) {
+ return [null, req.uuid];
})
}
};
@@ -110,9 +113,13 @@ couchTests.update_documents = function(debug) {
T(JSON.parse(xhr.responseText).error == "method_not_allowed");
// // hello update world (non-existing docid)
+ xhr = CouchDB.request("GET", "/test_suite_db/nonExistingDoc");
+ T(xhr.status == 404);
xhr = CouchDB.request("PUT", "/test_suite_db/_design/update/_update/hello/nonExistingDoc");
T(xhr.status == 201);
T(xhr.responseText == "<p>New World</p>");
+ xhr = CouchDB.request("GET", "/test_suite_db/nonExistingDoc");
+ T(xhr.status == 200);
// in place update
xhr = CouchDB.request("PUT", "/test_suite_db/_design/update/_update/in-place/"+docid+'?field=title&value=test');
@@ -123,7 +130,7 @@ couchTests.update_documents = function(debug) {
// bump counter
xhr = CouchDB.request("PUT", "/test_suite_db/_design/update/_update/bump-counter/"+docid, {
- headers : {"X-Couch-Full-Commit":"false"}
+ headers : {"X-Couch-Full-Commit":"true"}
});
T(xhr.status == 201);
T(xhr.responseText == "<h1>bumped it!</h1>");
@@ -135,12 +142,16 @@ couchTests.update_documents = function(debug) {
headers : {"X-Couch-Full-Commit":"true"}
});
+ var NewRev = xhr.getResponseHeader("X-Couch-Update-NewRev");
doc = db.open(docid);
+ T(doc['_rev'] == NewRev);
+
+
T(doc.counter == 2);
// parse xml
xhr = CouchDB.request("PUT", "/test_suite_db/_design/update/_update/xml/"+docid, {
- headers : {"X-Couch-Full-Commit":"false"},
+ headers : {"X-Couch-Full-Commit":"true"},
"body" : '<xml><foo>bar</foo></xml>'
});
T(xhr.status == 201);
@@ -148,5 +159,10 @@ couchTests.update_documents = function(debug) {
doc = db.open(docid);
T(doc.via_xml == "bar");
+
+ // Server provides UUID when POSTing without an ID in the URL
+ xhr = CouchDB.request("POST", "/test_suite_db/_design/update/_update/get-uuid/");
+ T(xhr.status == 200);
+ T(xhr.responseText.length == 32);
};
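
Two additions above deserve a gloss: update handlers now receive req.uuid (a fresh server-generated ID, exercised by get-uuid), and the server reports the written revision in the X-Couch-Update-NewRev response header. A sketch of a handler in the shape these tests use; field names are illustrative:

    function(doc, req) {
      if (!doc) {
        // no doc matched the URL; adopt the server-provided uuid
        return [{_id: req.uuid, counter: 0}, "created"];
      }
      doc.counter += 1;
      // element one is written to the db, element two is the response body;
      // the HTTP response carries X-Couch-Update-NewRev with the new revision
      return [doc, "bumped it!"];
    }
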
diff --git a/share/www/script/test/users_db.js b/share/www/script/test/users_db.js
index 2cf63fcf..1e13e5d7 100644
--- a/share/www/script/test/users_db.js
+++ b/share/www/script/test/users_db.js
@@ -24,44 +24,101 @@ couchTests.users_db = function(debug) {
// to determine the actual users db name.
function testFun() {
- usersDb.deleteDb();
-
// test that the validation function is installed
var ddoc = usersDb.open("_design/_auth");
T(ddoc.validate_doc_update);
// test that you can login as a user using basic auth
var jchrisUserDoc = CouchDB.prepareUserDoc({
- username: "jchris@apache.org"
+ name: "jchris@apache.org"
}, "funnybone");
T(usersDb.save(jchrisUserDoc).ok);
- T(CouchDB.session().name == null);
+ T(CouchDB.session().userCtx.name == null);
+
+ // test that you can use basic auth against the users db
var s = CouchDB.session({
headers : {
+ // base64_encode("jchris@apache.org:funnybone")
"Authorization" : "Basic amNocmlzQGFwYWNoZS5vcmc6ZnVubnlib25l"
}
});
- T(s.name == "jchris@apache.org");
- T(s.user_doc._id == "org.couchdb.user:jchris@apache.org")
- T(s.info.authenticated == "{couch_httpd_auth, default_authentication_handler}");
- T(s.info.user_db == "test_suite_users");
- TEquals(["{couch_httpd_oauth, oauth_authentication_handler}",
- "{couch_httpd_auth, cookie_authentication_handler}",
- "{couch_httpd_auth, default_authentication_handler}"], s.info.handlers);
+ T(s.userCtx.name == "jchris@apache.org");
+ T(s.info.authenticated == "default");
+ T(s.info.authentication_db == "test_suite_users");
+ TEquals(["oauth", "cookie", "default"], s.info.authentication_handlers);
var s = CouchDB.session({
headers : {
- "Authorization" : "Basic Xzpf" // username and pass of _:_
+ "Authorization" : "Basic Xzpf" // name and pass of _:_
}
});
T(s.name == null);
- T(s.info.authenticated == "{couch_httpd_auth, default_authentication_handler}");
+ T(s.info.authenticated == "default");
+
+
+ // ok, now create a conflicting edit on the jchris doc, and make sure there's no login.
+ var jchrisUser2 = JSON.parse(JSON.stringify(jchrisUserDoc));
+ jchrisUser2.foo = "bar";
+ T(usersDb.save(jchrisUser2).ok);
+ try {
+ usersDb.save(jchrisUserDoc);
+ T(false && "should be an update conflict")
+ } catch(e) {
+ T(true);
+ }
+ // save as bulk with new_edits=false to force conflict save
+ var resp = usersDb.bulkSave([jchrisUserDoc],{all_or_nothing : true});
+
+ var jchrisWithConflict = usersDb.open(jchrisUserDoc._id, {conflicts : true});
+ T(jchrisWithConflict._conflicts.length == 1);
+
+ // no login with conflicted user doc
+ try {
+ var s = CouchDB.session({
+ headers : {
+ "Authorization" : "Basic amNocmlzQGFwYWNoZS5vcmc6ZnVubnlib25l"
+ }
+ });
+ T(false && "this will throw")
+ } catch(e) {
+ T(e.error == "unauthorized")
+ T(/conflict/.test(e.reason))
+ }
+
+ // you can delete a user doc
+ s = CouchDB.session().userCtx;
+ T(s.name == null);
+ T(s.roles.indexOf("_admin") !== -1);
+ T(usersDb.deleteDoc(jchrisWithConflict).ok);
+
+ // you can't change doc from type "user"
+ jchrisUserDoc = usersDb.open(jchrisUserDoc._id);
+ jchrisUserDoc.type = "not user";
+ try {
+ usersDb.save(jchrisUserDoc);
+ T(false && "should only allow us to save doc when type == 'user'");
+ } catch(e) {
+ T(e.reason == "doc.type must be user");
+ }
+ jchrisUserDoc.type = "user";
+
+ // "roles" must be an array
+ jchrisUserDoc.roles = "not an array";
+ try {
+ usersDb.save(jchrisUserDoc);
+ T(false && "should only allow us to save doc when roles is an array");
+ } catch(e) {
+ T(e.reason == "doc.roles must be an array");
+ }
+ jchrisUserDoc.roles = [];
};
-
+
+ usersDb.deleteDb();
run_on_modified_server(
[{section: "couch_httpd_auth",
- key: "authentication_db", value: "test_suite_users"}],
+ key: "authentication_db", value: usersDb.name}],
testFun
);
+ usersDb.deleteDb(); // cleanup
-}
\ No newline at end of file
+}
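
The renames above follow the 0.11 session API: user docs carry name instead of username, the logged-in identity lives under userCtx, and the session info fields are authentication_db and authentication_handlers with short handler names. Restated against the usersDb handle from this test:

    var userDoc = CouchDB.prepareUserDoc({
      name: "jchris@apache.org",   // "name", not the old "username"
      roles: []
    }, "funnybone");
    T(usersDb.save(userDoc).ok);   // _id is "org.couchdb.user:jchris@apache.org"

    var s = CouchDB.session();
    T(s.userCtx !== undefined);                    // identity moved under userCtx
    T(s.info.authentication_handlers.length > 0);  // e.g. ["oauth", "cookie", "default"]
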
diff --git a/share/www/script/test/uuids.js b/share/www/script/test/uuids.js
index 4de7ce90..fc33a105 100644
--- a/share/www/script/test/uuids.js
+++ b/share/www/script/test/uuids.js
@@ -93,12 +93,20 @@ couchTests.uuids = function(debug) {
xhr = CouchDB.request("GET", "/_uuids?count=1000");
T(xhr.status == 200);
result = JSON.parse(xhr.responseText);
- for(var i = 1; i < result.uuids.length; i++) {
- T(result.uuids[i].length == 32);
- var u1 = result.uuids[i-1].substr(0, 13);
- var u2 = result.uuids[i].substr(0, 13);
- T(u1 < u2, "UTC uuids are roughly ordered.");
+ T(result.uuids[1].length == 32);
+
+ // no collisions
+ var seen = {};
+ for(var i in result.uuids) {
+ var id = result.uuids[i];
+ T(seen[id] === undefined);
+ seen[id] = 1;
}
+
+ // roughly ordered
+ var u1 = result.uuids[1].substr(0, 13);
+ var u2 = result.uuids[result.uuids.length-1].substr(0, 13);
+ T(u1 < u2, "UTC uuids are only roughly ordered, so this assertion may fail occasionally. Don't sweat it.");
};
run_on_modified_server([{
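
The rewritten block separates two independent guarantees: uniqueness within a batch, and rough (not strict) time-ordering of UTC uuids, where adjacent ids may swap, so only the first and last are compared. A compact restatement:

    var result = JSON.parse(CouchDB.request("GET", "/_uuids?count=1000").responseText);
    var seen = {};
    for (var i = 0; i < result.uuids.length; i++) {
      T(seen[result.uuids[i]] === undefined);  // no duplicates in one batch
      seen[result.uuids[i]] = 1;
    }
    // the 13-char timestamp prefix of the first id should not exceed the last's
    T(result.uuids[0].substr(0, 13) <= result.uuids[result.uuids.length - 1].substr(0, 13));
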
diff --git a/share/www/script/test/view_collation.js b/share/www/script/test/view_collation.js
index f4ae4a15..b01a5c50 100644
--- a/share/www/script/test/view_collation.js
+++ b/share/www/script/test/view_collation.js
@@ -90,27 +90,27 @@ couchTests.view_collation = function(debug) {
// the inclusive_end=true functionality is limited to endkey currently
// if you need inclusive_start=false for startkey, please do implement. ;)
var rows = db.query(queryFun, null, {endkey : "b", inclusive_end:true}).rows;
- T(rows[rows.length-1].key == "b")
+ T(rows[rows.length-1].key == "b");
// descending=true
var rows = db.query(queryFun, null, {endkey : "b",
descending:true, inclusive_end:true}).rows;
- T(rows[rows.length-1].key == "b")
+ T(rows[rows.length-1].key == "b");
// test inclusive_end=false
var rows = db.query(queryFun, null, {endkey : "b", inclusive_end:false}).rows;
- T(rows[rows.length-1].key == "aa")
+ T(rows[rows.length-1].key == "aa");
// descending=true
var rows = db.query(queryFun, null, {endkey : "b",
descending:true, inclusive_end:false}).rows;
- T(rows[rows.length-1].key == "B")
+ T(rows[rows.length-1].key == "B");
var rows = db.query(queryFun, null, {
endkey : "b", endkey_docid: "10",
inclusive_end:false}).rows;
- T(rows[rows.length-1].key == "aa")
+ T(rows[rows.length-1].key == "aa");
var rows = db.query(queryFun, null, {
endkey : "b", endkey_docid: "11",
inclusive_end:false}).rows;
- T(rows[rows.length-1].key == "b")
+ T(rows[rows.length-1].key == "b");
};
diff --git a/share/www/script/test/view_collation_raw.js b/share/www/script/test/view_collation_raw.js
index 08f37fae..31624cdb 100644
--- a/share/www/script/test/view_collation_raw.js
+++ b/share/www/script/test/view_collation_raw.js
@@ -97,27 +97,27 @@ couchTests.view_collation_raw = function(debug) {
// the inclusive_end=true functionality is limited to endkey currently
// if you need inclusive_start=false for startkey, please do implement. ;)
var rows = db.view("test/test", {endkey : "b", inclusive_end:true}).rows;
- T(rows[rows.length-1].key == "b")
+ T(rows[rows.length-1].key == "b");
// descending=true
var rows = db.view("test/test", {endkey : "b",
descending:true, inclusive_end:true}).rows;
- T(rows[rows.length-1].key == "b")
+ T(rows[rows.length-1].key == "b");
// test inclusive_end=false
var rows = db.view("test/test", {endkey : "b", inclusive_end:false}).rows;
- T(rows[rows.length-1].key == "aa")
+ T(rows[rows.length-1].key == "aa");
// descending=true
var rows = db.view("test/test", {endkey : "b",
descending:true, inclusive_end:false}).rows;
- T(rows[rows.length-1].key == "ba")
+ T(rows[rows.length-1].key == "ba");
var rows = db.view("test/test", {
endkey : "b", endkey_docid: "10",
inclusive_end:false}).rows;
- T(rows[rows.length-1].key == "aa")
+ T(rows[rows.length-1].key == "aa");
var rows = db.view("test/test", {
endkey : "b", endkey_docid: "11",
inclusive_end:false}).rows;
- T(rows[rows.length-1].key == "aa")
+ T(rows[rows.length-1].key == "aa");
};
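
For reference, the endkey semantics these assertions pin down, using the view from the raw-collation test above: endkey is inclusive by default, and inclusive_end=false stops at the last row strictly before it:

    var rows = db.view("test/test", {endkey: "b"}).rows;
    T(rows[rows.length - 1].key == "b");    // inclusive by default
    rows = db.view("test/test", {endkey: "b", inclusive_end: false}).rows;
    T(rows[rows.length - 1].key == "aa");   // stops just short of "b"
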
diff --git a/share/www/script/test/view_compaction.js b/share/www/script/test/view_compaction.js
new file mode 100644
index 00000000..a11fb7bd
--- /dev/null
+++ b/share/www/script/test/view_compaction.js
@@ -0,0 +1,104 @@
+// Licensed under the Apache License, Version 2.0 (the "License"); you may not
+// use this file except in compliance with the License. You may obtain a copy of
+// the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations under
+// the License.
+
+couchTests.view_compaction = function(debug) {
+
+ if (debug) debugger;
+
+ var db = new CouchDB("test_suite_db", {"X-Couch-Full-Commit": "true"});
+
+ db.deleteDb();
+ db.createDb();
+
+ var ddoc = {
+ _id: "_design/foo",
+ language: "javascript",
+ views: {
+ view1: {
+ map: "function(doc) { emit(doc._id, doc.value) }"
+ },
+ view2: {
+ map: "function(doc) { emit(doc._id, doc.value); }",
+ reduce: "function(keys, values, rereduce) { return sum(values); }"
+ }
+ }
+ };
+ T(db.save(ddoc).ok);
+
+ var docs = makeDocs(0, 1000);
+ db.bulkSave(docs);
+
+ var resp = db.view('foo/view1', {});
+ T(resp.rows.length === 1000);
+
+ resp = db.view('foo/view2', {});
+ T(resp.rows.length === 1);
+
+ resp = db.designInfo("_design/foo");
+ T(resp.view_index.update_seq === 1001);
+
+
+ // update docs
+ for (var i = 0; i < docs.length; i++) {
+ docs[i].integer = docs[i].integer + 1;
+ }
+ db.bulkSave(docs);
+
+
+ resp = db.view('foo/view1', {});
+ T(resp.rows.length === 1000);
+
+ resp = db.view('foo/view2', {});
+ T(resp.rows.length === 1);
+
+ resp = db.designInfo("_design/foo");
+ T(resp.view_index.update_seq === 2001);
+
+
+ // update docs again...
+ for (var i = 0; i < docs.length; i++) {
+ docs[i].integer = docs[i].integer + 2;
+ }
+ db.bulkSave(docs);
+
+
+ resp = db.view('foo/view1', {});
+ T(resp.rows.length === 1000);
+
+ resp = db.view('foo/view2', {});
+ T(resp.rows.length === 1);
+
+ resp = db.designInfo("_design/foo");
+ T(resp.view_index.update_seq === 3001);
+
+ var disk_size_before_compact = resp.view_index.disk_size;
+
+ // compact view group
+ var xhr = CouchDB.request("POST", "/" + db.name + "/_compact" + "/foo");
+ T(JSON.parse(xhr.responseText).ok === true);
+
+ resp = db.designInfo("_design/foo");
+ while (resp.view_index.compact_running === true) {
+ resp = db.designInfo("_design/foo");
+ }
+
+
+ resp = db.view('foo/view1', {});
+ T(resp.rows.length === 1000);
+
+ resp = db.view('foo/view2', {});
+ T(resp.rows.length === 1);
+
+ resp = db.designInfo("_design/foo");
+ T(resp.view_index.update_seq === 3001);
+ T(resp.view_index.disk_size < disk_size_before_compact);
+};
\ No newline at end of file
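
The new test's core sequence, restated: trigger compaction of one design document's view group, poll designInfo until compact_running clears, then confirm the index file shrank. The busy-wait is acceptable in a test; a real client should sleep between polls:

    var xhr = CouchDB.request("POST", "/" + db.name + "/_compact/foo");
    T(JSON.parse(xhr.responseText).ok);
    var info = db.designInfo("_design/foo");
    while (info.view_index.compact_running) {
      info = db.designInfo("_design/foo");  // poll until compaction finishes
    }
    T(info.view_index.disk_size < disk_size_before_compact);
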
diff --git a/share/www/script/test/view_errors.js b/share/www/script/test/view_errors.js
index 0f90c46f..e8bd08e4 100644
--- a/share/www/script/test/view_errors.js
+++ b/share/www/script/test/view_errors.js
@@ -16,8 +16,6 @@ couchTests.view_errors = function(debug) {
db.createDb();
if (debug) debugger;
-
-
run_on_modified_server(
[{section: "couchdb",
key: "os_process_timeout",
@@ -26,12 +24,13 @@ couchTests.view_errors = function(debug) {
var doc = {integer: 1, string: "1", array: [1, 2, 3]};
T(db.save(doc).ok);
- // emitting a key value that is undefined should result in that row not
- // being included in the view results
+ // emitting a key value that is undefined should result in that row
+ // being included in the view results as null
var results = db.query(function(doc) {
emit(doc.undef, null);
});
- T(results.total_rows == 0);
+ T(results.total_rows == 1);
+ T(results.rows[0].key == null);
// if a view function throws an exception, its results are not included in
// the view index, but the view does not itself raise an error
@@ -41,13 +40,13 @@ couchTests.view_errors = function(debug) {
T(results.total_rows == 0);
// if a view function includes an undefined value in the emitted key or
- // value, an error is logged and the result is not included in the view
- // index, and the view itself does not raise an error
+ // value, it is treated as null
var results = db.query(function(doc) {
emit([doc._id, doc.undef], null);
});
- T(results.total_rows == 0);
-
+ T(results.total_rows == 1);
+ T(results.rows[0].key[1] == null);
+
 // querying a view with invalid params should give a reasonable error message
var xhr = CouchDB.request("POST", "/test_suite_db/_temp_view?startkey=foo", {
headers: {"Content-Type": "application/json"},
@@ -57,14 +56,14 @@ couchTests.view_errors = function(debug) {
});
T(JSON.parse(xhr.responseText).error == "bad_request");
- // views should ignore Content-Type, like the rest of CouchDB
+ // content type must be json
var xhr = CouchDB.request("POST", "/test_suite_db/_temp_view", {
headers: {"Content-Type": "application/x-www-form-urlencoded"},
body: JSON.stringify({language: "javascript",
map : "function(doc){}"
})
});
- T(xhr.status == 200);
+ T(xhr.status == 415);
var map = function (doc) {emit(doc.integer, doc.integer);};
@@ -75,9 +74,6 @@ couchTests.view_errors = function(debug) {
T(e.error == "query_parse_error");
}
- // reduce=false on map views doesn't work, so group=true will
- // never throw for temp reduce views.
-
var designDoc = {
_id:"_design/test",
language: "javascript",
@@ -105,6 +101,15 @@ couchTests.view_errors = function(debug) {
db.view("test/no_reduce", {group: true});
T(0 == 1);
} catch(e) {
+ T(db.last_req.status == 400);
+ T(e.error == "query_parse_error");
+ }
+
+ try {
+ db.view("test/no_reduce", {group_level: 1});
+ T(0 == 1);
+ } catch(e) {
+ T(db.last_req.status == 400);
T(e.error == "query_parse_error");
}
@@ -116,10 +121,23 @@ couchTests.view_errors = function(debug) {
T(e.error == "query_parse_error");
}
+ db.view("test/no_reduce", {reduce: false});
+ TEquals(200, db.last_req.status, "reduce=false for map views (without"
+ + " group or group_level) is allowed");
+
try {
db.view("test/with_reduce", {group: true, reduce: false});
T(0 == 1);
} catch(e) {
+ T(db.last_req.status == 400);
+ T(e.error == "query_parse_error");
+ }
+
+ try {
+ db.view("test/with_reduce", {group_level: 1, reduce: false});
+ T(0 == 1);
+ } catch(e) {
+ T(db.last_req.status == 400);
T(e.error == "query_parse_error");
}
@@ -159,5 +177,13 @@ couchTests.view_errors = function(debug) {
T(xhr.status == 500);
result = JSON.parse(xhr.responseText);
T(result.error == "reduce_overflow_error");
+
+ try {
+ db.query(function() {emit(null, null)}, null, {startkey: 2, endkey:1});
+ T(0 == 1);
+ } catch(e) {
+ T(e.error == "query_parse_error");
+ T(e.reason.match(/no rows can match/i));
+ }
});
};
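
The flipped assertions above record a semantic change: emitting undefined no longer drops the row; the undefined slot is serialized as null, whether it is the whole key or one element of a composite key. Against a db holding a single doc:

    var results = db.query(function(doc) { emit(doc.no_such_field, null); });
    T(results.total_rows == 1);        // the row is kept...
    T(results.rows[0].key === null);   // ...with a null key
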
diff --git a/share/www/script/test/view_multi_key_all_docs.js b/share/www/script/test/view_multi_key_all_docs.js
index 62e49665..1113be4d 100644
--- a/share/www/script/test/view_multi_key_all_docs.js
+++ b/share/www/script/test/view_multi_key_all_docs.js
@@ -25,24 +25,52 @@ couchTests.view_multi_key_all_docs = function(debug) {
for(var i=0; i<rows.length; i++)
T(rows[i].id == keys[i]);
+ // keys in GET parameters
+ rows = db.allDocs({keys:keys}, null).rows;
+ T(rows.length == keys.length);
+ for(var i=0; i<rows.length; i++)
+ T(rows[i].id == keys[i]);
+
rows = db.allDocs({limit: 1}, keys).rows;
T(rows.length == 1);
T(rows[0].id == keys[0]);
+ // keys in GET parameters
+ rows = db.allDocs({limit: 1, keys: keys}, null).rows;
+ T(rows.length == 1);
+ T(rows[0].id == keys[0]);
+
rows = db.allDocs({skip: 2}, keys).rows;
T(rows.length == 3);
for(var i=0; i<rows.length; i++)
T(rows[i].id == keys[i+2]);
+ // keys in GET parameters
+ rows = db.allDocs({skip: 2, keys: keys}, null).rows;
+ T(rows.length == 3);
+ for(var i=0; i<rows.length; i++)
+ T(rows[i].id == keys[i+2]);
+
rows = db.allDocs({descending: "true"}, keys).rows;
T(rows.length == keys.length);
for(var i=0; i<rows.length; i++)
T(rows[i].id == keys[keys.length-i-1]);
+ // keys in GET parameters
+ rows = db.allDocs({descending: "true", keys: keys}, null).rows;
+ T(rows.length == keys.length);
+ for(var i=0; i<rows.length; i++)
+ T(rows[i].id == keys[keys.length-i-1]);
+
rows = db.allDocs({descending: "true", skip: 3, limit:1}, keys).rows;
T(rows.length == 1);
T(rows[0].id == keys[1]);
+ // keys in GET parameters
+ rows = db.allDocs({descending: "true", skip: 3, limit:1, keys: keys}, null).rows;
+ T(rows.length == 1);
+ T(rows[0].id == keys[1]);
+
// Check we get invalid rows when the key doesn't exist
rows = db.allDocs({}, [1, "i_dont_exist", "0"]).rows;
T(rows.length == 3);
@@ -51,4 +79,13 @@ couchTests.view_multi_key_all_docs = function(debug) {
T(rows[1].error == "not_found");
T(!rows[1].id);
T(rows[2].id == rows[2].key && rows[2].key == "0");
+
+ // keys in GET parameters
+ rows = db.allDocs({keys: [1, "i_dont_exist", "0"]}, null).rows;
+ T(rows.length == 3);
+ T(rows[0].error == "not_found");
+ T(!rows[0].id);
+ T(rows[1].error == "not_found");
+ T(!rows[1].id);
+ T(rows[2].id == rows[2].key && rows[2].key == "0");
};
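
Each POST-body form above now has a GET-parameter twin; couch.js serializes a keys option into the query string. The two calls below should return the same rows (ids illustrative):

    var viaBody  = db.allDocs({}, ["0", "1", "2"]).rows;            // keys in POST body
    var viaQuery = db.allDocs({keys: ["0", "1", "2"]}, null).rows;  // keys in GET parameters
    T(viaBody.length == viaQuery.length);
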
diff --git a/share/www/script/test/view_multi_key_design.js b/share/www/script/test/view_multi_key_design.js
index 5a2f645d..38396955 100644
--- a/share/www/script/test/view_multi_key_design.js
+++ b/share/www/script/test/view_multi_key_design.js
@@ -34,11 +34,11 @@ couchTests.view_multi_key_design = function(debug) {
reduce:"function (keys, values) { return sum(values); };"
}
}
- }
+ };
T(db.save(designDoc).ok);
// Test that missing keys work too
- var keys = [101,30,15,37,50]
+ var keys = [101,30,15,37,50];
var reduce = db.view("test/summate",{group:true},keys).rows;
T(reduce.length == keys.length-1); // 101 is missing
for(var i=0; i<reduce.length; i++) {
@@ -54,6 +54,13 @@ couchTests.view_multi_key_design = function(debug) {
T(rows[i].key == rows[i].value);
}
+ // with GET keys
+ rows = db.view("test/all_docs",{keys:keys},null).rows;
+ for(var i=0;i<rows.length; i++) {
+ T(keys.indexOf(rows[i].key) != -1);
+ T(rows[i].key == rows[i].value);
+ }
+
var reduce = db.view("test/summate",{group:true},keys).rows;
T(reduce.length == keys.length);
for(var i=0; i<reduce.length; i++) {
@@ -61,8 +68,18 @@ couchTests.view_multi_key_design = function(debug) {
T(reduce[i].key == reduce[i].value);
}
+ // with GET keys
+ reduce = db.view("test/summate",{group:true,keys:keys},null).rows;
+ T(reduce.length == keys.length);
+ for(var i=0; i<reduce.length; i++) {
+ T(keys.indexOf(reduce[i].key) != -1);
+ T(reduce[i].key == reduce[i].value);
+ }
+
// Test that invalid parameter combinations get rejected
var badargs = [{startkey:0}, {endkey:0}, {key: 0}, {group_level: 2}];
+ var getbadargs = [{startkey:0, keys:keys}, {endkey:0, keys:keys},
+ {key:0, keys:keys}, {group_level: 2, keys:keys}];
for(var i in badargs)
{
try {
@@ -71,6 +88,13 @@ couchTests.view_multi_key_design = function(debug) {
} catch (e) {
T(e.error == "query_parse_error");
}
+
+ try {
+ db.view("test/all_docs",getbadargs[i],null);
+ T(0==1);
+ } catch (e) {
+ T(e.error = "query_parse_error");
+ }
}
try {
@@ -80,8 +104,18 @@ couchTests.view_multi_key_design = function(debug) {
T(e.error == "query_parse_error");
}
+ try {
+ db.view("test/summate",{keys:keys},null);
+ T(0==1);
+ } catch (e) {
+ T(e.error == "query_parse_error");
+ }
+
// Test that a map & reduce containing func support keys when reduce=false
- resp = db.view("test/summate", {reduce: false}, keys);
+ var resp = db.view("test/summate", {reduce: false}, keys);
+ T(resp.rows.length == 5);
+
+ resp = db.view("test/summate", {reduce: false, keys: keys}, null);
T(resp.rows.length == 5);
// Check that limiting by startkey_docid and endkey_docid get applied
@@ -96,34 +130,66 @@ couchTests.view_multi_key_design = function(debug) {
T(curr[i].value == exp_val[i]);
}
+ curr = db.view("test/multi_emit", {startkey_docid: 21, endkey_docid: 23, keys: [0, 2]}, null).rows;
+ T(curr.length == 6);
+ for( var i = 0 ; i < 6 ; i++)
+ {
+ T(curr[i].key == exp_key[i]);
+ T(curr[i].value == exp_val[i]);
+ }
+
// Check limit works
curr = db.view("test/all_docs", {limit: 1}, keys).rows;
T(curr.length == 1);
T(curr[0].key == 10);
+ curr = db.view("test/all_docs", {limit: 1, keys: keys}, null).rows;
+ T(curr.length == 1);
+ T(curr[0].key == 10);
+
// Check offset works
curr = db.view("test/multi_emit", {skip: 1}, [0]).rows;
T(curr.length == 99);
T(curr[0].value == 1);
+ curr = db.view("test/multi_emit", {skip: 1, keys: [0]}, null).rows;
+ T(curr.length == 99);
+ T(curr[0].value == 1);
+
// Check that dir works
curr = db.view("test/multi_emit", {descending: "true"}, [1]).rows;
T(curr.length == 100);
T(curr[0].value == 99);
T(curr[99].value == 0);
+ curr = db.view("test/multi_emit", {descending: "true", keys: [1]}, null).rows;
+ T(curr.length == 100);
+ T(curr[0].value == 99);
+ T(curr[99].value == 0);
+
// Check a couple combinations
curr = db.view("test/multi_emit", {descending: "true", skip: 3, limit: 2}, [2]).rows;
T(curr.length, 2);
T(curr[0].value == 96);
T(curr[1].value == 95);
+ curr = db.view("test/multi_emit", {descending: "true", skip: 3, limit: 2, keys: [2]}, null).rows;
+ T(curr.length, 2);
+ T(curr[0].value == 96);
+ T(curr[1].value == 95);
+
curr = db.view("test/multi_emit", {skip: 2, limit: 3, startkey_docid: "13"}, [0]).rows;
T(curr.length == 3);
T(curr[0].value == 15);
T(curr[1].value == 16);
T(curr[2].value == 17);
+ curr = db.view("test/multi_emit", {skip: 2, limit: 3, startkey_docid: "13", keys: [0]}, null).rows;
+ T(curr.length == 3);
+ T(curr[0].value == 15);
+ T(curr[1].value == 16);
+ T(curr[2].value == 17);
+
curr = db.view("test/multi_emit",
{skip: 1, limit: 5, startkey_docid: "25", endkey_docid: "27"}, [1]).rows;
T(curr.length == 2);
@@ -131,8 +197,20 @@ couchTests.view_multi_key_design = function(debug) {
T(curr[1].value == 27);
curr = db.view("test/multi_emit",
+ {skip: 1, limit: 5, startkey_docid: "25", endkey_docid: "27", keys: [1]}, null).rows;
+ T(curr.length == 2);
+ T(curr[0].value == 26);
+ T(curr[1].value == 27);
+
+ curr = db.view("test/multi_emit",
{skip: 1, limit: 5, startkey_docid: "28", endkey_docid: "26", descending: "true"}, [1]).rows;
T(curr.length == 2);
T(curr[0].value == 27);
T(curr[1].value == 26);
+
+ curr = db.view("test/multi_emit",
+ {skip: 1, limit: 5, startkey_docid: "28", endkey_docid: "26", descending: "true", keys: [1]}, null).rows;
+ T(curr.length == 2);
+ T(curr[0].value == 27);
+ T(curr[1].value == 26);
};
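
One asymmetry the new assertions pin down: multi-key requests against a reduce view are only valid with group=true or reduce=false; a bare keys request is a query_parse_error. With the views and keys array from this test:

    db.view("test/summate", {group: true, keys: keys}, null);    // ok: one row per key
    db.view("test/summate", {reduce: false, keys: keys}, null);  // ok: raw map rows
    try {
      db.view("test/summate", {keys: keys}, null);               // rejected
      T(0 == 1);
    } catch (e) {
      T(e.error == "query_parse_error");
    }
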
diff --git a/share/www/script/test/view_pagination.js b/share/www/script/test/view_pagination.js
index 1af2df35..ed3a7ee1 100644
--- a/share/www/script/test/view_pagination.js
+++ b/share/www/script/test/view_pagination.js
@@ -19,7 +19,7 @@ couchTests.view_pagination = function(debug) {
var docs = makeDocs(0, 100);
db.bulkSave(docs);
- var queryFun = function(doc) { emit(doc.integer, null) };
+ var queryFun = function(doc) { emit(doc.integer, null); };
var i;
// page through the view ascending
@@ -29,13 +29,26 @@ couchTests.view_pagination = function(debug) {
startkey_docid: i,
limit: 10
});
- T(queryResults.rows.length == 10)
- T(queryResults.total_rows == docs.length)
- T(queryResults.offset == i)
+ T(queryResults.rows.length == 10);
+ T(queryResults.total_rows == docs.length);
+ T(queryResults.offset == i);
var j;
for (j = 0; j < 10;j++) {
T(queryResults.rows[j].key == i + j);
}
+
+ // test aliases start_key and start_key_doc_id
+ queryResults = db.query(queryFun, null, {
+ start_key: i,
+ start_key_doc_id: i,
+ limit: 10
+ });
+ T(queryResults.rows.length == 10);
+ T(queryResults.total_rows == docs.length);
+ T(queryResults.offset == i);
+ for (j = 0; j < 10;j++) {
+ T(queryResults.rows[j].key == i + j);
+ }
}
// page through the view descending
@@ -46,9 +59,9 @@ couchTests.view_pagination = function(debug) {
descending: true,
limit: 10
});
- T(queryResults.rows.length == 10)
- T(queryResults.total_rows == docs.length)
- T(queryResults.offset == docs.length - i - 1)
+ T(queryResults.rows.length == 10);
+ T(queryResults.total_rows == docs.length);
+ T(queryResults.offset == docs.length - i - 1);
var j;
for (j = 0; j < 10; j++) {
T(queryResults.rows[j].key == i - j);
@@ -63,60 +76,72 @@ couchTests.view_pagination = function(debug) {
descending: false,
limit: 10
});
- T(queryResults.rows.length == 10)
- T(queryResults.total_rows == docs.length)
- T(queryResults.offset == i)
+ T(queryResults.rows.length == 10);
+ T(queryResults.total_rows == docs.length);
+ T(queryResults.offset == i);
var j;
for (j = 0; j < 10;j++) {
T(queryResults.rows[j].key == i + j);
}
}
+ function testEndkeyDocId(queryResults) {
+ T(queryResults.rows.length == 35);
+ T(queryResults.total_rows == docs.length);
+ T(queryResults.offset == 1);
+ T(queryResults.rows[0].id == "1");
+ T(queryResults.rows[1].id == "10");
+ T(queryResults.rows[2].id == "11");
+ T(queryResults.rows[3].id == "12");
+ T(queryResults.rows[4].id == "13");
+ T(queryResults.rows[5].id == "14");
+ T(queryResults.rows[6].id == "15");
+ T(queryResults.rows[7].id == "16");
+ T(queryResults.rows[8].id == "17");
+ T(queryResults.rows[9].id == "18");
+ T(queryResults.rows[10].id == "19");
+ T(queryResults.rows[11].id == "2");
+ T(queryResults.rows[12].id == "20");
+ T(queryResults.rows[13].id == "21");
+ T(queryResults.rows[14].id == "22");
+ T(queryResults.rows[15].id == "23");
+ T(queryResults.rows[16].id == "24");
+ T(queryResults.rows[17].id == "25");
+ T(queryResults.rows[18].id == "26");
+ T(queryResults.rows[19].id == "27");
+ T(queryResults.rows[20].id == "28");
+ T(queryResults.rows[21].id == "29");
+ T(queryResults.rows[22].id == "3");
+ T(queryResults.rows[23].id == "30");
+ T(queryResults.rows[24].id == "31");
+ T(queryResults.rows[25].id == "32");
+ T(queryResults.rows[26].id == "33");
+ T(queryResults.rows[27].id == "34");
+ T(queryResults.rows[28].id == "35");
+ T(queryResults.rows[29].id == "36");
+ T(queryResults.rows[30].id == "37");
+ T(queryResults.rows[31].id == "38");
+ T(queryResults.rows[32].id == "39");
+ T(queryResults.rows[33].id == "4");
+ T(queryResults.rows[34].id == "40");
+ }
+
// test endkey_docid
- var queryResults = db.query(function(doc) { emit(null, null);}, null, {
+ var queryResults = db.query(function(doc) { emit(null, null); }, null, {
startkey: null,
startkey_docid: 1,
endkey: null,
endkey_docid: 40
});
+ testEndkeyDocId(queryResults);
- T(queryResults.rows.length == 35)
- T(queryResults.total_rows == docs.length)
- T(queryResults.offset == 1)
- T(queryResults.rows[0].id == "1");
- T(queryResults.rows[1].id == "10");
- T(queryResults.rows[2].id == "11");
- T(queryResults.rows[3].id == "12");
- T(queryResults.rows[4].id == "13");
- T(queryResults.rows[5].id == "14");
- T(queryResults.rows[6].id == "15");
- T(queryResults.rows[7].id == "16");
- T(queryResults.rows[8].id == "17");
- T(queryResults.rows[9].id == "18");
- T(queryResults.rows[10].id == "19");
- T(queryResults.rows[11].id == "2");
- T(queryResults.rows[12].id == "20");
- T(queryResults.rows[13].id == "21");
- T(queryResults.rows[14].id == "22");
- T(queryResults.rows[15].id == "23");
- T(queryResults.rows[16].id == "24");
- T(queryResults.rows[17].id == "25");
- T(queryResults.rows[18].id == "26");
- T(queryResults.rows[19].id == "27");
- T(queryResults.rows[20].id == "28");
- T(queryResults.rows[21].id == "29");
- T(queryResults.rows[22].id == "3");
- T(queryResults.rows[23].id == "30");
- T(queryResults.rows[24].id == "31");
- T(queryResults.rows[25].id == "32");
- T(queryResults.rows[26].id == "33");
- T(queryResults.rows[27].id == "34");
- T(queryResults.rows[28].id == "35");
- T(queryResults.rows[29].id == "36");
- T(queryResults.rows[30].id == "37");
- T(queryResults.rows[31].id == "38");
- T(queryResults.rows[32].id == "39");
- T(queryResults.rows[33].id == "4");
- T(queryResults.rows[34].id == "40");
+ // test aliases end_key_doc_id and end_key
+ queryResults = db.query(function(doc) { emit(null, null); }, null, {
+ start_key: null,
+ start_key_doc_id: 1,
+ end_key: null,
+ end_key_doc_id: 40
+ });
+ testEndkeyDocId(queryResults);
};
diff --git a/share/www/script/test/view_sandboxing.js b/share/www/script/test/view_sandboxing.js
index 9f893b28..02951d9f 100644
--- a/share/www/script/test/view_sandboxing.js
+++ b/share/www/script/test/view_sandboxing.js
@@ -42,11 +42,99 @@ couchTests.view_sandboxing = function(debug) {
// make sure that a view cannot access the map_funs array defined used by
// the view server
- var results = db.query(function(doc) { map_funs.push(1); emit(null, doc) });
+ var results = db.query(function(doc) { map_funs.push(1); emit(null, doc); });
T(results.total_rows == 0);
// make sure that a view cannot access the map_results array defined used by
// the view server
- var results = db.query(function(doc) { map_results.push(1); emit(null, doc) });
+ var results = db.query(function(doc) { map_results.push(1); emit(null, doc); });
T(results.total_rows == 0);
+
+ // test for COUCHDB-925
+ // altering 'doc' variable in map function affects other map functions
+ var ddoc = {
+ _id: "_design/foobar",
+ language: "javascript",
+ views: {
+ view1: {
+ map:
+ (function(doc) {
+ if (doc.values) {
+ doc.values = [666];
+ }
+ if (doc.tags) {
+ doc.tags.push("qwerty");
+ }
+ if (doc.tokens) {
+ doc.tokens["c"] = 3;
+ }
+ }).toString()
+ },
+ view2: {
+ map:
+ (function(doc) {
+ if (doc.values) {
+ emit(doc._id, doc.values);
+ }
+ if (doc.tags) {
+ emit(doc._id, doc.tags);
+ }
+ if (doc.tokens) {
+ emit(doc._id, doc.tokens);
+ }
+ }).toString()
+ }
+ }
+ };
+ var doc1 = {
+ _id: "doc1",
+ values: [1, 2, 3]
+ };
+ var doc2 = {
+ _id: "doc2",
+ tags: ["foo", "bar"],
+ tokens: {a: 1, b: 2}
+ };
+
+ db.deleteDb();
+ db.createDb();
+ T(db.save(ddoc).ok);
+ T(db.save(doc1).ok);
+ T(db.save(doc2).ok);
+
+ var view1Results = db.view(
+ "foobar/view1", {bypass_cache: Math.round(Math.random() * 1000)});
+ var view2Results = db.view(
+ "foobar/view2", {bypass_cache: Math.round(Math.random() * 1000)});
+
+ TEquals(0, view1Results.rows.length, "view1 has 0 rows");
+ TEquals(3, view2Results.rows.length, "view2 has 3 rows");
+
+ TEquals(doc1._id, view2Results.rows[0].key);
+ TEquals(doc2._id, view2Results.rows[1].key);
+ TEquals(doc2._id, view2Results.rows[2].key);
+
+ // https://bugzilla.mozilla.org/show_bug.cgi?id=449657
+ TEquals(3, view2Results.rows[0].value.length,
+ "Warning: installed SpiderMonkey version doesn't allow sealing of arrays");
+ if (view2Results.rows[0].value.length === 3) {
+ TEquals(1, view2Results.rows[0].value[0]);
+ TEquals(2, view2Results.rows[0].value[1]);
+ TEquals(3, view2Results.rows[0].value[2]);
+ }
+
+ TEquals(1, view2Results.rows[1].value["a"]);
+ TEquals(2, view2Results.rows[1].value["b"]);
+ TEquals('undefined', typeof view2Results.rows[1].value["c"],
+ "doc2.tokens object was not sealed");
+
+ TEquals(2, view2Results.rows[2].value.length,
+ "Warning: installed SpiderMonkey version doesn't allow sealing of arrays");
+ if (view2Results.rows[2].value.length === 2) {
+ TEquals("foo", view2Results.rows[2].value[0]);
+ TEquals("bar", view2Results.rows[2].value[1]);
+ }
+
+ // cleanup
+ db.deleteDb();
};
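
The COUCHDB-925 test relies on the view server sealing each document before the map functions run, so a mutation in one view cannot leak into the next. In outline (map bodies illustrative; as the warnings above note, sealing arrays requires a recent enough SpiderMonkey):

    // view1 tries to mutate its input; on a sealed doc this has no lasting effect
    function view1(doc) {
      if (doc.tags) { doc.tags.push("qwerty"); }
    }
    // view2, fed the same doc afterwards, still sees the original values
    function view2(doc) {
      if (doc.tags) { emit(doc._id, doc.tags); }  // ["foo", "bar"], no "qwerty"
    }
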
diff --git a/share/www/script/test/view_update_seq.js b/share/www/script/test/view_update_seq.js
new file mode 100644
index 00000000..69b8c42d
--- /dev/null
+++ b/share/www/script/test/view_update_seq.js
@@ -0,0 +1,106 @@
+// Licensed under the Apache License, Version 2.0 (the "License"); you may not
+// use this file except in compliance with the License. You may obtain a copy of
+// the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations under
+// the License.
+
+couchTests.view_update_seq = function(debug) {
+ var db = new CouchDB("test_suite_db", {"X-Couch-Full-Commit":"true"});
+ db.deleteDb();
+ db.createDb();
+ if (debug) debugger;
+
+ T(db.info().update_seq == 0);
+
+ var resp = db.allDocs({update_seq:true});
+
+ T(resp.rows.length == 0);
+ T(resp.update_seq == 0);
+
+ var designDoc = {
+ _id:"_design/test",
+ language: "javascript",
+ views: {
+ all_docs: {
+ map: "function(doc) { emit(doc.integer, doc.string) }"
+ },
+ summate: {
+ map:"function (doc) {emit(doc.integer, doc.integer)};",
+ reduce:"function (keys, values) { return sum(values); };"
+ }
+ }
+ };
+ T(db.save(designDoc).ok);
+
+ T(db.info().update_seq == 1);
+
+ resp = db.allDocs({update_seq:true});
+
+ T(resp.rows.length == 1);
+ T(resp.update_seq == 1);
+
+ var docs = makeDocs(0, 100);
+ db.bulkSave(docs);
+
+ resp = db.allDocs({limit: 1});
+ T(resp.rows.length == 1);
+ T(!resp.update_seq, "all docs");
+
+ resp = db.allDocs({limit: 1, update_seq:true});
+ T(resp.rows.length == 1);
+ T(resp.update_seq == 101);
+
+ resp = db.view('test/all_docs', {limit: 1, update_seq:true});
+ T(resp.rows.length == 1);
+ T(resp.update_seq == 101);
+
+ resp = db.view('test/all_docs', {limit: 1, update_seq:false});
+ T(resp.rows.length == 1);
+ T(!resp.update_seq, "view");
+
+ resp = db.view('test/summate', {update_seq:true});
+ T(resp.rows.length == 1);
+ T(resp.update_seq == 101);
+
+ db.save({"id":"0"});
+ resp = db.view('test/all_docs', {limit: 1,stale: "ok", update_seq:true});
+ T(resp.rows.length == 1);
+ T(resp.update_seq == 101);
+
+ db.save({"id":"00"});
+ resp = db.view('test/all_docs',
+ {limit: 1, stale: "update_after", update_seq: true});
+ T(resp.rows.length == 1);
+ T(resp.update_seq == 101);
+
+ // wait 5 seconds so that the following assertions pass even on very slow machines
+ var t0 = new Date(), t1;
+ do {
+ CouchDB.request("GET", "/");
+ t1 = new Date();
+ } while ((t1 - t0) < 5000);
+
+ resp = db.view('test/all_docs', {limit: 1, stale: "ok", update_seq: true});
+ T(resp.rows.length == 1);
+ T(resp.update_seq == 103);
+
+ resp = db.view('test/all_docs', {limit: 1, update_seq:true});
+ T(resp.rows.length == 1);
+ T(resp.update_seq == 103);
+
+ resp = db.view('test/all_docs',{update_seq:true},["0","1"]);
+ T(resp.update_seq == 103);
+
+ resp = db.view('test/all_docs',{update_seq:true},["0","1"]);
+ T(resp.update_seq == 103);
+
+ resp = db.view('test/summate',{group:true, update_seq:true},["0","1"]);
+ T(resp.update_seq == 103);
+
+};
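
The new option restated: update_seq=true asks _all_docs and view requests to include the sequence number their index reflects; with stale=ok that value can lag db.info().update_seq until the index catches up:

    var resp = db.view("test/all_docs", {limit: 1, update_seq: true});
    T(typeof resp.update_seq == "number");       // seq the index was built from
    T(resp.update_seq <= db.info().update_seq);  // may lag when stale=ok is used
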
diff --git a/share/www/script/test/view_xml.js b/share/www/script/test/view_xml.js
index 451fb6a8..3403b47c 100644
--- a/share/www/script/test/view_xml.js
+++ b/share/www/script/test/view_xml.js
@@ -22,7 +22,7 @@ couchTests.view_xml = function(debug) {
var results = db.query(
"function(doc) {\n" +
" var xml = new XML(doc.content);\n" +
- " emit(xml.title.text(), null);\n" +
+ " emit(xml.title.text().toXMLString(), null);\n" +
"}");
T(results.total_rows == 2);
T(results.rows[0].key == "Testing E4X");
@@ -31,7 +31,7 @@ couchTests.view_xml = function(debug) {
var results = db.query(
"function(doc) {\n" +
" var xml = new XML(doc.content);\n" +
- " emit(xml.title.@id, null);\n" +
+ " emit(xml.title.@id.toXMLString(), null);\n" +
"}");
T(results.total_rows == 2);
T(results.rows[0].key == "e4x");
diff --git a/share/www/session.html b/share/www/session.html
new file mode 100644
index 00000000..0ebd943d
--- /dev/null
+++ b/share/www/session.html
@@ -0,0 +1,96 @@
+<!DOCTYPE html>
+<!--
+
+Licensed under the Apache License, Version 2.0 (the "License"); you may not use
+this file except in compliance with the License. You may obtain a copy of the
+License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software distributed
+under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
+CONDITIONS OF ANY KIND, either express or implied. See the License for the
+specific language governing permissions and limitations under the License.
+
+-->
+<html lang="en">
+ <head>
+ <title>Session</title>
+ <meta http-equiv="Content-Type" content="text/html;charset=utf-8">
+ <link rel="stylesheet" href="style/layout.css?0.11.0" type="text/css">
+ <script src="script/json2.js"></script>
+ <script src="script/sha1.js"></script>
+ <script src="script/jquery.js?1.4.2"></script>
+ <script src="script/jquery.couch.js?0.11.0"></script>
+ <script src="script/jquery.dialog.js?0.11.0"></script>
+ <script src="script/futon.js?0.11.0"></script>
+ <script src="script/futon.browse.js?0.11.0"></script>
+ <script src="script/futon.format.js?0.11.0"></script>
+ <script>
+ $(function() {
+ var ret, reason, q = window.location.search, qps = q.split("&");
+ $.map(qps, function(qp) {
+ var m = qp.match(/return=(.*)/);
+ if (m) {
+ ret = decodeURIComponent(m[1]);
+ }
+ m = qp.match(/reason=(.*)/);
+ if (m) {
+ reason = $.futon.escape(decodeURIComponent(m[1]));
+ }
+ });
+ if (reason) {
+ $("#aboutSession").append('<p>The application says: <em>'+reason+'</em></p>');
+ }
+ if (ret) {
+ $("#aboutSession").append($('<p>Once you are logged in, click this link to return to your application: </p>').append($("<a></a>").attr("href", ret).text(ret)));
+ // TODO: this needs to look different if you are already logged in,
+ // with a note that you are logged in but cannot access this page
+ }
+ // reuse the sidebar login UI, rendered in the middle of the page without the sidebar itself
+ $.futon.storage.set("sidebar", "hidden");
+ setTimeout(function() {
+ var ctx = $$("#userCtx").userCtx;
+ $.futon.storage.set("sidebar", "show");
+ if (ctx && ctx.name) {
+ $("#aboutSession").append("<p>It looks like you are logged in, maybe you don't have access to that url.</p>");
+ }
+ },100);
+ });
+ </script>
+ </head>
+ <body>
+ <div id="wrap">
+ <h1><a href="index.html">Overview</a>
+ <strong>Session</strong></h1>
+ <div id="content">
+ <h2>Establish or Modify Your Session</h2>
+ <div id="loginSignup">
+ <div id="aboutSession"></div>
+ <span id="userCtx">
+ <span class="loggedout">
+ <a href="#" class="signup">Signup</a> or <a href="#" class="login">Login</a>
+ </span>
+ <span class="loggedin">
+ Welcome <a class="name">?</a>!
+ <br/>
+ <a href="#" class="logout">Logout</a>
+ </span>
+ <span class="loggedinadmin">
+ Welcome <a class="name">?</a>!
+ <br/>
+ <a href="#" class="createadmin">Setup more admins</a> or
+ <a href="#" class="logout">Logout</a>
+ </span>
+ <span class="adminparty">
+ Welcome to Admin Party!
+ <br/>
+ Everyone is admin. <a href="#" class="createadmin">Fix this</a>
+ </span>
+ </span>
+ </div>
+ </div>
+
+ </div>
+ </body>
+</html>
diff --git a/share/www/spec/couch_js_class_methods_spec.js b/share/www/spec/couch_js_class_methods_spec.js
new file mode 100644
index 00000000..7eac2348
--- /dev/null
+++ b/share/www/spec/couch_js_class_methods_spec.js
@@ -0,0 +1,401 @@
+// Licensed under the Apache License, Version 2.0 (the "License"); you may not
+// use this file except in compliance with the License. You may obtain a copy of
+// the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations under
+// the License.
+
+// Specs for couch.js lines 313-470
+
+describe 'CouchDB class'
+ describe 'session stuff'
+ before
+ useTestUserDb();
+ end
+
+ after
+ useOldUserDb();
+ end
+
+ before_each
+ userDoc = users_db.save(CouchDB.prepareUserDoc({name: "Gaius Baltar", roles: ["president"]}, "secretpass"));
+ end
+
+ after_each
+ users_db.deleteDoc({_id : userDoc.id, _rev : userDoc.rev})
+ end
+
+ describe '.login'
+ it 'should return ok true'
+ CouchDB.login("Gaius Baltar", "secretpass").ok.should.be_true
+ end
+
+ it 'should return the name of the logged in user'
+ CouchDB.login("Gaius Baltar", "secretpass").name.should.eql "Gaius Baltar"
+ end
+
+ it 'should return the roles of the logged in user'
+ CouchDB.login("Gaius Baltar", "secretpass").roles.should.eql ["president"]
+ end
+
+ it 'should post _session'
+ CouchDB.should.receive("request", "once").with_args("POST", "/_session")
+ CouchDB.login("Gaius Baltar", "secretpass");
+ end
+
+ it 'should create a session'
+ CouchDB.login("Gaius Baltar", "secretpass");
+ CouchDB.session().userCtx.name.should.eql "Gaius Baltar"
+ end
+ end
+
+ describe '.logout'
+ before_each
+ CouchDB.login("Gaius Baltar", "secretpass");
+ end
+
+ it 'should return ok true'
+ CouchDB.logout().ok.should.be_true
+ end
+
+ it 'should delete _session'
+ CouchDB.should.receive("request", "once").with_args("DELETE", "/_session")
+ CouchDB.logout();
+ end
+
+ it 'should result in an invalid session'
+ CouchDB.logout();
+ CouchDB.session().name.should.be_null
+ end
+ end
+
+ describe '.session'
+ before_each
+ CouchDB.login("Gaius Baltar", "secretpass");
+ end
+
+ it 'should return ok true'
+ CouchDB.session().ok.should.be_true
+ end
+
+ it 'should return the users name'
+ CouchDB.session().userCtx.name.should.eql "Gaius Baltar"
+ end
+
+ it 'should return the users roles'
+ CouchDB.session().userCtx.roles.should.eql ["president"]
+ end
+
+ it 'should return the name of the authentication db'
+ CouchDB.session().info.authentication_db.should.eql "spec_users_db"
+ end
+
+ it 'should return the active authentication handler'
+ CouchDB.session().info.authenticated.should.eql "cookie"
+ end
+ end
+ end
+
+ describe 'db stuff'
+ before_each
+ db = new CouchDB("spec_db", {"X-Couch-Full-Commit":"false"});
+ db.createDb();
+ end
+
+ after_each
+ db.deleteDb();
+ end
+
+ describe '.prepareUserDoc'
+ before_each
+ userDoc = CouchDB.prepareUserDoc({name: "Laura Roslin"}, "secretpass");
+ end
+
+ it 'should return the users name'
+ userDoc.name.should.eql "Laura Roslin"
+ end
+
+ it 'should prefix the id with the CouchDB user_prefix'
+ userDoc._id.should.eql "org.couchdb.user:Laura Roslin"
+ end
+
+ it 'should return the users roles'
+ var userDocWithRoles = CouchDB.prepareUserDoc({name: "William Adama", roles: ["admiral", "commander"]}, "secretpass")
+ userDocWithRoles.roles.should.eql ["admiral", "commander"]
+ end
+
+ it 'should return the hashed password'
+ userDoc.password_sha.length.should.be_at_least 30
+ userDoc.password_sha.should.be_a String
+ end
+ end
+
+ describe '.allDbs'
+ it 'should get _all_dbs'
+ CouchDB.should.receive("request", "once").with_args("GET", "/_all_dbs");
+ CouchDB.allDbs();
+ end
+
+ it 'should return an array that includes a created database'
+ temp_db = new CouchDB("temp_spec_db", {"X-Couch-Full-Commit":"false"});
+ temp_db.createDb();
+ CouchDB.allDbs().should.include("temp_spec_db");
+ temp_db.deleteDb();
+ end
+
+ it 'should return an array that does not include a database that does not exist'
+ CouchDB.allDbs().should.not.include("not_existing_temp_spec_db");
+ end
+ end
+
+ describe '.allDesignDocs'
+ it 'should return the total number of documents'
+ CouchDB.allDesignDocs().spec_db.total_rows.should.eql 0
+ db.save({'type':'battlestar', 'name':'galactica'});
+ CouchDB.allDesignDocs().spec_db.total_rows.should.eql 1
+ end
+
+ it 'should return undefined when the db does not exist'
+ CouchDB.allDesignDocs().non_existing_db.should.be_undefined
+ end
+
+ it 'should return no documents when there are no design documents'
+ CouchDB.allDesignDocs().spec_db.rows.should.eql []
+ end
+
+ it 'should return all design documents'
+ var designDoc = {
+ "views" : {
+ "people" : {
+ "map" : "function(doc) { emit(doc._id, doc); }"
+ }
+ },
+ "_id" : "_design/spec_db"
+ };
+ db.save(designDoc);
+
+ var allDesignDocs = CouchDB.allDesignDocs();
+ allDesignDocs.spec_db.rows[0].id.should.eql "_design/spec_db"
+ allDesignDocs.spec_db.rows[0].key.should.eql "_design/spec_db"
+ allDesignDocs.spec_db.rows[0].value.rev.length.should.be_at_least 30
+ end
+ end
+
+ describe '.getVersion'
+ it 'should get the CouchDB version'
+ CouchDB.should.receive("request", "once").with_args("GET", "/")
+ CouchDB.getVersion();
+ end
+
+ it 'should return the CouchDB version'
+ CouchDB.getVersion().should_match /^\d\d?\.\d\d?\.\d\d?.*/
+ end
+ end
+
+ describe '.replicate'
+ before_each
+ db2 = new CouchDB("spec_db_2", {"X-Couch-Full-Commit":"false"});
+ db2.createDb();
+ host = window.location.protocol + "//" + window.location.host;
+ end
+
+ after_each
+ db2.deleteDb();
+ end
+
+ it 'should return no_changes true when there are no changes between the dbs'
+ CouchDB.replicate(host + db.uri, host + db2.uri).no_changes.should.be_true
+ end
+
+ it 'should return the session ID'
+ db.save({'type':'battlestar', 'name':'galactica'});
+ CouchDB.replicate(host + db.uri, host + db2.uri).session_id.length.should.be_at_least 30
+ end
+
+ it 'should return source_last_seq'
+ db.save({'type':'battlestar', 'name':'galactica'});
+ db.save({'type':'battlestar', 'name':'pegasus'});
+
+ CouchDB.replicate(host + db.uri, host + db2.uri).source_last_seq.should.eql 2
+ end
+
+ it 'should return the replication history'
+ db.save({'type':'battlestar', 'name':'galactica'});
+ db.save({'type':'battlestar', 'name':'pegasus'});
+
+ var result = CouchDB.replicate(host + db.uri, host + db2.uri);
+ result.history[0].docs_written.should.eql 2
+ result.history[0].start_last_seq.should.eql 0
+ end
+
+ it 'should pass through replication options'
+ db.save({'type':'battlestar', 'name':'galactica'});
+ db2.deleteDb();
+ -{CouchDB.replicate(host + db.uri, host + db2.uri)}.should.throw_error
+ var result = CouchDB.replicate(host + db.uri, host + db2.uri, {"body" : {"create_target":true}});
+
+ result.ok.should.eql true
+ result.history[0].docs_written.should.eql 1
+ db2.info().db_name.should.eql "spec_db_2"
+ end
+ end
+
+ describe '.newXhr'
+ it 'should return a XMLHTTPRequest'
+ CouchDB.newXhr().should.have_prop 'readyState'
+ CouchDB.newXhr().should.have_prop 'responseText'
+ CouchDB.newXhr().should.have_prop 'status'
+ end
+ end
+
+ describe '.request'
+ it 'should return a XMLHttpRequest'
+ var req = CouchDB.request("GET", '/');
+ req.should.include "readyState"
+ req.should.include "responseText"
+ req.should.include "statusText"
+ end
+
+ it 'should pass through the options headers'
+ var xhr = CouchDB.newXhr();
+ stub(CouchDB, 'newXhr').and_return(xhr);
+
+ xhr.should.receive("setRequestHeader", "once").with_args("X-Couch-Full-Commit", "true")
+ CouchDB.request("GET", "/", {'headers': {"X-Couch-Full-Commit":"true"}});
+ end
+
+ it 'should pass through the options body'
+ var xhr = CouchDB.newXhr();
+ stub(CouchDB, 'newXhr').and_return(xhr);
+
+ xhr.should.receive("send", "once").with_args({"body_key":"body_value"})
+ CouchDB.request("GET", "/", {'body': {"body_key":"body_value"}});
+ end
+
+ it 'should prepend the urlPrefix to the uri'
+ var oldPrefix = CouchDB.urlPrefix;
+ CouchDB.urlPrefix = "/_utils";
+
+ var xhr = CouchDB.newXhr();
+ stub(CouchDB, 'newXhr').and_return(xhr);
+
+ xhr.should.receive("open", "once").with_args("GET", "/_utils/", false)
+ CouchDB.request("GET", "/", {'headers': {"X-Couch-Full-Commit":"true"}});
+
+ CouchDB.urlPrefix = oldPrefix;
+ end
+ end
+
+ describe '.requestStats'
+ it 'should get the stats for specified module and key'
+ var stats = CouchDB.requestStats('couchdb', 'open_databases', null);
+ stats.description.should.eql 'number of open databases'
+ stats.current.should.be_a Number
+ end
+
+ it 'should add flush true to the request when there is a test argument'
+ CouchDB.should.receive("request", "once").with_args("GET", "/_stats/httpd/requests?flush=true")
+ CouchDB.requestStats('httpd', 'requests', 'test');
+ end
+
+ it 'should still work when there is a test argument'
+ var stats = CouchDB.requestStats('httpd_status_codes', '200', 'test');
+ stats.description.should.eql 'number of HTTP 200 OK responses'
+ stats.sum.should.be_a Number
+ end
+ end
+
+ describe '.newUuids'
+ after_each
+ CouchDB.uuids_cache = [];
+ end
+
+ it 'should return the specified amount of uuids'
+ var uuids = CouchDB.newUuids(45);
+ uuids.should.have_length 45
+ end
+
+ it 'should return an array with uuids'
+ var uuids = CouchDB.newUuids(1);
+ uuids[0].should.be_a String
+ uuids[0].should.have_length 32
+ end
+
+ it 'should leave the uuids_cache with 100 uuids when no buffer size is specified'
+ CouchDB.newUuids(23);
+ CouchDB.uuids_cache.should.have_length 100
+ end
+
+ it 'should leave the uuids_cache with the specified buffer size'
+ CouchDB.newUuids(23, 150);
+ CouchDB.uuids_cache.should.have_length 150
+ end
+
+ it 'should get the uuids from the uuids_cache when there are enough uuids in there'
+ CouchDB.newUuids(10);
+ CouchDB.newUuids(25);
+ CouchDB.uuids_cache.should.have_length 75
+ end
+
+ it 'should create new uuids and add as many as specified to the uuids_cache when there are not enough uuids in the cache'
+ CouchDB.newUuids(10);
+ CouchDB.newUuids(125, 60);
+ CouchDB.uuids_cache.should.have_length 160
+ end
+ end
+
+ describe '.maybeThrowError'
+ it 'should throw an error when the request has status 404'
+ var req = CouchDB.request("GET", "/nonexisting_db");
+ -{CouchDB.maybeThrowError(req)}.should.throw_error
+ end
+
+ it 'should throw an error when the request has status 412'
+ var req = CouchDB.request("PUT", "/spec_db");
+ -{CouchDB.maybeThrowError(req)}.should.throw_error
+ end
+
+ it 'should throw an error when the request has status 405'
+ var req = CouchDB.request("DELETE", "/_utils");
+ -{CouchDB.maybeThrowError(req)}.should.throw_error
+ end
+
+ it 'should throw the responseText of the request'
+ var req = CouchDB.request("GET", "/nonexisting_db");
+ try {
+ CouchDB.maybeThrowError(req)
+ } catch(e) {
+ e.error.should.eql JSON.parse(req.responseText).error
+ e.reason.should.eql JSON.parse(req.responseText).reason
+ }
+ end
+
+ it 'should throw an unknown error when the responseText is invalid json'
+ mock_request().and_return("invalid json...", "application/json", 404, {})
+ try {
+ CouchDB.maybeThrowError(CouchDB.newXhr())
+ } catch(e) {
+ e.error.should.eql "unknown"
+ e.reason.should.eql "invalid json..."
+ }
+ end
+ end
+
+ describe '.params'
+ it 'should turn a json object into a http params string'
+ var params = CouchDB.params({"president":"laura", "cag":"lee"})
+ params.should.eql "president=laura&cag=lee"
+ end
+
+ it 'should return a blank string when the object is empty'
+ var params = CouchDB.params({})
+ params.should.eql ""
+ end
+ end
+ end
+end
\ No newline at end of file
diff --git a/share/www/spec/couch_js_instance_methods_1_spec.js b/share/www/spec/couch_js_instance_methods_1_spec.js
new file mode 100644
index 00000000..7f23bd2c
--- /dev/null
+++ b/share/www/spec/couch_js_instance_methods_1_spec.js
@@ -0,0 +1,311 @@
+// Licensed under the Apache License, Version 2.0 (the "License"); you may not
+// use this file except in compliance with the License. You may obtain a copy of
+// the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations under
+// the License.
+
+// Specs for couch.js lines 1-130
+
+describe 'CouchDB instance'
+ before_each
+ db = new CouchDB("spec_db", {"X-Couch-Full-Commit":"false"});
+ end
+
+ describe '.request'
+ before_each
+ db.createDb();
+ end
+
+ after_each
+ db.deleteDb();
+ end
+
+ it 'should return an XMLHttpRequest'
+ var req = db.request("GET", "/spec_db");
+ req.should.include "readyState"
+ req.should.include "responseText"
+ req.should.include "statusText"
+ // in Safari an XMLHttpRequest is actually an XMLHttpRequestConstructor,
+ // otherwise we could just do:
+ // req.should.be_a XMLHttpRequest
+ end
+
+ it 'should set the options the CouchDB instance was created with as httpHeaders'
+ CouchDB.should.receive("request", "once").with_args("GET", "/spec_db", {headers: {"X-Couch-Full-Commit": "false"}})
+ db.request("GET", "/spec_db");
+ end
+
+ it 'should pass through the options'
+ CouchDB.should.receive("request", "once").with_args("GET", "/spec_db", {"X-Couch-Persist": "true", headers: {"X-Couch-Full-Commit": "false"}})
+ db.request("GET", "/spec_db", {"X-Couch-Persist":"true"});
+ end
+ end
+
+ describe '.createDb'
+ after_each
+ db.deleteDb();
+ end
+
+ it 'should create the db'
+ db.createDb();
+ db.last_req.status.should.eql 201
+ end
+
+ it 'should return ok true'
+ db.createDb().should.eql {"ok" : true}
+ end
+
+ it 'should result in a created db'
+ db.createDb();
+ try{
+ db.createDb();
+ } catch(e) {
+ e.error.should.eql "file_exists"
+ }
+ end
+
+ it 'should have created a db with update sequence 0'
+ db.createDb();
+ db.info().update_seq.should.eql 0
+ end
+ end
+
+ describe '.deleteDb'
+ before_each
+ db.createDb();
+ end
+
+ it 'should delete the db'
+ db.deleteDb();
+ db.last_req.status.should.eql 200
+ end
+
+ it 'should return ok true'
+ db.deleteDb().should.eql {"ok" : true}
+ end
+
+ it 'should result in a deleted db'
+ db.deleteDb();
+ db.deleteDb();
+ db.last_req.status.should.eql 404
+ end
+ end
+
+ describe 'document methods'
+ before_each
+ doc = {"Name" : "Kara Thrace", "Callsign" : "Starbuck"};
+ db.createDb();
+ end
+
+ after_each
+ db.deleteDb();
+ end
+
+ describe '.save'
+ it 'should save the document'
+ db.save(doc);
+ db.last_req.status.should.eql 201
+ end
+
+ it 'should return ok true'
+ db.save(doc).ok.should.be_true
+ end
+
+ it 'should return ID and revision of the document'
+ var response = db.save(doc);
+ response.id.should.be_a String
+ response.id.should.have_length 32
+ response.rev.should.be_a String
+ response.rev.length.should.be_at_least 30
+ end
+
+ it 'should result in a saved document with generated ID'
+ var response = db.save(doc);
+ var saved_doc = db.open(response.id);
+ saved_doc.Name.should.eql "Kara Thrace"
+ saved_doc.Callsign.should.eql "Starbuck"
+ end
+
+ it 'should save the document with the specified ID'
+ doc._id = "123";
+ var response = db.save(doc);
+ response.id.should.eql "123"
+ end
+
+ it 'should pass through the options'
+ doc._id = "123";
+ CouchDB.should.receive("request", "once").with_args("PUT", "/spec_db/123?batch=ok")
+ db.save(doc, {"batch" : "ok"});
+ end
+ end
+
+ describe '.open'
+ before_each
+ doc._id = "123";
+ db.save(doc);
+ end
+
+ it 'should open the document'
+ db.open("123").should.eql doc
+ end
+
+ it 'should return null when there is no document with the given ID'
+ db.open("non_existing").should.be_null
+ end
+
+ it 'should pass through the options'
+ CouchDB.should.receive("request", "once").with_args("GET", "/spec_db/123?revs=true")
+ db.open("123", {"revs" : "true"});
+ end
+ end
+
+ describe '.deleteDoc'
+ before_each
+ doc._id = "123";
+ saved_doc = db.save(doc);
+ delete_response = db.deleteDoc({_id : "123", _rev : saved_doc.rev});
+ delete_last_req = db.last_req;
+ db.open("123");
+ end
+
+ it 'should send a successful request'
+ delete_last_req.status.should.eql 200
+ end
+
+ it 'should result in a deleted document'
+ db.open("123").should.be_null
+ end
+
+ it 'should return ok true, the ID and the revision of the deleted document'
+ delete_response.ok.should.be_true
+ delete_response.id.should.eql "123"
+ delete_response.rev.should.be_a String
+ delete_response.rev.length.should.be_at_least 30
+ end
+
+ it 'should mark the document as deleted'
+ var responseText = db.request("GET", "/spec_db/123").responseText;
+ JSON.parse(responseText).should.eql {"error":"not_found","reason":"deleted"}
+ end
+
+ it 'should record the revision in the deleted document'
+ var responseText = db.request("GET", "/spec_db/123?rev=" + delete_response.rev).responseText;
+ var deleted_doc = JSON.parse(responseText);
+ deleted_doc._rev.should.eql delete_response.rev
+ deleted_doc._id.should.eql delete_response.id
+ deleted_doc._deleted.should.be_true
+ end
+ end
+
+ describe '.deleteDocAttachment'
+ before_each
+ doc._id = "123";
+ doc._attachments = {
+ "friend.txt" : {
+ "content_type": "text\/plain",
+ // base64 encoded
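+ // (e.g. the output of btoa("Lee Adama is a former Colonial Fleet Reserve officer."))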
+ "data": "TGVlIEFkYW1hIGlzIGEgZm9ybWVyIENvbG9uaWFsIEZsZWV0IFJlc2VydmUgb2ZmaWNlci4="
+ }
+ };
+ saved_doc = db.save(doc);
+ end
+
+ it 'should be executed on a document with an attachment'
+ db.open("123")._attachments.should.include "friend.txt"
+ db.open("123")._attachments["friend.txt"].stub.should.be_true
+ end
+
+ describe 'after delete'
+ before_each
+ delete_response = db.deleteDocAttachment({_id : "123", _rev : saved_doc.rev}, "friend.txt");
+ db.open("123");
+ end
+
+ it 'should send a successful request'
+ db.last_req.status.should.eql 200
+ end
+
+ it 'should leave the document untouched'
+ db.open("123").Callsign.should.eql "Starbuck"
+ end
+
+ it 'should result in a deleted document attachment'
+ db.open("123").should.not.include "_attachments"
+ end
+
+ it 'should record the revision in the document whose attachment has been deleted'
+ var responseText = db.request("GET", "/spec_db/123?rev=" + delete_response.rev).responseText;
+ var deleted_doc = JSON.parse(responseText);
+ deleted_doc._rev.should.eql delete_response.rev
+ deleted_doc._id.should.eql delete_response.id
+ end
+
+ it 'should return ok true, the ID and the revision of the document whose attachment has been deleted'
+ delete_response.ok.should.be_true
+ delete_response.id.should.eql "123"
+ delete_response.should.have_property 'rev'
+ end
+ end
+ end
+
+ describe '.bulkSave'
+ before_each
+ doc = {"Name" : "Kara Thrace", "Callsign" : "Starbuck"};
+ doc2 = {"Name" : "Karl C. Agathon", "Callsign" : "Helo"};
+ doc3 = {"Name" : "Sharon Valerii", "Callsign" : "Boomer"};
+ docs = [doc, doc2, doc3];
+ end
+
+ it 'should save the documents'
+ db.bulkSave(docs);
+ db.last_req.status.should.eql 201
+ end
+
+ it 'should return ID and revision of the documents'
+ var response = db.bulkSave(docs);
+ response[0].id.should.be_a String
+ response[0].id.should.have_length 32
+ response[0].rev.should.be_a String
+ response[0].rev.length.should.be_at_least 30
+ response[1].id.should.be_a String
+ response[1].id.should.have_length 32
+ response[1].rev.should.be_a String
+ response[1].rev.length.should.be_at_least 30
+ response[2].id.should.be_a String
+ response[2].id.should.have_length 32
+ response[2].rev.should.be_a String
+ response[2].rev.length.should.be_at_least 30
+ end
+
+ it 'should result in saved documents'
+ var response = db.bulkSave(docs);
+ db.open(response[0].id).Name.should.eql "Kara Thrace"
+ db.open(response[1].id).Name.should.eql "Karl C. Agathon"
+ db.open(response[2].id).Name.should.eql "Sharon Valerii"
+ end
+
+ it 'should save the documents with the specified IDs'
+ doc._id = "123";
+ doc2._id = "456";
+ docs = [doc, doc2, doc3];
+ var response = db.bulkSave(docs);
+ response[0].id.should.eql "123"
+ response[1].id.should.eql "456"
+ response[2].id.should.have_length 32
+ end
+
+ it 'should pass through the options'
+ doc._id = "123";
+ docs = [doc];
+ CouchDB.should.receive("request", "once").with_args("POST", "/spec_db/_bulk_docs", {body: '{"docs":[{"Name":"Kara Thrace","Callsign":"Starbuck","_id":"123"}],"batch":"ok"}'})
+ db.bulkSave(docs, {"batch" : "ok"});
+ end
+ end
+ end
+end \ No newline at end of file
diff --git a/share/www/spec/couch_js_instance_methods_2_spec.js b/share/www/spec/couch_js_instance_methods_2_spec.js
new file mode 100644
index 00000000..76df6368
--- /dev/null
+++ b/share/www/spec/couch_js_instance_methods_2_spec.js
@@ -0,0 +1,246 @@
+// Licensed under the Apache License, Version 2.0 (the "License"); you may not
+// use this file except in compliance with the License. You may obtain a copy of
+// the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations under
+// the License.
+
+// Specs for couch.js lines 132-199
+
+describe 'CouchDB instance'
+ before_each
+ db = new CouchDB("spec_db", {"X-Couch-Full-Commit":"false"});
+ db.createDb();
+ end
+
+ after_each
+ db.deleteDb();
+ end
+
+ describe '.ensureFullCommit'
+ it 'should return ok true'
+ db.ensureFullCommit().ok.should.be_true
+ end
+
+ it 'should return the instance start time'
+ db.ensureFullCommit().instance_start_time.should.have_length 16
+ end
+
+ it 'should post _ensure_full_commit to the db'
+ db.should.receive("request", "once").with_args("POST", "/spec_db/_ensure_full_commit")
+ db.ensureFullCommit();
+ end
+ end
+
+ describe '.query'
+ before_each
+ db.save({"Name" : "Cally Tyrol", "job" : "deckhand", "_id" : "789"});
+ db.save({"Name" : "Felix Gaeta", "job" : "officer", "_id" : "123"});
+ db.save({"Name" : "Samuel T. Anders", "job" : "pilot", "_id" : "456"});
+ map_function = "function(doc) { emit(doc._id, 1); }";
+ reduce_function = "function(key, values, rereduce) { return sum(values); }";
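+ // sum() is one of the built-ins CouchDB's JavaScript view server exposes
+ // to reduce functions; it adds up the values array.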
+ end
+
+ it 'should apply the map function'
+ var result = db.query(map_function);
+
+ result.rows.should.have_length 3
+ result.rows[0].id.should.eql "123"
+ result.rows[0].key.should.eql "123"
+ result.rows[0].value.should.eql 1
+ result.rows[1].id.should.eql "456"
+ result.rows[1].key.should.eql "456"
+ result.rows[1].value.should.eql 1
+ result.rows[2].id.should.eql "789"
+ result.rows[2].key.should.eql "789"
+ result.rows[2].value.should.eql 1
+ end
+
+ it 'should apply the reduce function'
+ var result = db.query(map_function, reduce_function);
+
+ result.rows.should.have_length 1
+ result.rows[0].key.should.be_null
+ result.rows[0].value.should.eql 3
+ end
+
+ it 'should pass through the options'
+ var result = db.query(map_function, null, {"startkey":"456"});
+
+ result.rows.should.have_length 2
+ result.rows[0].id.should.eql "456"
+ result.rows[0].key.should.eql "456"
+ result.rows[0].value.should.eql 1
+ result.rows[1].id.should.eql "789"
+ result.rows[1].key.should.eql "789"
+ result.rows[1].value.should.eql 1
+ end
+
+ it 'should pass through the keys'
+ var result = db.query(map_function, null, {}, ["456", "123"]);
+
+ result.rows.should.have_length 2
+ result.rows[0].id.should.eql "456"
+ result.rows[0].key.should.eql "456"
+ result.rows[0].value.should.eql 1
+ result.rows[1].id.should.eql "123"
+ result.rows[1].key.should.eql "123"
+ result.rows[1].value.should.eql 1
+ end
+
+ it 'should pass through the options and the keys'
+ var result = db.query(map_function, null, {"include_docs":"true"}, ["456"]);
+
+ result.rows.should.have_length 1
+ result.rows[0].id.should.eql "456"
+ result.rows[0].key.should.eql "456"
+ result.rows[0].value.should.eql 1
+ result.rows[0].doc["job"].should.eql "pilot"
+ result.rows[0].doc["_rev"].length.should.be_at_least 30
+ end
+
+ it 'should also apply a view written in Erlang'
+ // when this test fails, read this: http://wiki.apache.org/couchdb/EnableErlangViews
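+ // (typically an entry in the [native_query_servers] ini section, e.g.:
+ // erlang = {couch_native_process, start_link, []})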
+ var erlang_map = 'fun({Doc}) -> ' +
+ 'ID = proplists:get_value(<<"_id">>, Doc, null), ' +
+ 'Emit(ID, 1) ' +
+ 'end.';
+ var result = db.query(erlang_map, null, null, null, "erlang");
+
+ result.rows.should.have_length 3
+ result.rows[0].id.should.eql "123"
+ result.rows[0].key.should.eql "123"
+ result.rows[0].value.should.eql 1
+ result.rows[1].id.should.eql "456"
+ result.rows[1].key.should.eql "456"
+ result.rows[1].value.should.eql 1
+ result.rows[2].id.should.eql "789"
+ result.rows[2].key.should.eql "789"
+ result.rows[2].value.should.eql 1
+ end
+ end
+
+ describe '.view'
+ before_each
+ db.save({"Name" : "Cally Tyrol", "job" : "deckhand", "_id" : "789"});
+ db.save({"Name" : "Felix Gaeta", "job" : "officer", "_id" : "123"});
+ db.save({"Name" : "Samuel T. Anders", "job" : "pilot", "_id" : "456"});
+ view = {
+ "views" : {
+ "people" : {
+ "map" : "function(doc) { emit(doc._id, doc.Name); }"
+ }
+ },
+ "_id" : "_design/spec_db"
+ };
+ db.save(view);
+ end
+
+ it 'should apply the view'
+ var result = db.view('spec_db/people');
+
+ result.rows.should.have_length 3
+ result.rows[0].id.should.eql "123"
+ result.rows[0].key.should.eql "123"
+ result.rows[0].value.should.eql "Felix Gaeta"
+ result.rows[1].id.should.eql "456"
+ result.rows[1].key.should.eql "456"
+ result.rows[1].value.should.eql "Samuel T. Anders"
+ result.rows[2].id.should.eql "789"
+ result.rows[2].key.should.eql "789"
+ result.rows[2].value.should.eql "Cally Tyrol"
+ end
+
+ it 'should pass through the options'
+ var result = db.view('spec_db/people', {"skip":"2"});
+
+ result.rows.should.have_length 1
+ result.rows[0].id.should.eql "789"
+ result.rows[0].key.should.eql "789"
+ result.rows[0].value.should.eql "Cally Tyrol"
+ end
+
+ it 'should pass through the keys'
+ var result = db.view('spec_db/people', {}, ["456", "123"]);
+
+ result.rows.should.have_length 2
+ result.rows[0].id.should.eql "456"
+ result.rows[0].key.should.eql "456"
+ result.rows[0].value.should.eql "Samuel T. Anders"
+ result.rows[1].id.should.eql "123"
+ result.rows[1].key.should.eql "123"
+ result.rows[1].value.should.eql "Felix Gaeta"
+ end
+
+ it 'should pass through the options and the keys'
+ var result = db.view('spec_db/people', {"include_docs":"true"}, ["456"]);
+
+ result.rows.should.have_length 1
+ result.rows[0].id.should.eql "456"
+ result.rows[0].key.should.eql "456"
+ result.rows[0].value.should.eql "Samuel T. Anders"
+ result.rows[0].doc["job"].should.eql "pilot"
+ result.rows[0].doc["_rev"].length.should.be_at_least 30
+ end
+
+ it 'should return null when the view does not exist'
+ var result = db.view('spec_db/non_existing_view');
+
+ result.should.be_null
+ end
+ end
+
+ describe '.info'
+ before_each
+ result = db.info();
+ end
+
+ it 'should return the name of the database'
+ result.db_name.should.eql "spec_db"
+ end
+
+ it 'should return the number of documents'
+ result.doc_count.should.eql 0
+ end
+
+ it 'should return the start time of the db instance'
+ result.instance_start_time.should.have_length 16
+ end
+ end
+
+ describe '.designInfo'
+ before_each
+ designDoc = {
+ "views" : {
+ "people" : {
+ "map" : "function(doc) { emit(doc._id, doc); }"
+ }
+ },
+ "_id" : "_design/spec_db"
+ };
+ db.save(designDoc);
+ result = db.designInfo("_design/spec_db");
+ end
+
+ it 'should return the database name'
+ result.name.should.eql "spec_db"
+ end
+
+ it 'should return the language of the view'
+ result.view_index.language.should.eql "javascript"
+ end
+
+ it 'should return the update sequence of the view'
+ result.view_index.update_seq.should.eql 0
+ end
+
+ it 'should return the signature of the view'
+ result.view_index.signature.should.have_length 32
+ end
+ end
+end \ No newline at end of file
diff --git a/share/www/spec/couch_js_instance_methods_3_spec.js b/share/www/spec/couch_js_instance_methods_3_spec.js
new file mode 100644
index 00000000..b7464c01
--- /dev/null
+++ b/share/www/spec/couch_js_instance_methods_3_spec.js
@@ -0,0 +1,215 @@
+// Licensed under the Apache License, Version 2.0 (the "License"); you may not
+// use this file except in compliance with the License. You may obtain a copy of
+// the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations under
+// the License.
+
+// Specs for couch.js lines 201-265
+
+describe 'CouchDB instance'
+ before_each
+ db = new CouchDB("spec_db", {"X-Couch-Full-Commit":"false"});
+ db.createDb();
+ end
+
+ after_each
+ db.deleteDb();
+ end
+
+ describe '.allDocs'
+ it 'should return no docs when there are none'
+ db.allDocs().total_rows.should.eql 0
+ db.allDocs().rows.should.eql []
+ end
+
+ describe 'with docs'
+ before_each
+ db.save({"Name" : "Felix Gaeta", "_id" : "123"});
+ db.save({"Name" : "Samuel T. Anders", "_id" : "456"});
+ end
+
+ it 'should return all docs'
+ var result = db.allDocs();
+
+ result.total_rows.should.eql 2
+ result.rows.should.have_length 2
+ result.rows[0].id.should.eql "123"
+ result.rows[0].key.should.eql "123"
+ result.rows[0].value.rev.length.should.be_at_least 30
+ result.rows[1].id.should.eql "456"
+ end
+
+ it 'should pass through the options'
+ var result = db.allDocs({"startkey": "123", "limit": "1"});
+
+ result.rows.should.have_length 1
+ result.rows[0].id.should.eql "123"
+ end
+
+ it 'should pass through the keys'
+ var result = db.allDocs({}, ["456"]);
+
+ result.rows.should.have_length 1
+ result.rows[0].id.should.eql "456"
+ result.rows[0].key.should.eql "456"
+ result.rows[0].value.rev.length.should.be_at_least 30
+ end
+
+ it 'should pass through the options and the keys'
+ var result = db.allDocs({"include_docs":"true"}, ["456"]);
+
+ result.rows.should.have_length 1
+ result.rows[0].id.should.eql "456"
+ result.rows[0].key.should.eql "456"
+ result.rows[0].value.rev.length.should.be_at_least 30
+ result.rows[0].doc["Name"].should.eql "Samuel T. Anders"
+ result.rows[0].doc["_rev"].length.should.be_at_least 30
+ end
+
+ end
+ end
+
+ describe '.designDocs'
+ it 'should return nothing when there are no design docs'
+ db.save({"Name" : "Felix Gaeta", "_id" : "123"});
+ db.designDocs().rows.should.eql []
+ end
+
+ it 'should return all design docs'
+ var designDoc = {
+ "views" : {
+ "people" : {
+ "map" : "function(doc) { emit(doc._id, doc); }"
+ }
+ },
+ "_id" : "_design/spec_db"
+ };
+ db.save(designDoc);
+ db.save({"Name" : "Felix Gaeta", "_id" : "123"});
+
+ var result = db.designDocs();
+
+ result.total_rows.should.eql 2
+ result.rows.should.have_length 1
+ result.rows[0].id.should.eql "_design/spec_db"
+ result.rows[0].key.should.eql "_design/spec_db"
+ result.rows[0].value.rev.length.should.be_at_least 30
+ end
+ end
+
+ describe '.changes'
+ it 'should return no changes when there are none'
+ db.changes().last_seq.should.eql 0
+ db.changes().results.should.eql []
+ end
+
+ describe 'with changes'
+ before_each
+ db.save({"Name" : "Felix Gaeta", "_id" : "123"});
+ db.save({"Name" : "Samuel T. Anders", "_id" : "456"});
+ end
+
+ it 'should return changes'
+ var result = db.changes();
+
+ result.last_seq.should.eql 2
+ result.results[0].id.should.eql "123"
+ result.results[0].seq.should.eql 1
+ result.results[0].changes[0].rev.length.should.be_at_least 30
+ result.results[1].id.should.eql "456"
+ result.results[1].seq.should.eql 2
+ result.results[1].changes[0].rev.length.should.be_at_least 30
+ end
+
+ it 'should pass through the options'
+ var result = db.changes({"since":"1"});
+
+ result.last_seq.should.eql 2
+ result.results[0].id.should.eql "456"
+ end
+ end
+ end
+
+ describe '.compact'
+ it 'should return ok true'
+ db.compact().ok.should.be_true
+ end
+
+ it 'should post _compact to the db'
+ db.should.receive("request", "once").with_args("POST", "/spec_db/_compact")
+ db.compact();
+ end
+ end
+
+ describe '.viewCleanup'
+ it 'should return ok true'
+ db.viewCleanup().ok.should.be_true
+ end
+
+ it 'should post _view_cleanup to the db'
+ db.should.receive("request", "once").with_args("POST", "/spec_db/_view_cleanup")
+ db.viewCleanup();
+ end
+ end
+
+ describe '.setDbProperty'
+ it 'should return ok true'
+ db.setDbProperty("_revs_limit", 1500).ok.should.be_true
+ end
+
+ it 'should set a db property'
+ db.setDbProperty("_revs_limit", 1500);
+ db.getDbProperty("_revs_limit").should.eql 1500
+ db.setDbProperty("_revs_limit", 1200);
+ db.getDbProperty("_revs_limit").should.eql 1200
+ end
+ end
+
+ describe '.getDbProperty'
+ it 'should get a db property'
+ db.setDbProperty("_revs_limit", 1200);
+ db.getDbProperty("_revs_limit").should.eql 1200
+ end
+
+ it 'should throw an error when the property does not exist'
+ -{ db.getDbProperty("_doesnt_exist") }.should.throw_error
+ end
+ end
+
+ describe '.setSecObj'
+ it 'should return ok true'
+ db.setSecObj({"readers":{"names":["laura"],"roles":["president"]}}).ok.should.be_true
+ end
+
+ it 'should save a well-formed object into the _security object'
+ db.should.receive("request", "once").with_args("PUT", "/spec_db/_security", {body: '{"readers":{"names":["laura"],"roles":["president"]}}'})
+ db.setSecObj({"readers": {"names" : ["laura"], "roles" : ["president"]}})
+ end
+
+ it 'should throw an error when the readers or admins object is malformed'
+ -{ db.setSecObj({"admins":["cylon"]}) }.should.throw_error
+ end
+
+ it 'should save any other object into the _security object'
+ db.setSecObj({"something" : "anything"})
+ db.getSecObj().should.eql {"something" : "anything"}
+ end
+ end
+
+ describe '.getSecObj'
+ it 'should get the security object'
+ db.setSecObj({"admins" : {"names" : ["bill"], "roles" : ["admiral"]}})
+ db.getSecObj().should.eql {"admins" : {"names": ["bill"], "roles": ["admiral"]}}
+ end
+
+ it 'should return an empty object when there is no security object'
+ db.getSecObj().should.eql {}
+ end
+ end
+end \ No newline at end of file
diff --git a/share/www/spec/custom_helpers.js b/share/www/spec/custom_helpers.js
new file mode 100644
index 00000000..d29ee87b
--- /dev/null
+++ b/share/www/spec/custom_helpers.js
@@ -0,0 +1,51 @@
+// Licensed under the Apache License, Version 2.0 (the "License"); you may not
+// use this file except in compliance with the License. You may obtain a copy of
+// the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations under
+// the License.
+
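+// Replaces window.alert with a stub that records the last message in the
+// global alert_msg, so specs can assert on error message prefixes;
+// destubAlert restores the original alert afterwards.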
+function stubAlert(){
+ if(typeof(old_alert) == 'undefined'){
+ old_alert = alert;
+ }
+ alert = function(msg){
+ alert_msg = msg;
+ };
+}
+
+function destubAlert(){
+ alert = old_alert;
+}
+
+function errorCallback(status, error, reason){
+ console.log("Unexpected " + status + " error: " + error + " - " + reason)
+ throw("Unexpected " + status + " error: " + error + " - " + reason);
+}
+
+function successCallback(resp){
+ console.log("No error message here unexpectedly, successful response instead.")
+ throw("No error message here unexpectedly, successful response instead.");
+}
+
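+// Points couch_httpd_auth at a throwaway users database for the signup and
+// login specs, remembering the old authentication_db so useOldUserDb can
+// restore it afterwards.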
+function useTestUserDb(){
+ users_db = new CouchDB("spec_users_db");
+ var xhr = CouchDB.request("PUT", "/_config/couch_httpd_auth/authentication_db", {
+ body: JSON.stringify("spec_users_db")
+ });
+ if(typeof(old_auth_db) == 'undefined'){
+ old_auth_db = xhr.responseText.replace(/\n/,'').replace(/"/g,'');
+ }
+}
+
+function useOldUserDb(){
+ CouchDB.request("PUT", "/_config/couch_httpd_auth/authentication_db", {
+ body: JSON.stringify(old_auth_db)
+ });
+ users_db.deleteDb();
+} \ No newline at end of file
diff --git a/share/www/spec/jquery_couch_js_class_methods_spec.js b/share/www/spec/jquery_couch_js_class_methods_spec.js
new file mode 100644
index 00000000..f2df81b3
--- /dev/null
+++ b/share/www/spec/jquery_couch_js_class_methods_spec.js
@@ -0,0 +1,523 @@
+// Licensed under the Apache License, Version 2.0 (the "License"); you may not
+// use this file except in compliance with the License. You may obtain a copy of
+// the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations under
+// the License.
+
+// Specs for jquery_couch.js lines 48-156 and 415-448
+
+describe 'jQuery couchdb'
+ before
+ stubAlert();
+ end
+
+ after
+ destubAlert();
+ end
+
+ describe 'activeTasks'
+ before_each
+ db = $.couch.db("spec_db");
+ db.create();
+ end
+
+ after_each
+ db.drop();
+ end
+
+ it 'should return an empty array when there are no active tasks'
+ $.couch.activeTasks({
+ success: function(resp){
+ resp.should.eql []
+ },
+ error: function(status, error, reason){errorCallback(status, error, reason)}
+ });
+ end
+
+ it 'should return an active task'
+ // doing a bit of stuff here so compaction has something to do and takes a while
+ var battlestar, civilian;
+ db.saveDoc({"type":"Battlestar", "name":"Galactica"}, {
+ success: function(resp){
+ db.openDoc(resp.id, {
+ success: function(resp2){
+ battlestar = resp2;
+ },
+ error: function(status, error, reason){errorCallback(status, error, reason)}
+ });
+ },
+ error: function(status, error, reason){errorCallback(status, error, reason)}
+ });
+ battlestar.name = "Pegasus";
+ db.saveDoc(battlestar);
+
+ db.saveDoc({"type":"Civillian", "name":"Cloud 9"}, {
+ success: function(resp){
+ db.openDoc(resp.id, {
+ success: function(resp2){
+ civilian = resp2;
+ },
+ error: function(status, error, reason){errorCallback(status, error, reason)}
+ });
+ },
+ error: function(status, error, reason){errorCallback(status, error, reason)}
+ });
+ civilian.name = "Olympic Carrier";
+ db.saveDoc(civilian);
+ db.removeDoc(civilian);
+
+ db.compact({
+ ajaxStart: function(resp){
+ $.couch.activeTasks({
+ success: function(resp2){
+ resp2[0].type.should.eql "Database Compaction"
+ resp2[0].task.should.eql "spec_db"
+ resp2[0].should.have_prop "status"
+ resp2[0].should.include "pid"
+ },
+ error: function(status, error, reason){errorCallback(status, error, reason)}
+ });
+ }
+ });
+ end
+ end
+
+ describe 'allDbs'
+ it 'should return an array that includes a created database'
+ temp_db = new CouchDB("temp_spec_db", {"X-Couch-Full-Commit":"false"});
+ temp_db.createDb();
+ $.couch.allDbs({
+ success: function(resp){
+ resp.should.include "temp_spec_db"
+ },
+ error: function(status, error, reason){errorCallback(status, error, reason)}
+ });
+ temp_db.deleteDb();
+ end
+
+ it 'should return an array that does not include a database that does not exist'
+ $.couch.allDbs({
+ success: function(resp){
+ resp.should.not.include("not_existing_temp_spec_db");
+ },
+ error: function(status, error, reason){errorCallback(status, error, reason)}
+ });
+ end
+ end
+
+ describe 'config'
+ it 'should get the config settings'
+ $.couch.config({
+ success: function(resp){
+ resp.httpd.port.should.eql window.location.port
+ resp.stats.samples.should.match /\[.*\]/
+ resp.native_query_servers.should.have_prop "erlang"
+ },
+ error: function(status, error, reason){errorCallback(status, error, reason)}
+ });
+ end
+
+ it 'should get a specific config setting'
+ $.couch.config({
+ success: function(resp){
+ parseInt(resp.max_document_size).should.be_a Number
+ resp.delayed_commits.should.be_a String
+ resp.database_dir.should.be_a String
+ },
+ error: function(status, error, reason){errorCallback(status, error, reason)}
+ }, "couchdb");
+ end
+
+ it 'should update a config setting'
+ $.couch.config({
+ success: function(resp){
+ resp.should.eql ""
+ },
+ error: function(status, error, reason){errorCallback(status, error, reason)}
+ }, "test", "colony", "Caprica");
+
+ $.couch.config({
+ success: function(resp){
+ resp.colony.should.eql "Caprica"
+ },
+ error: function(status, error, reason){errorCallback(status, error, reason)}
+ }, "test");
+
+ $.couch.config({}, "test", "colony", null);
+ end
+
+ it 'should delete a config setting'
+ $.couch.config({}, "test", "colony", "Caprica");
+
+ $.couch.config({
+ success: function(resp){
+ resp.should.eql "Caprica"
+ },
+ error: function(status, error, reason){errorCallback(status, error, reason)}
+ }, "test", "colony", null);
+
+ $.couch.config({
+ success: function(resp){
+ resp.should.eql {}
+ },
+ error: function(status, error, reason){errorCallback(status, error, reason)}
+ }, "test");
+ end
+
+ it 'should alert with an error message prefix'
+ $.couch.config("asdf", "asdf", "asdf");
+ alert_msg.should.match /An error occurred retrieving\/updating the server configuration/
+ end
+ end
+
+ describe 'session'
+ it 'should return information about the session'
+ $.couch.session({
+ success: function(resp){
+ resp.info.should.have_prop 'authentication_db'
+ resp.userCtx.should.include 'name'
+ resp.userCtx.roles.should.be_an Array
+ },
+ error: function(status, error, reason){errorCallback(status, error, reason)}
+ });
+ end
+ end
+
+ describe 'userDb'
+ it 'should return the userDb'
+ var authentication_db;
+ $.couch.session({
+ success: function(resp){
+ authentication_db = resp.info.authentication_db;
+ },
+ error: function(status, error, reason){errorCallback(status, error, reason)}
+ });
+
+ $.couch.userDb(function(resp){
+ resp.name.should.eql authentication_db
+ });
+ end
+
+ it 'should return a db instance'
+ $.couch.userDb(function(resp){
+ resp.should.respond_to 'allDocs'
+ resp.should.respond_to 'bulkSave'
+ });
+ end
+ end
+
+ describe 'user_db stuff'
+ before
+ useTestUserDb();
+ end
+
+ after
+ useOldUserDb();
+ end
+
+ describe 'signup'
+ it 'should return a saved user'
+ $.couch.signup(
+ {name: "Tom Zarek"}, "secretpass", {
+ success: function(resp){
+ resp.id.should.eql "org.couchdb.user:Tom Zarek"
+ resp.rev.length.should.be_at_least 30
+ resp.ok.should.be_true
+ users_db.deleteDoc({_id : resp.id, _rev : resp.rev})
+ },
+ error: function(status, error, reason){errorCallback(status, error, reason)}
+ });
+ end
+
+ it 'should create a userDoc in the user db'
+ $.couch.signup(
+ {name: "Tom Zarek"}, "secretpass", {
+ success: function(resp){
+ var user = users_db.open(resp.id);
+ user.name.should.eql "Tom Zarek"
+ user._id.should.eql "org.couchdb.user:Tom Zarek"
+ user.roles.should.eql []
+ user.password_sha.length.should.be_at_least 30
+ user.password_sha.should.be_a String
+ users_db.deleteDoc({_id : resp.id, _rev : resp.rev})
+ },
+ error: function(status, error, reason){errorCallback(status, error, reason)}
+ });
+ end
+
+ it 'should create a userDoc with roles when specified'
+ $.couch.signup(
+ {name: "Tom Zarek", roles: ["vice_president", "activist"]}, "secretpass", {
+ success: function(resp){
+ var user = users_db.open(resp.id);
+ user.roles.should.eql ["vice_president", "activist"]
+ users_db.deleteDoc({_id : resp.id, _rev : resp.rev})
+ },
+ error: function(status, error, reason){errorCallback(status, error, reason)}
+ });
+ end
+ end
+
+ describe 'login'
+ before_each
+ user = {};
+ $.couch.signup({name: "Tom Zarek", roles: ["vice_president", "activist"]}, "secretpass", {
+ success: function(resp){
+ user.id = resp.id;
+ user.rev = resp.rev;
+ },
+ error: function(status, error, reason){errorCallback(status, error, reason)}
+ });
+ end
+
+ after_each
+ users_db.deleteDoc({_id : user.id, _rev : user.rev})
+ end
+
+ it 'should return the logged in user'
+ $.couch.login({
+ name: "Tom Zarek",
+ password: "secretpass",
+ success: function(resp){
+ resp.name.should.eql "Tom Zarek"
+ resp.ok.should.be_true
+ resp.roles.should.eql ["vice_president", "activist"]
+ },
+ error: function(status, error, reason){errorCallback(status, error, reason)}
+ });
+ end
+
+ it 'should result in a session for the logged in user'
+ $.couch.login({
+ name: "Tom Zarek",
+ password: "secretpass"
+ });
+ $.couch.session({
+ success: function(resp){
+ resp.info.authentication_db.should.eql "spec_users_db"
+ resp.userCtx.name.should.eql "Tom Zarek"
+ resp.userCtx.roles.should.eql ["vice_president", "activist"]
+ },
+ error: function(status, error, reason){errorCallback(status, error, reason)}
+ });
+ end
+
+ it 'should return a 401 when the password is wrong'
+ $.couch.login({
+ name: "Tom Zarek",
+ password: "wrongpass",
+ error: function(status, error, reason){
+ status.should.eql 401
+ error.should.eql "unauthorized"
+ reason.should.eql "Name or password is incorrect."
+ },
+ success: function(resp){successCallback(resp)}
+ });
+ end
+
+ it 'should return a 401 when the user does not exist in the users db'
+ $.couch.login({
+ name: "Number Three",
+ password: "secretpass",
+ error: function(status, error, reason){
+ status.should.eql 401
+ error.should.eql "unauthorized"
+ reason.should.eql "Name or password is incorrect."
+ },
+ success: function(resp){successCallback(resp)}
+ });
+ end
+
+ it 'should alert with an error message prefix'
+ $.couch.login("asdf");
+ alert_msg.should.match /An error occurred logging in/
+ end
+ end
+
+ describe 'logout'
+ before_each
+ user = {};
+ $.couch.signup({name: "Tom Zarek", roles: ["vice_president", "activist"]}, "secretpass", {
+ success: function(resp){
+ user.id = resp.id;
+ user.rev = resp.rev;
+ },
+ error: function(status, error, reason){errorCallback(status, error, reason)}
+ });
+ $.couch.login({name: "Tom Zarek", password: "secretpass"});
+ end
+
+ after_each
+ users_db.deleteDoc({_id : user.id, _rev : user.rev})
+ end
+
+ it 'should return ok true'
+ $.couch.logout({
+ success: function(resp){
+ resp.ok.should.be_true
+ },
+ error: function(status, error, reason){errorCallback(status, error, reason)}
+ });
+ end
+
+ it 'should result in an empty session'
+ $.couch.logout();
+ $.couch.session({
+ success: function(resp){
+ resp.userCtx.name.should.be_null
+ resp.userCtx.roles.should.not.include ["vice_president"]
+ },
+ error: function(status, error, reason){errorCallback(status, error, reason)}
+ });
+ end
+ end
+ end
+
+ describe 'encodeDocId'
+ it 'should return the encoded docID when it is not a design document'
+ $.couch.encodeDocId("viper").should.eql(encodeURIComponent("viper"))
+ end
+
+ it 'should encode only the name of the design document'
+ $.couch.encodeDocId("_design/raptor").should.eql("_design/" + encodeURIComponent("raptor"))
+ end
+
+ it 'should also work when the name of the design document contains slashes'
+ $.couch.encodeDocId("_design/battlestar/_view/crew").should.eql("_design/" + encodeURIComponent("battlestar/_view/crew"))
+ end
+ end
+
+ describe 'info'
+ it 'should return the CouchDB version'
+ $.couch.info({
+ success: function(resp){
+ resp.couchdb.should.eql "Welcome"
+ resp.version.should.match /^\d\d?\.\d\d?\.\d\d?.*/
+ },
+ error: function(status, error, reason){errorCallback(status, error, reason)}
+ });
+ end
+ end
+
+ describe 'replicate'
+ before_each
+ db = $.couch.db("spec_db");
+ db.create();
+ db2 = $.couch.db("spec_db_2");
+ db2.create();
+ host = window.location.protocol + "//" + window.location.host;
+ end
+
+ after_each
+ db.drop();
+ db2.drop();
+ end
+
+ it 'should return no_changes true when there are no changes between the dbs'
+ $.couch.replicate(host + db.uri, host + db2.uri, {
+ success: function(resp){
+ resp.ok.should.be_true
+ resp.no_changes.should.be_true
+ },
+ error: function(status, error, reason){errorCallback(status, error, reason)}
+ });
+ end
+
+ it 'should return the session ID'
+ db.saveDoc({'type':'battlestar', 'name':'galactica'});
+ $.couch.replicate(host + db.uri, host + db2.uri, {
+ success: function(resp){
+ resp.session_id.length.should.be_at_least 30
+ },
+ error: function(status, error, reason){errorCallback(status, error, reason)}
+ });
+ end
+
+ it 'should return source_last_seq'
+ db.saveDoc({'type':'battlestar', 'name':'galactica'});
+ db.saveDoc({'type':'battlestar', 'name':'pegasus'});
+
+ $.couch.replicate(host + db.uri, host + db2.uri, {
+ success: function(resp){
+ resp.source_last_seq.should.eql 2
+ },
+ error: function(status, error, reason){errorCallback(status, error, reason)}
+ });
+ end
+
+ it 'should return the replication history'
+ db.saveDoc({'type':'battlestar', 'name':'galactica'});
+ db.saveDoc({'type':'battlestar', 'name':'pegasus'});
+
+ $.couch.replicate(host + db.uri, host + db2.uri, {
+ success: function(resp){
+ resp.history[0].docs_written.should.eql 2
+ resp.history[0].start_last_seq.should.eql 0
+ },
+ error: function(status, error, reason){errorCallback(status, error, reason)}
+ });
+ end
+
+ it 'should pass through replication options'
+ db.saveDoc({'type':'battlestar', 'name':'galactica'});
+ db2.drop();
+ $.couch.replicate(host + db.uri, host + db2.uri, {
+ error: function(status, error, reason){
+ status.should.eql 500
+ reason.should.match /db_not_found/
+ },
+ success: function(resp){successCallback(resp)}
+ });
+
+ $.couch.replicate(host + db.uri, host + db2.uri, {
+ success: function(resp){
+ resp.ok.should.eql true
+ resp.history[0].docs_written.should.eql 1
+ },
+ error: function(status, error, reason){errorCallback(status, error, reason)}
+ }, {
+ "create_target":true
+ });
+
+ db2.info({
+ success: function(resp){
+ resp.db_name.should.eql "spec_db_2"
+ },
+ error: function(status, error, reason){errorCallback(status, error, reason)}
+ });
+ end
+
+ it 'should alert with an error message prefix'
+ $.couch.replicate("asdf");
+ alert_msg.should.match /Replication failed/
+ end
+ end
+
+ describe 'newUUID'
+ it 'should return a new UUID'
+ var new_uuid = $.couch.newUUID(1);
+ new_uuid.should.be_a String
+ new_uuid.should.have_length 32
+ end
+
+ it 'should fill the uuidCache with the specified number minus 1'
+ // we can't reach the uuidCache from here, so we mock the next request
+ // to test that the next uuid is not coming from the request, but from the cache.
+ $.couch.newUUID(2);
+ mock_request().and_return({'uuids':['a_sample_uuid']})
+ $.couch.newUUID(1).should.not.eql 'a_sample_uuid'
+ $.couch.newUUID(1).should.eql 'a_sample_uuid'
+ end
+
+ it 'should alert with an error message prefix'
+ $.couch.newUUID("asdf");
+ alert_msg.should.match /Failed to retrieve UUID batch/
+ end
+ end
+end \ No newline at end of file
diff --git a/share/www/spec/jquery_couch_js_instance_methods_1_spec.js b/share/www/spec/jquery_couch_js_instance_methods_1_spec.js
new file mode 100644
index 00000000..8538c856
--- /dev/null
+++ b/share/www/spec/jquery_couch_js_instance_methods_1_spec.js
@@ -0,0 +1,202 @@
+// Licensed under the Apache License, Version 2.0 (the "License"); you may not
+// use this file except in compliance with the License. You may obtain a copy of
+// the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations under
+// the License.
+
+// Specs for jquery_couch.js lines 163-209
+
+describe 'jQuery couchdb db'
+ before
+ stubAlert();
+ end
+
+ after
+ destubAlert();
+ end
+
+ before_each
+ db = $.couch.db('spec_db');
+ end
+
+ describe 'constructor'
+ it 'should set the name'
+ db.name.should.eql 'spec_db'
+ end
+
+ it 'should set the uri'
+ db.uri.should.eql '/spec_db/'
+ end
+ end
+
+ describe 'triggering db functions'
+ before_each
+ db.create();
+ end
+
+ after_each
+ db.drop();
+ end
+
+ describe 'compact'
+ it 'should return ok true'
+ db.compact({
+ success: function(resp) {
+ resp.ok.should.be_true
+ },
+ error: function(status, error, reason){errorCallback(status, error, reason)}
+ });
+ end
+
+ it 'should trigger _compact'
+ db.compact({
+ success: function(resp, obj) {
+ obj.url.should.eql "/spec_db/_compact"
+ },
+ error: function(status, error, reason){errorCallback(status, error, reason)}
+ });
+ end
+ end
+
+ describe 'viewCleanup'
+ it 'should return ok true'
+ db.viewCleanup({
+ success: function(resp) {
+ resp.ok.should.be_true
+ },
+ error: function(status, error, reason){errorCallback(status, error, reason)}
+ });
+ end
+
+ it 'should trigger _view_cleanup'
+ db.viewCleanup({
+ success: function(resp, obj) {
+ obj.url.should.eql "/spec_db/_view_cleanup"
+ },
+ error: function(status, error, reason){errorCallback(status, error, reason)}
+ });
+ end
+ end
+
+ describe 'compactView'
+ before_each
+ var designDoc = {
+ "views" : {
+ "people" : {
+ "map" : "function(doc) { emit(doc._id, doc); }"
+ }
+ },
+ "_id" : "_design/myview"
+ };
+ db.saveDoc(designDoc);
+ db.saveDoc({"Name" : "Felix Gaeta", "_id" : "123"});
+ end
+
+ it 'should return ok true'
+ db.compactView("myview", {
+ success: function(resp) {
+ resp.ok.should.be_true
+ },
+ error: function(status, error, reason){errorCallback(status, error, reason)}
+ });
+ end
+
+ it 'should trigger _compact_view with the groupname'
+ db.compactView("myview", {
+ success: function(resp, obj) {
+ obj.url.should.eql "/spec_db/_compact/myview"
+ },
+ error: function(status, error, reason){errorCallback(status, error, reason)}
+ });
+ end
+
+ it 'should raise a 404 error when the design name does not exist'
+ db.compactView("non_existing_design_name", {
+ error: function(status, error, reason){
+ status.should.eql 404
+ error.should.eql "not_found"
+ reason.should.eql "missing"
+ },
+ success: function(resp){successCallback(resp)}
+ });
+ end
+
+ it 'should alert with an error message prefix'
+ db.compactView("asdf");
+ alert_msg.should.match /The view could not be compacted/
+ end
+ end
+ end
+
+ describe 'create'
+ after_each
+ db.drop();
+ end
+
+ it 'should return ok true'
+ db.create({
+ success: function(resp) {
+ resp.ok.should.be_true
+ },
+ error: function(status, error, reason){errorCallback(status, error, reason)}
+ });
+ end
+
+ it 'should result in a created db'
+ db.create();
+ db.create({
+ error: function(status, error, reason){
+ status.should.eql 412
+ error.should.eql "file_exists"
+ reason.should.eql "The database could not be created, the file already exists."
+ },
+ success: function(resp){successCallback(resp)}
+ });
+ end
+
+ it 'should alert with an error message prefix'
+ db.create();
+ db.create();
+ alert_msg.should.match /The database could not be created/
+ end
+ end
+
+ describe 'drop'
+ before_each
+ db.create();
+ end
+
+ it 'should return ok true'
+ db.drop({
+ success: function(resp) {
+ resp.ok.should.be_true
+ },
+ error: function(status, error, reason){errorCallback(status, error, reason)}
+ });
+ end
+
+ it 'should result in a deleted db'
+ db.drop();
+ db.drop({
+ error: function(status, error, reason){
+ status.should.eql 404
+ error.should.eql "not_found"
+ reason.should.eql "missing"
+ },
+ success: function(resp){successCallback(resp)}
+ });
+ end
+
+ it 'should alert with an error message prefix'
+ db.drop();
+ db.drop();
+ alert_msg.should.match /The database could not be deleted/
+ end
+ end
+end \ No newline at end of file
diff --git a/share/www/spec/jquery_couch_js_instance_methods_2_spec.js b/share/www/spec/jquery_couch_js_instance_methods_2_spec.js
new file mode 100644
index 00000000..8f35affa
--- /dev/null
+++ b/share/www/spec/jquery_couch_js_instance_methods_2_spec.js
@@ -0,0 +1,433 @@
+// Licensed under the Apache License, Version 2.0 (the "License"); you may not
+// use this file except in compliance with the License. You may obtain a copy of
+// the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations under
+// the License.
+
+// Specs for jquery_couch.js lines 210-299
+
+describe 'jQuery couchdb db'
+ before
+ stubAlert();
+ end
+
+ after
+ destubAlert();
+ end
+
+ before_each
+ db = $.couch.db('spec_db');
+ db.create();
+ end
+
+ after_each
+ db.drop();
+ end
+
+ describe 'info'
+ before_each
+ result = {};
+ db.info({
+ success: function(resp) { result = resp; },
+ error: function(status, error, reason){errorCallback(status, error, reason)}
+ });
+ end
+
+ it 'should return the name of the database'
+ result.db_name.should.eql "spec_db"
+ end
+
+ it 'should return the number of documents'
+ result.doc_count.should.eql 0
+ end
+
+ it 'should return the start time of the db instance'
+ result.instance_start_time.should.have_length 16
+ end
+ end
+
+ describe 'allDocs'
+ it 'should return no docs when there are none'
+ db.allDocs({
+ success: function(resp) {
+ resp.total_rows.should.eql 0
+ resp.rows.should.eql []
+ },
+ error: function(status, error, reason){errorCallback(status, error, reason)}
+ });
+ end
+
+ describe 'with docs'
+ before_each
+ db.saveDoc({"Name" : "Felix Gaeta", "_id" : "123"});
+ db.saveDoc({"Name" : "Samuel T. Anders", "_id" : "456"});
+ end
+
+ it 'should return all docs'
+ db.allDocs({
+ success: function(resp) {
+ resp.total_rows.should.eql 2
+ resp.rows.should.have_length 2
+ resp.rows[0].id.should.eql "123"
+ resp.rows[0].key.should.eql "123"
+ resp.rows[0].value.rev.length.should.be_at_least 30
+ resp.rows[1].id.should.eql "456"
+ },
+ error: function(status, error, reason){errorCallback(status, error, reason)}
+ });
+ end
+
+ it 'should pass through the options'
+ db.allDocs({
+ "startkey": "123",
+ "limit": "1",
+ success: function(resp) {
+ resp.rows.should.have_length 1
+ resp.rows[0].id.should.eql "123"
+ },
+ error: function(status, error, reason){errorCallback(status, error, reason)}
+ });
+ end
+ end
+ end
+
+ describe 'allDesignDocs'
+ it 'should return nothing when there are no design docs'
+ db.saveDoc({"Name" : "Felix Gaeta", "_id" : "123"});
+ db.allDesignDocs({
+ success: function(resp) {
+ resp.rows.should.eql []
+ },
+ error: function(status, error, reason){errorCallback(status, error, reason)}
+ });
+ end
+
+ it 'should return all design docs'
+ var designDoc = {
+ "views" : {
+ "people" : {
+ "map" : "function(doc) { emit(doc._id, doc); }"
+ }
+ },
+ "_id" : "_design/spec_db"
+ };
+ db.saveDoc(designDoc);
+ db.saveDoc({"Name" : "Felix Gaeta", "_id" : "123"});
+
+ db.allDesignDocs({
+ success: function(resp) {
+ resp.total_rows.should.eql 2
+ resp.rows.should.have_length 1
+ resp.rows[0].id.should.eql "_design/spec_db"
+ resp.rows[0].key.should.eql "_design/spec_db"
+ resp.rows[0].value.rev.length.should.be_at_least 30
+ },
+ error: function(status, error, reason){errorCallback(status, error, reason)}
+ });
+ end
+ end
+
+ describe 'allApps'
+ it 'should provide a custom function with appName, appPath and design document when there is an attachment with index.html'
+ var designDoc = {"_id" : "_design/with_attachments"};
+
+ designDoc._attachments = {
+ "index.html" : {
+ "content_type": "text\/html",
+ // this is "<html><p>Hi, here is index!</p></html>", base64 encoded
+ "data": "PGh0bWw+PHA+SGksIGhlcmUgaXMgaW5kZXghPC9wPjwvaHRtbD4="
+ }
+ };
+ db.saveDoc(designDoc);
+
+ db.allApps({
+ eachApp: function(appName, appPath, ddoc) {
+ appName.should.eql "with_attachments"
+ appPath.should.eql "/spec_db/_design/with_attachments/index.html"
+ ddoc._id.should.eql "_design/with_attachments"
+ ddoc._attachments["index.html"].content_type.should.eql "text/html"
+ ddoc._attachments["index.html"].length.should.eql "<html><p>Hi, here is index!</p></html>".length
+ },
+ error: function(status, error, reason){errorCallback(status, error, reason)}
+ });
+ end
+
+ it 'should provide a custom function with appName, appPath and design document when there is a couchapp with an index file'
+ var designDoc = {"_id" : "_design/with_index"};
+ designDoc.couchapp = {
+ "index" : "cylon"
+ };
+ db.saveDoc(designDoc);
+
+ db.allApps({
+ eachApp: function(appName, appPath, ddoc) {
+ appName.should.eql "with_index"
+ appPath.should.eql "/spec_db/_design/with_index/cylon"
+ ddoc._id.should.eql "_design/with_index"
+ ddoc.couchapp.index.should.eql "cylon"
+ },
+ error: function(status, error, reason){errorCallback(status, error, reason)}
+ });
+ end
+
+ it 'should not call the eachApp function when there is neither index.html in _attachments nor a couchapp index file'
+ var designDoc = {"_id" : "_design/nothing"};
+ db.saveDoc(designDoc);
+
+ var eachApp_called = false;
+ db.allApps({
+ eachApp: function(appName, appPath, ddoc) {
+ eachApp_called = true;
+ },
+ error: function(status, error, reason){errorCallback(status, error, reason)}
+ });
+
+ eachApp_called.should.be_false
+ end
+
+ it 'should alert with an error message prefix'
+ db.allApps();
+ alert_msg.should.match /Please provide an eachApp function for allApps\(\)/
+ end
+ end
+
+ describe 'openDoc'
+ before_each
+ doc = {"Name" : "Louanne Katraine", "Callsign" : "Kat", "_id" : "123"};
+ db.saveDoc(doc);
+ end
+
+ it 'should open the document'
+ db.openDoc("123", {
+ success: function(resp){
+ resp.should.eql doc
+ },
+ error: function(status, error, reason){errorCallback(status, error, reason)}
+ });
+ end
+
+ it 'should raise a 404 error when there is no document with the given ID'
+ db.openDoc("non_existing", {
+ error: function(status, error, reason){
+ status.should.eql 404
+ error.should.eql "not_found"
+ reason.should.eql "missing"
+ },
+ success: function(resp){successCallback(resp)}
+ });
+ end
+
+ it 'should pass through the options'
+ doc.Name = "Sasha";
+ db.saveDoc(doc);
+ db.openDoc("123", {
+ revs: true,
+ success: function(resp){
+ resp._revisions.start.should.eql 2
+ resp._revisions.ids.should.have_length 2
+ },
+ error: function(status, error, reason){errorCallback(status, error, reason)}
+ });
+ end
+
+ it 'should alert with an error message prefix'
+ db.openDoc("asdf");
+ alert_msg.should.match /The document could not be retrieved/
+ end
+ end
+
+ describe 'saveDoc'
+ before_each
+ doc = {"Name" : "Kara Thrace", "Callsign" : "Starbuck"};
+ end
+
+ it 'should save the document'
+ db.saveDoc(doc, {
+ success: function(resp, status){
+ status.should.eql 201
+ },
+ error: function(status, error, reason){errorCallback(status, error, reason)}
+ });
+ end
+
+ it 'should return ok true'
+ db.saveDoc(doc, {
+ success: function(resp, status){
+ resp.ok.should.be_true
+ },
+ error: function(status, error, reason){errorCallback(status, error, reason)}
+ });
+ end
+
+ it 'should return ID and revision of the document'
+ db.saveDoc(doc, {
+ success: function(resp, status){
+ resp.id.should.be_a String
+ resp.id.should.have_length 32
+ resp.rev.should.be_a String
+ resp.rev.length.should.be_at_least 30
+ },
+ error: function(status, error, reason){errorCallback(status, error, reason)}
+ });
+ end
+
+ it 'should result in a saved document with generated ID'
+ db.saveDoc(doc, {
+ success: function(resp, status){
+ db.openDoc(resp.id, {
+ success: function(resp2){
+ resp2.Name.should.eql "Kara Thrace"
+ resp2.Callsign.should.eql "Starbuck"
+ },
+ error: function(status, error, reason){errorCallback(status, error, reason)}
+ });
+ },
+ error: function(status, error, reason){errorCallback(status, error, reason)}
+ });
+ end
+
+ it 'should save the document with the specified ID'
+ doc._id = "123";
+ db.saveDoc(doc, {
+ success: function(resp, status){
+ resp.id.should.eql "123"
+ },
+ error: function(status, error, reason){errorCallback(status, error, reason)}
+ });
+ end
+
+ it 'should pass through the options'
+ db.saveDoc(doc, {
+ "batch" : "ok",
+ success: function(resp, status){
+ // when using batch ok, couch sends a 202 status immediately
+ status.should.eql 202
+ },
+ error: function(status, error, reason){errorCallback(status, error, reason)}
+ });
+ end
+
+ it 'should alert with an error message prefix'
+ db.saveDoc("asdf");
+ alert_msg.should.match /The document could not be saved/
+ end
+ end
+
+ describe 'bulkSave'
+ before_each
+ doc = {"Name" : "Kara Thrace", "Callsign" : "Starbuck"};
+ doc2 = {"Name" : "Karl C. Agathon", "Callsign" : "Helo"};
+ doc3 = {"Name" : "Sharon Valerii", "Callsign" : "Boomer"};
+ docs = [doc, doc2, doc3];
+ end
+
+ it 'should save all documents'
+ db.bulkSave({"docs": docs});
+ db.allDocs({
+ success: function(resp) {
+ resp.total_rows.should.eql 3
+ },
+ error: function(status, error, reason){errorCallback(status, error, reason)}
+ });
+ end
+
+ it 'should result in saved documents'
+ doc3._id = "789";
+ db.bulkSave({"docs": [doc3]});
+
+ db.openDoc("789", {
+ success: function(resp){
+ resp.Name.should.eql "Sharon Valerii"
+ resp.Callsign.should.eql "Boomer"
+ },
+ error: function(status, error, reason){errorCallback(status, error, reason)}
+ });
+ end
+
+ it 'should return ID and revision of the documents'
+ db.bulkSave({"docs": docs},{
+ success: function(resp){
+ resp[0].id.should.be_a String
+ resp[0].id.should.have_length 32
+ resp[0].rev.should.be_a String
+ resp[0].rev.length.should.be_at_least 30
+ resp[1].id.should.be_a String
+ resp[1].id.should.have_length 32
+ resp[1].rev.should.be_a String
+ resp[1].rev.length.should.be_at_least 30
+ resp[2].id.should.be_a String
+ resp[2].id.should.have_length 32
+ resp[2].rev.should.be_a String
+ resp[2].rev.length.should.be_at_least 30
+ },
+ error: function(status, error, reason){errorCallback(status, error, reason)}
+ });
+ end
+
+ it 'should save the documents with the specified IDs'
+ doc._id = "123";
+ doc2._id = "456";
+ docs = [doc, doc2, doc3];
+
+ db.bulkSave({"docs": docs},{
+ success: function(resp){
+ resp[0].id.should.eql "123"
+ resp[1].id.should.eql "456"
+ resp[2].id.should.have_length 32
+ },
+ error: function(status, error, reason){errorCallback(status, error, reason)}
+ });
+ end
+
+ it 'should pass through the options'
+ // a lengthy way to test that a conflict can't be created with the
+ // all_or_nothing option set to false, but can be when it's true.
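+ // With all_or_nothing set to true, CouchDB stores the new revision as a
+ // conflict instead of rejecting it, hence the _conflicts check below.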
+
+ var old_doc = {"Name" : "Louanne Katraine", "Callsign" : "Kat", "_id" : "123"};
+ db.saveDoc(old_doc, {
+ success: function(resp){
+ old_doc._rev = resp.rev;
+ },
+ error: function(status, error, reason){errorCallback(status, error, reason)}
+ });
+
+ var new_doc = {"Name" : "Sasha", "Callsign" : "Kat", "_id" : "123"};
+
+ db.bulkSave({"docs": [new_doc], "all_or_nothing": false}, {
+ success: function(resp){
+ resp[0].id.should.eql "123"
+ resp[0].error.should.eql "conflict"
+ resp[0].reason.should.eql "Document update conflict."
+ },
+ error: function(status, error, reason){errorCallback(status, error, reason)}
+ });
+
+ db.bulkSave({"docs": [new_doc], "all_or_nothing": true}, {
+ success: function(resp){
+ resp[0].id.should.eql "123"
+ resp[0].rev.should.not.eql old_doc._rev
+ },
+ error: function(status, error, reason){errorCallback(status, error, reason)}
+ });
+
+ db.openDoc("123", {
+ "conflicts": true,
+ success: function(resp){
+ resp._conflicts[0].should.eql old_doc._rev
+ },
+ error: function(status, error, reason){errorCallback(status, error, reason)}
+ });
+ end
+
+ it 'should alert with an error message prefix'
+ db.bulkSave("asdf");
+ alert_msg.should.match /The documents could not be saved/
+ end
+ end
+end \ No newline at end of file
diff --git a/share/www/spec/jquery_couch_js_instance_methods_3_spec.js b/share/www/spec/jquery_couch_js_instance_methods_3_spec.js
new file mode 100644
index 00000000..5d27d817
--- /dev/null
+++ b/share/www/spec/jquery_couch_js_instance_methods_3_spec.js
@@ -0,0 +1,540 @@
+// Licensed under the Apache License, Version 2.0 (the "License"); you may not
+// use this file except in compliance with the License. You may obtain a copy of
+// the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations under
+// the License.
+
+// Specs for jquery_couch.js lines 300-411
+
+describe 'jQuery couchdb db'
+ before
+ stubAlert();
+ end
+
+ after
+ destubAlert();
+ end
+
+ before_each
+ db = $.couch.db('spec_db');
+ db.create();
+ end
+
+ after_each
+ db.drop();
+ end
+
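+ // Note: stubAlert/destubAlert, errorCallback and successCallback are
+ // assumed to be defined in custom_helpers.js, which run.html loads
+ // before these suites.
+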
+ describe 'removeDoc'
+ before_each
+ doc = {"Name" : "Louanne Katraine", "Callsign" : "Kat", "_id" : "345"};
+ saved_doc = {};
+ db.saveDoc(doc, {
+ success: function(resp){
+ saved_doc = resp;
+ },
+ error: function(status, error, reason){errorCallback(status, error, reason)}
+ });
+ end
+
+ it 'should result in a deleted document'
+ db.removeDoc({_id : "345", _rev : saved_doc.rev}, {
+ success: function(resp){
+ db.openDoc("345", {
+ error: function(status, error, reason){
+ status.should.eql 404
+ error.should.eql "not_found"
+ reason.should.eql "deleted"
+ },
+ success: function(resp){successCallback(resp)}
+ });
+ },
+ error: function(status, error, reason){errorCallback(status, error, reason)}
+ });
+ end
+
+ it 'should return ok true, the ID and the revision of the deleted document'
+ db.removeDoc({_id : "345", _rev : saved_doc.rev}, {
+ success: function(resp){
+ resp.ok.should.be_true
+ resp.id.should.eql "345"
+ resp.rev.should.be_a String
+ resp.rev.length.should.be_at_least 30
+ },
+ error: function(status, error, reason){errorCallback(status, error, reason)}
+ });
+ end
+
+ it 'should record the revision in the deleted document'
+ db.removeDoc({_id : "345", _rev : saved_doc.rev}, {
+ success: function(resp){
+ db.openDoc("345", {
+ rev: resp.rev,
+ success: function(resp2){
+ resp2._rev.should.eql resp.rev
+ resp2._id.should.eql resp.id
+ resp2._deleted.should.be_true
+ },
+ error: function(status, error, reason){errorCallback(status, error, reason)}
+ });
+ },
+ error: function(status, error, reason){errorCallback(status, error, reason)}
+ });
+ end
+
+ it 'should alert with an error message prefix'
+ db.removeDoc({_id: "asdf"});
+ alert_msg.should.match /The document could not be deleted/
+ end
+ end
+
+ describe 'bulkRemove'
+ before_each
+ doc = {"Name" : "Kara Thrace", "Callsign" : "Starbuck", "_id" : "123"};
+ doc2 = {"Name" : "Karl C. Agathon", "Callsign" : "Helo", "_id" : "456"};
+ doc3 = {"Name" : "Sharon Valerii", "Callsign" : "Boomer", "_id" : "789"};
+ docs = [doc, doc2, doc3];
+
+ db.bulkSave({"docs": docs}, {
+ success: function(resp){
+ for (var i = 0; i < docs.length; i++) {
+ docs[i]._rev = resp[i].rev;
+ }
+ },
+ error: function(status, error, reason){errorCallback(status, error, reason)}
+ });
+ end
+
+ it 'should remove all documents specified'
+ db.bulkRemove({"docs": docs});
+ db.allDocs({
+ success: function(resp) {
+ resp.total_rows.should.eql 0
+ },
+ error: function(status, error, reason){errorCallback(status, error, reason)}
+ });
+ end
+
+ it 'should not remove documents that were not specified'
+ db.bulkRemove({"docs": [doc3]});
+ db.allDocs({
+ success: function(resp) {
+ resp.total_rows.should.eql 2
+ },
+ error: function(status, error, reason){errorCallback(status, error, reason)}
+ });
+ end
+
+ it 'should result in deleted documents'
+ db.bulkRemove({"docs": docs}, {
+ success: function(resp){
+ db.openDoc("123", {
+ error: function(status, error, reason){
+ status.should.eql 404
+ error.should.eql "not_found"
+ reason.should.eql "deleted"
+ },
+ success: function(resp){successCallback(resp)}
+ });
+ },
+ error: function(status, error, reason){errorCallback(status, error, reason)}
+ });
+ end
+
+ it 'should return the IDs and revisions of the deleted documents'
+ db.bulkRemove({"docs": docs}, {
+ success: function(resp){
+ resp[0].id.should.eql "123"
+ resp[0].rev.should.be_a String
+ resp[0].rev.length.should.be_at_least 30
+ resp[1].id.should.eql "456"
+ resp[1].rev.should.be_a String
+ resp[1].rev.length.should.be_at_least 30
+ resp[2].id.should.eql "789"
+ resp[2].rev.should.be_a String
+ resp[2].rev.length.should.be_at_least 30
+ },
+ error: function(status, error, reason){errorCallback(status, error, reason)}
+ });
+ end
+
+ it 'should record the revision in the deleted documents'
+ db.bulkRemove({"docs": docs}, {
+ success: function(resp){
+ db.openDoc("123", {
+ rev: resp[0].rev,
+ success: function(resp2){
+ resp2._rev.should.eql resp[0].rev
+ resp2._id.should.eql resp[0].id
+ resp2._deleted.should.be_true
+ },
+ error: function(status, error, reason){errorCallback(status, error, reason)}
+ });
+ },
+ error: function(status, error, reason){errorCallback(status, error, reason)}
+ });
+ end
+
+ it 'should alert with an error message prefix'
+ db.bulkRemove({docs: ["asdf"]});
+ alert_msg.should.match /The documents could not be deleted/
+ end
+ end
+
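+ // copyDoc maps to CouchDB's HTTP COPY: the target document id (optionally
+ // suffixed with "?rev=<rev>" to overwrite) is sent in the Destination header.
+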
+ describe 'copyDoc'
+ before_each
+ doc = {"Name" : "Sharon Agathon", "Callsign" : "Athena", "_id" : "123"};
+ db.saveDoc(doc);
+ end
+
+ it 'should result in another document with the same data and a new id'
+ db.copyDoc("123", {
+ success: function(resp){
+ resp.id.should.eql "456"
+ resp.rev.length.should.be_at_least 30
+ },
+ error: function(status, error, reason){errorCallback(status, error, reason)}
+ }, {
+ headers: {"Destination":"456"}
+ });
+
+ db.openDoc("456", {
+ success: function(resp){
+ resp.Name.should.eql "Sharon Agathon"
+ resp.Callsign.should.eql "Athena"
+ },
+ error: function(status, error, reason){errorCallback(status, error, reason)}
+ });
+ end
+
+ it 'should throw an error when trying to overwrite a document without providing a revision'
+ doc2 = {"Name" : "Louanne Katraine", "Callsign" : "Kat", "_id" : "456"};
+ db.saveDoc(doc2);
+
+ db.copyDoc("123", {
+ error: function(status, error, reason){
+ status.should.eql 409
+ error.should.eql "conflict"
+ reason.should.eql "Document update conflict."
+ },
+ success: function(resp){successCallback(resp)}
+ }, {
+ headers: {"Destination":"456"}
+ });
+ end
+
+ it 'should overwrite a document with the correct revision'
+ doc2 = {"Name" : "Louanne Katraine", "Callsign" : "Kat", "_id" : "456"};
+ var doc2_rev;
+ db.saveDoc(doc2, {
+ success: function(resp){
+ doc2_rev = resp.rev;
+ }
+ });
+
+ db.copyDoc("123", {
+ success: function(resp){
+ resp.id.should.eql "456"
+ resp.rev.length.should.be_at_least 30
+ },
+ error: function(status, error, reason){errorCallback(status, error, reason)}
+ }, {
+ headers: {"Destination":"456?rev=" + doc2_rev}
+ });
+
+ db.openDoc("456", {
+ success: function(resp){
+ resp.Name.should.eql "Sharon Agathon"
+ resp.Callsign.should.eql "Athena"
+ },
+ error: function(status, error, reason){errorCallback(status, error, reason)}
+ });
+ end
+
+ it 'should alert with an error message prefix'
+ db.copyDoc("asdf", {}, {});
+ alert_msg.should.match /The document could not be copied/
+ end
+ end
+
+ describe 'query'
+ before_each
+ db.saveDoc({"Name" : "Cally Tyrol", "job" : "deckhand", "_id" : "789"});
+ db.saveDoc({"Name" : "Felix Gaeta", "job" : "officer", "_id" : "123"});
+ db.saveDoc({"Name" : "Samuel T. Anders", "job" : "pilot", "_id" : "456"});
+ map_function = "function(doc) { emit(doc._id, 1); }";
+ reduce_function = "function(key, values, rereduce) { return sum(values); }";
+ end
+
+ it 'should apply the map function'
+ db.query(map_function, null, null, {
+ success: function(resp){
+ resp.rows.should.have_length 3
+ resp.rows[0].id.should.eql "123"
+ resp.rows[0].key.should.eql "123"
+ resp.rows[0].value.should.eql 1
+ resp.rows[1].id.should.eql "456"
+ resp.rows[1].key.should.eql "456"
+ resp.rows[1].value.should.eql 1
+ resp.rows[2].id.should.eql "789"
+ resp.rows[2].key.should.eql "789"
+ resp.rows[2].value.should.eql 1
+ },
+ error: function(status, error, reason){errorCallback(status, error, reason)}
+ });
+ end
+
+ it 'should apply the reduce function'
+ db.query(map_function, reduce_function, null, {
+ success: function(resp){
+ resp.rows.should.have_length 1
+ resp.rows[0].key.should.be_null
+ resp.rows[0].value.should.eql 3
+ },
+ error: function(status, error, reason){errorCallback(status, error, reason)}
+ });
+ end
+
+ it 'should pass through the options'
+ db.query(map_function, null, null, {
+ "startkey": "456",
+ success: function(resp){
+ resp.rows.should.have_length 2
+ resp.rows[0].id.should.eql "456"
+ resp.rows[0].key.should.eql "456"
+ resp.rows[0].value.should.eql 1
+ resp.rows[1].id.should.eql "789"
+ resp.rows[1].key.should.eql "789"
+ resp.rows[1].value.should.eql 1
+ },
+ error: function(status, error, reason){errorCallback(status, error, reason)}
+ });
+ end
+
+ it 'should pass through the keys'
+ // TODO: implement passing "keys" through in jquery.couch.js so this spec can pass
+ console.log("TODO: implement passing 'keys' through in jquery.couch.js")
+ db.query(map_function, null, null, {
+ "keys": ["456", "123"],
+ success: function(resp){
+ resp.rows.should.have_length 2
+ resp.rows[0].id.should.eql "456"
+ resp.rows[0].key.should.eql "456"
+ resp.rows[0].value.should.eql 1
+ resp.rows[1].id.should.eql "123"
+ resp.rows[1].key.should.eql "123"
+ resp.rows[1].value.should.eql 1
+ },
+ error: function(status, error, reason){errorCallback(status, error, reason)}
+ });
+ end
+
+ it 'should pass through the options and the keys'
+ // TODO: implement passing "keys" through in jquery.couch.js so this spec can pass
+ console.log("TODO: implement passing 'keys' through in jquery.couch.js")
+ db.query(map_function, null, null, {
+ "include_docs":"true",
+ "keys": ["456"],
+ success: function(resp){
+ resp.rows.should.have_length 1
+ resp.rows[0].id.should.eql "456"
+ resp.rows[0].key.should.eql "456"
+ resp.rows[0].value.should.eql 1
+ resp.rows[0].doc["job"].should.eql "pilot"
+ resp.rows[0].doc["_rev"].length.should.be_at_least 30
+ },
+ error: function(status, error, reason){errorCallback(status, error, reason)}
+ });
+ end
+
+ it 'should also apply a query in Erlang'
+ // when this test fails, read this: http://wiki.apache.org/couchdb/EnableErlangViews
+ var erlang_map = 'fun({Doc}) -> ' +
+ 'ID = proplists:get_value(<<"_id">>, Doc, null), ' +
+ 'Emit(ID, 1) ' +
+ 'end.';
+ db.query(erlang_map, null, "erlang", {
+ success: function(resp){
+ resp.rows.should.have_length 3
+ resp.rows[0].id.should.eql "123"
+ resp.rows[0].key.should.eql "123"
+ resp.rows[0].value.should.eql 1
+ resp.rows[1].id.should.eql "456"
+ resp.rows[1].key.should.eql "456"
+ resp.rows[1].value.should.eql 1
+ resp.rows[2].id.should.eql "789"
+ resp.rows[2].key.should.eql "789"
+ resp.rows[2].value.should.eql 1
+ },
+ error: function(status, error, reason){errorCallback(status, error, reason)}
+ });
+ end
+
+ it 'should alert with an error message prefix'
+ db.query("asdf");
+ alert_msg.should.match /An error occurred querying the database/
+ end
+ end
+
+ describe 'view'
+ before_each
+ db.saveDoc({"Name" : "Cally Tyrol", "job" : "deckhand", "_id" : "789"});
+ db.saveDoc({"Name" : "Felix Gaeta", "job" : "officer", "_id" : "123"});
+ db.saveDoc({"Name" : "Samuel T. Anders", "job" : "pilot", "_id" : "456"});
+ view = {
+ "views" : {
+ "people" : {
+ "map" : "function(doc) { emit(doc._id, doc.Name); }"
+ }
+ },
+ "_id" : "_design/spec_db"
+ };
+ db.saveDoc(view);
+ end
+
+ it 'should apply the view'
+ db.view('spec_db/people', {
+ success: function(resp){
+ resp.rows.should.have_length 3
+ resp.rows[0].id.should.eql "123"
+ resp.rows[0].key.should.eql "123"
+ resp.rows[0].value.should.eql "Felix Gaeta"
+ resp.rows[1].id.should.eql "456"
+ resp.rows[1].key.should.eql "456"
+ resp.rows[1].value.should.eql "Samuel T. Anders"
+ resp.rows[2].id.should.eql "789"
+ resp.rows[2].key.should.eql "789"
+ resp.rows[2].value.should.eql "Cally Tyrol"
+ },
+ error: function(status, error, reason){errorCallback(status, error, reason)}
+ });
+ end
+
+ it 'should pass through the options'
+ db.view('spec_db/people', {
+ "skip":"2",
+ success: function(resp){
+ resp.rows.should.have_length 1
+ resp.rows[0].id.should.eql "789"
+ resp.rows[0].key.should.eql "789"
+ resp.rows[0].value.should.eql "Cally Tyrol"
+ },
+ error: function(status, error, reason){errorCallback(status, error, reason)}
+ });
+ end
+
+ it 'should pass through the keys'
+ db.view('spec_db/people', {
+ "keys":["456", "123"],
+ success: function(resp){
+ resp.rows.should.have_length 2
+ resp.rows[0].id.should.eql "456"
+ resp.rows[0].key.should.eql "456"
+ resp.rows[0].value.should.eql "Samuel T. Anders"
+ resp.rows[1].id.should.eql "123"
+ resp.rows[1].key.should.eql "123"
+ resp.rows[1].value.should.eql "Felix Gaeta"
+ },
+ error: function(status, error, reason){errorCallback(status, error, reason)}
+ });
+ end
+
+ it 'should pass through the options and the keys'
+ db.view('spec_db/people', {
+ "include_docs":"true",
+ "keys":["456"],
+ success: function(resp){
+ resp.rows.should.have_length 1
+ resp.rows[0].id.should.eql "456"
+ resp.rows[0].key.should.eql "456"
+ resp.rows[0].value.should.eql "Samuel T. Anders"
+ resp.rows[0].doc["job"].should.eql "pilot"
+ resp.rows[0].doc["_rev"].length.should.be_at_least 30
+ },
+ error: function(status, error, reason){errorCallback(status, error, reason)}
+ });
+ end
+
+ it 'should throw a 404 when the view does not exist'
+ db.view('spec_db/non_existing_view', {
+ error: function(status, error, reason){
+ status.should.eql 404
+ error.should.eql "not_found"
+ reason.should.eql "missing_named_view"
+ },
+ success: function(resp){successCallback(resp)}
+ });
+ end
+
+ it 'should alert with an error message prefix'
+ db.view("asdf");
+ alert_msg.should.match /An error occurred accessing the view/
+ end
+ end
+
+ describe 'setDbProperty'
+ it 'should return ok true'
+ db.setDbProperty("_revs_limit", 1500, {
+ success: function(resp){
+ resp.ok.should.be_true
+ },
+ error: function(status, error, reason){errorCallback(status, error, reason)}
+ });
+ end
+
+ it 'should set a db property'
+ db.setDbProperty("_revs_limit", 1500);
+ db.getDbProperty("_revs_limit", {
+ success: function(resp){
+ resp.should.eql 1500
+ },
+ error: function(status, error, reason){errorCallback(status, error, reason)}
+ });
+ db.setDbProperty("_revs_limit", 1200);
+ db.getDbProperty("_revs_limit", {
+ success: function(resp){
+ resp.should.eql 1200
+ },
+ error: function(status, error, reason){errorCallback(status, error, reason)}
+ });
+ end
+
+ it 'should alert with an error message prefix'
+ db.setDbProperty("asdf");
+ alert_msg.should.match /The property could not be updated/
+ end
+ end
+
+ describe 'getDbProperty'
+ it 'should get a db property'
+ db.setDbProperty("_revs_limit", 1200);
+ db.getDbProperty("_revs_limit", {
+ success: function(resp){
+ resp.should.eql 1200
+ },
+ error: function(status, error, reason){errorCallback(status, error, reason)}
+ });
+ end
+
+ it 'should throw a 404 when the property does not exist'
+ db.getDbProperty("_doesnt_exist", {
+ error: function(status, error, reason){
+ status.should.eql 404
+ error.should.eql "not_found"
+ reason.should.eql "missing"
+ },
+ success: function(resp){successCallback(resp)}
+ });
+ end
+
+ it 'should alert with an error message prefix'
+ db.getDbProperty("asdf");
+ alert_msg.should.match /The property could not be retrieved/
+ end
+ end
+end \ No newline at end of file
diff --git a/share/www/spec/run.html b/share/www/spec/run.html
new file mode 100644
index 00000000..e438333d
--- /dev/null
+++ b/share/www/spec/run.html
@@ -0,0 +1,46 @@
+<!--
+Licensed under the Apache License, Version 2.0 (the "License"); you may not
+use this file except in compliance with the License. You may obtain a copy of
+the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+License for the specific language governing permissions and limitations under
+the License.
+-->
+<html>
+ <head>
+ <link type="text/css" rel="stylesheet" href="../script/jspec/jspec.css" />
+ <script src="../script/jquery.js"></script>
+ <script src="../script/sha1.js"></script>
+ <script src="../script/jspec/jspec.js"></script>
+ <script src="../script/jspec/jspec.jquery.js"></script>
+ <script src="../script/jspec/jspec.xhr.js"></script>
+ <script src="./custom_helpers.js"></script>
+ <script src="../script/couch.js"></script>
+ <script src="../script/jquery.couch.js"></script>
+ <script>
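+ // Runs each spec file in sequence; {failuresOnly: true} limits the
+ // report to failing assertions.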
+ function runSuites() {
+ JSpec
+ .exec('couch_js_class_methods_spec.js')
+ .exec('couch_js_instance_methods_1_spec.js')
+ .exec('couch_js_instance_methods_2_spec.js')
+ .exec('couch_js_instance_methods_3_spec.js')
+ .exec('jquery_couch_js_class_methods_spec.js')
+ .exec('jquery_couch_js_instance_methods_1_spec.js')
+ .exec('jquery_couch_js_instance_methods_2_spec.js')
+ .exec('jquery_couch_js_instance_methods_3_spec.js')
+ .run({failuresOnly: true})
+ .report()
+ }
+ </script>
+ </head>
+ <body class="jspec" onLoad="runSuites();">
+ <div id="jspec-top"><h2 id="jspec-title">JSpec <em><script>document.write(JSpec.version)</script></em></h2></div>
+ <div id="jspec"></div>
+ <div id="jspec-bottom"></div>
+ </body>
+</html>
diff --git a/share/www/status.html b/share/www/status.html
index 3e23f85d..2067ab9b 100644
--- a/share/www/status.html
+++ b/share/www/status.html
@@ -19,8 +19,10 @@ specific language governing permissions and limitations under the License.
<meta http-equiv="Content-Type" content="text/html;charset=utf-8">
<link rel="stylesheet" href="style/layout.css?0.11.0" type="text/css">
<script src="script/json2.js"></script>
- <script src="script/jquery.js?1.3.1"></script>
+ <script src="script/sha1.js"></script>
+ <script src="script/jquery.js?1.4.2"></script>
<script src="script/jquery.couch.js?0.11.0"></script>
+ <script src="script/jquery.dialog.js?0.11.0"></script>
<script src="script/futon.js?0.11.0"></script>
</head>
<body><div id="wrap">
diff --git a/share/www/style/layout.css b/share/www/style/layout.css
index b07a59e5..4fff4980 100644
--- a/share/www/style/layout.css
+++ b/share/www/style/layout.css
@@ -55,7 +55,7 @@ code.null { color: #666; }
button { font-size: 100%; -webkit-appearance: square-button; }
button[disabled] { color: #999; }
input, select, textarea { background: #fff; border: 1px solid;
- border-color: #999 #ddd #ddd #999; margin: 0; padding: 1px;
+ border-color: #999 #ddd #ddd #999; color: #000; margin: 0; padding: 1px;
}
input.placeholder { color: #999; }
textarea {
@@ -219,8 +219,8 @@ body.fullwidth #sidebar { border-bottom: 1px solid #333; right: 0;
height: 100%; margin-right: 210px; position: relative;
}
body.fullwidth #wrap { margin-right: 0; }
-#content { padding: 1em 16px 3em 10px; overflow: auto; position: absolute;
- top: 33px; bottom: 0; left: 0; right: 0;
+#content { padding: 1em 16px 3em 10px; overflow: auto; overflow-y: scroll;
+ position: absolute; top: 33px; bottom: 0; left: 0; right: 0;
}
/* Toolbar */
@@ -229,6 +229,7 @@ body.fullwidth #wrap { margin-right: 0; }
margin: 0 0 .5em; padding: 5px 5px 5px 3px;
}
#toolbar li { display: inline; }
+#toolbar li.current { float: right; }
#toolbar button { background: transparent 2px 2px no-repeat; border: none;
color: #666; margin: 0; padding: 2px 1em 2px 22px; cursor: pointer;
font-size: 95%; line-height: 16px;
@@ -236,13 +237,13 @@ body.fullwidth #wrap { margin-right: 0; }
#toolbar button:hover { background-position: 2px -30px; color: #000; }
#toolbar button:active { background-position: 2px -62px; color: #000; }
#toolbar button.add { background-image: url(../image/add.png); }
+#toolbar button.security { background-image: url(../image/key.png); }
#toolbar button.compact { background-image: url(../image/compact.png); }
-#toolbar button.viewcleanup { background-image: url(../image/compact.png); }
-#toolbar button.compactview { background-image: url(../image/compact.png); }
#toolbar button.delete { background-image: url(../image/delete.png); }
#toolbar button.load { background-image: url(../image/load.png); }
#toolbar button.run { background-image: url(../image/run.png); }
#toolbar button.save { background-image: url(../image/save.png); }
+#toolbar button.share { background-image: url(../image/compact.png); }
/* Dialogs */
@@ -260,18 +261,17 @@ body.fullwidth #wrap { margin-right: 0; }
-webkit-box-shadow: 4px 4px 6px #333;
}
*html #dialog { width: 33em; }
-body.loading #dialog h2 {
- background: url(../image/spinner.gif) center no-repeat;
-}
#dialog.loading { width: 220px; height: 80px; }
#dialog.loaded { background-image: none; }
-#dialog h2 { background: #666; border-top: 1px solid #555;
- border-bottom: 1px solid #777; color: #ccc; font-size: 110%;
- font-weight: bold; margin: 0 -2em; padding: .35em 2em;
+#dialog h2 { background: #666 98% 50% no-repeat;
+ border-top: 1px solid #555; border-bottom: 1px solid #777; color: #ccc;
+ font-size: 110%; font-weight: bold; margin: 0 -1em; padding: .35em 1em;
}
-#dialog h3 {
- color: #ccc; font-size: 110%;
- font-weight: bold; margin: 0 -2em; padding: .35em 2em;
+body.loading #dialog h2 {
+ background-image: url(../image/spinner.gif);
+}
+#dialog h3 { color: #ccc; font-size: 100%; font-weight: bold; margin: 0 -2em;
+ padding: .35em 2em 0;
}
#dialog fieldset { background: #222; border-top: 1px solid #111;
margin: 0 0 1em; padding: .5em 1em 1em;
@@ -290,6 +290,11 @@ body.loading #dialog h2 {
#dialog fieldset input { background-color: #e9e9e9; vertical-align: middle; }
#dialog fieldset input.error { background-color: #f9e4e4; }
#dialog fieldset div.error { padding-top: .3em; color: #b33; }
+#dialog fieldset.radiogroup { padding-top: 1em; }
+#dialog fieldset.radiogroup label { position: relative; padding-left: 25px; }
+#dialog fieldset.radiogroup input { position: absolute; left: 5px; top: 2px; }
+#dialog fieldset.radiogroup p.help { margin-top: .5em; margin-left: 25px; }
+#dialog fieldset.radiogroup hr { border-color: #333; margin-left: 25px; }
#dialog .buttons { padding: 0 .5em .5em; text-align: right; }
#dialog .buttons button { background: #444; border: 1px solid #aaa;
color: #ddd; cursor: pointer; font-size: 90%; font-weight: normal;
@@ -319,6 +324,13 @@ body.loading #dialog h2 {
}
#switch select { font-size: 90%; }
+/* Stale views checkbox */
+
+#staleviews {
+ color: #666; float: right; font-size: 90%;
+ font-weight: bold; line-height: 16px; padding: 5px;
+}
+
/* View function editing */
#viewcode { background: #fff; border: 1px solid;
@@ -351,8 +363,8 @@ body.loading #dialog h2 {
white-space: nowrap;
}
#viewcode .code textarea { border: none; border-top: 1px solid #ccc;
- color: #333; font-size: 11px; margin: 0; min-height: 50px; padding: .4em 0 0;
- resize: none; width: 100%; overflow: auto;
+ color: #333; font-size: 11px; margin: 0; min-height: 50px; overflow: auto;
+ padding: .4em 0 0; resize: none; width: 100%;
}
#viewcode .code textarea:focus { background: #e9f4ff; }
#viewcode .bottom { border-bottom: none; clear: left; padding: 1px 3px; }
@@ -491,7 +503,7 @@ body.loading #dialog h2 {
#fields tbody.content td ul.attachments li {
margin-bottom: .3em; min-height: 20px; padding-left: 20px;
}
-#fields tbody.content td ul.attachments tt { font-size: 95%; }
+#fields tbody.content td ul.attachments tt { font-size: 11px; }
#fields tbody.content td ul.attachments li span.info { color: #666;
display: block; font-size: 95%;
}
@@ -598,3 +610,9 @@ form#replicator p.actions { padding: 1px; clear: left; margin: 0;
font-family: "DejaVu Sans Mono",Menlo,Courier,monospace;
font-size: 11px;
}
+
+
+/* Session */
+#loginSignup {
+ font-size: 200%;
+} \ No newline at end of file
diff --git a/src/couchdb/Makefile.am b/src/couchdb/Makefile.am
index 9e909964..c325440d 100644
--- a/src/couchdb/Makefile.am
+++ b/src/couchdb/Makefile.am
@@ -17,7 +17,7 @@ couchlibdir = $(localerlanglibdir)/couch-$(version)
couchincludedir = $(couchlibdir)/include
couchebindir = $(couchlibdir)/ebin
-couchinclude_DATA = couch_db.hrl
+couchinclude_DATA = couch_db.hrl couch_js_functions.hrl
couchebin_DATA = $(compiled_files)
# dist_devdoc_DATA = $(doc_base) $(doc_modules)
@@ -29,7 +29,9 @@ CLEANFILES = $(compiled_files) $(doc_base)
source_files = \
couch.erl \
couch_app.erl \
+ couch_auth_cache.erl \
couch_btree.erl \
+ couch_changes.erl \
couch_config.erl \
couch_config_writer.erl \
couch_db.erl \
@@ -48,10 +50,14 @@ source_files = \
couch_httpd_show.erl \
couch_httpd_view.erl \
couch_httpd_misc_handlers.erl \
+ couch_httpd_proxy.erl \
+ couch_httpd_rewrite.erl \
couch_httpd_stats_handlers.erl \
+ couch_httpd_vhost.erl \
couch_key_tree.erl \
couch_log.erl \
couch_native_process.erl \
+ couch_os_daemons.erl \
couch_os_process.erl \
couch_query_servers.erl \
couch_ref_counter.erl \
@@ -63,6 +69,7 @@ source_files = \
couch_rep_reader.erl \
couch_rep_sup.erl \
couch_rep_writer.erl \
+ couch_rep_db_listener.erl \
couch_server.erl \
couch_server_sup.erl \
couch_stats_aggregator.erl \
@@ -78,13 +85,15 @@ source_files = \
couch_db_updater.erl \
couch_work_queue.erl
-EXTRA_DIST = $(source_files) couch_db.hrl
+EXTRA_DIST = $(source_files) couch_db.hrl couch_js_functions.hrl
compiled_files = \
couch.app \
couch.beam \
couch_app.beam \
+ couch_auth_cache.beam \
couch_btree.beam \
+ couch_changes.beam \
couch_config.beam \
couch_config_writer.beam \
couch_db.beam \
@@ -99,14 +108,18 @@ compiled_files = \
couch_httpd_db.beam \
couch_httpd_auth.beam \
couch_httpd_oauth.beam \
+ couch_httpd_proxy.beam \
couch_httpd_external.beam \
couch_httpd_show.beam \
couch_httpd_view.beam \
couch_httpd_misc_handlers.beam \
+ couch_httpd_rewrite.beam \
couch_httpd_stats_handlers.beam \
+ couch_httpd_vhost.beam \
couch_key_tree.beam \
couch_log.beam \
couch_native_process.beam \
+ couch_os_daemons.beam \
couch_os_process.beam \
couch_query_servers.beam \
couch_ref_counter.beam \
@@ -118,6 +131,7 @@ compiled_files = \
couch_rep_reader.beam \
couch_rep_sup.beam \
couch_rep_writer.beam \
+ couch_rep_db_listener.beam \
couch_server.beam \
couch_server_sup.beam \
couch_stats_aggregator.beam \
@@ -175,7 +189,7 @@ couch.app: couch.app.tpl
$@ < $<
else
couch.app: couch.app.tpl
- modules=`find . -name "*.erl" -exec basename {} .erl \; | tr '\n' ',' | sed "s/,$$//"`; \
+ modules=`{ find . -name "*.erl" -exec basename {} .erl \; | tr '\n' ','; echo ''; } | sed "s/,$$//"`; \
sed -e "s|%package_name%|@package_name@|g" \
-e "s|%version%|@version@|g" \
-e "s|@modules@|$$modules|g" \
@@ -190,6 +204,6 @@ endif
# $(ERL) -noshell -run edoc_run files [\"$<\"]
-%.beam: %.erl couch_db.hrl
+%.beam: %.erl couch_db.hrl couch_js_functions.hrl
$(ERLC) $(ERLC_FLAGS) ${TEST} $<;
diff --git a/src/couchdb/couch.app.tpl.in b/src/couchdb/couch.app.tpl.in
index fa86d2ec..36b0b34c 100644
--- a/src/couchdb/couch.app.tpl.in
+++ b/src/couchdb/couch.app.tpl.in
@@ -26,4 +26,4 @@
]}},
{applications, [kernel, stdlib]},
{included_applications, [crypto, sasl, inets, oauth, ibrowse, mochiweb]}
-]}. \ No newline at end of file
+]}.
diff --git a/src/couchdb/couch_app.erl b/src/couchdb/couch_app.erl
index 1b64434a..232953d9 100644
--- a/src/couchdb/couch_app.erl
+++ b/src/couchdb/couch_app.erl
@@ -20,7 +20,7 @@
start(_Type, DefaultIniFiles) ->
IniFiles = get_ini_files(DefaultIniFiles),
- case start_apps([crypto, sasl, inets, oauth, ssl, ibrowse, mochiweb]) of
+ case start_apps([crypto, public_key, sasl, inets, oauth, ssl, ibrowse, mochiweb]) of
ok ->
couch_server_sup:start_link(IniFiles);
{error, Reason} ->
@@ -48,6 +48,9 @@ start_apps([App|Rest]) ->
start_apps(Rest);
{error, {already_started, App}} ->
start_apps(Rest);
+ {error, _Reason} when App =:= public_key ->
+ % ignore on R12B5
+ start_apps(Rest);
{error, _Reason} ->
{error, {app_would_not_start, App}}
end.
diff --git a/src/couchdb/couch_auth_cache.erl b/src/couchdb/couch_auth_cache.erl
new file mode 100644
index 00000000..e0715b88
--- /dev/null
+++ b/src/couchdb/couch_auth_cache.erl
@@ -0,0 +1,419 @@
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+% http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+-module(couch_auth_cache).
+-behaviour(gen_server).
+
+% public API
+-export([get_user_creds/1]).
+
+% gen_server API
+-export([start_link/0, init/1, handle_call/3, handle_info/2, handle_cast/2]).
+-export([code_change/3, terminate/2]).
+
+-include("couch_db.hrl").
+-include("couch_js_functions.hrl").
+
+-define(STATE, auth_state_ets).
+-define(BY_USER, auth_by_user_ets).
+-define(BY_ATIME, auth_by_atime_ets).
+
+-record(state, {
+ max_cache_size = 0,
+ cache_size = 0,
+ db_notifier = nil
+}).
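+
+% Cache layout: ?STATE holds the auth db name and handle, ?BY_USER maps
+% UserName -> {Credentials, ATime}, and ?BY_ATIME is an ordered_set keyed by
+% access time, used to pick which entry to evict when the cache is full.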
+
+
+-spec get_user_creds(UserName::string() | binary()) ->
+ Credentials::list() | nil.
+
+get_user_creds(UserName) when is_list(UserName) ->
+ get_user_creds(?l2b(UserName));
+
+get_user_creds(UserName) ->
+ UserCreds = case couch_config:get("admins", ?b2l(UserName)) of
+ "-hashed-" ++ HashedPwdAndSalt ->
+ % the name is an admin, now check to see if there is a user doc
+ % which has a matching name, salt, and password_sha
+ [HashedPwd, Salt] = string:tokens(HashedPwdAndSalt, ","),
+ case get_from_cache(UserName) of
+ nil ->
+ [{<<"roles">>, [<<"_admin">>]},
+ {<<"salt">>, ?l2b(Salt)},
+ {<<"password_sha">>, ?l2b(HashedPwd)}];
+ UserProps when is_list(UserProps) ->
+ DocRoles = couch_util:get_value(<<"roles">>, UserProps),
+ [{<<"roles">>, [<<"_admin">> | DocRoles]},
+ {<<"salt">>, ?l2b(Salt)},
+ {<<"password_sha">>, ?l2b(HashedPwd)}]
+ end;
+ _Else ->
+ get_from_cache(UserName)
+ end,
+ validate_user_creds(UserCreds).
+
+
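+% Serves lookups from the ETS cache when possible; a miss falls through to
+% the gen_server, which loads the user doc from the auth db and caches it.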
+get_from_cache(UserName) ->
+ exec_if_auth_db(
+ fun(_AuthDb) ->
+ maybe_refresh_cache(),
+ case ets:lookup(?BY_USER, UserName) of
+ [] ->
+ gen_server:call(?MODULE, {fetch, UserName}, infinity);
+ [{UserName, {Credentials, _ATime}}] ->
+ couch_stats_collector:increment({couchdb, auth_cache_hits}),
+ gen_server:cast(?MODULE, {cache_hit, UserName}),
+ Credentials
+ end
+ end,
+ nil
+ ).
+
+
+validate_user_creds(nil) ->
+ nil;
+validate_user_creds(UserCreds) ->
+ case couch_util:get_value(<<"_conflicts">>, UserCreds) of
+ undefined ->
+ ok;
+ _ConflictList ->
+ throw({unauthorized,
+ <<"User document conflicts must be resolved before the document",
+ " is used for authentication purposes.">>
+ })
+ end,
+ UserCreds.
+
+
+start_link() ->
+ gen_server:start_link({local, ?MODULE}, ?MODULE, [], []).
+
+
+init(_) ->
+ ?STATE = ets:new(?STATE, [set, protected, named_table]),
+ ?BY_USER = ets:new(?BY_USER, [set, protected, named_table]),
+ ?BY_ATIME = ets:new(?BY_ATIME, [ordered_set, private, named_table]),
+ AuthDbName = couch_config:get("couch_httpd_auth", "authentication_db"),
+ true = ets:insert(?STATE, {auth_db_name, ?l2b(AuthDbName)}),
+ true = ets:insert(?STATE, {auth_db, open_auth_db()}),
+ process_flag(trap_exit, true),
+ ok = couch_config:register(
+ fun("couch_httpd_auth", "auth_cache_size", SizeList) ->
+ Size = list_to_integer(SizeList),
+ ok = gen_server:call(?MODULE, {new_max_cache_size, Size}, infinity)
+ end
+ ),
+ ok = couch_config:register(
+ fun("couch_httpd_auth", "authentication_db", DbName) ->
+ ok = gen_server:call(?MODULE, {new_auth_db, ?l2b(DbName)}, infinity)
+ end
+ ),
+ {ok, Notifier} = couch_db_update_notifier:start_link(fun handle_db_event/1),
+ State = #state{
+ db_notifier = Notifier,
+ max_cache_size = list_to_integer(
+ couch_config:get("couch_httpd_auth", "auth_cache_size", "50")
+ )
+ },
+ {ok, State}.
+
+
+handle_db_event({Event, DbName}) ->
+ [{auth_db_name, AuthDbName}] = ets:lookup(?STATE, auth_db_name),
+ case DbName =:= AuthDbName of
+ true ->
+ case Event of
+ deleted -> gen_server:call(?MODULE, auth_db_deleted, infinity);
+ created -> gen_server:call(?MODULE, auth_db_created, infinity);
+ compacted -> gen_server:call(?MODULE, auth_db_compacted, infinity);
+ _Else -> ok
+ end;
+ false ->
+ ok
+ end.
+
+
+handle_call({new_auth_db, AuthDbName}, _From, State) ->
+ NewState = clear_cache(State),
+ true = ets:insert(?STATE, {auth_db_name, AuthDbName}),
+ true = ets:insert(?STATE, {auth_db, open_auth_db()}),
+ {reply, ok, NewState};
+
+handle_call(auth_db_deleted, _From, State) ->
+ NewState = clear_cache(State),
+ true = ets:insert(?STATE, {auth_db, nil}),
+ {reply, ok, NewState};
+
+handle_call(auth_db_created, _From, State) ->
+ NewState = clear_cache(State),
+ true = ets:insert(?STATE, {auth_db, open_auth_db()}),
+ {reply, ok, NewState};
+
+handle_call(auth_db_compacted, _From, State) ->
+ exec_if_auth_db(
+ fun(AuthDb) ->
+ true = ets:insert(?STATE, {auth_db, reopen_auth_db(AuthDb)})
+ end
+ ),
+ {reply, ok, State};
+
+handle_call({new_max_cache_size, NewSize}, _From, State) ->
+ case NewSize >= State#state.cache_size of
+ true ->
+ ok;
+ false ->
+ lists:foreach(
+ fun(_) ->
+ MruTime = ets:last(?BY_ATIME),
+ [{MruTime, UserName}] = ets:lookup(?BY_ATIME, MruTime),
+ true = ets:delete(?BY_ATIME, MruTime),
+ true = ets:delete(?BY_USER, UserName)
+ end,
+ lists:seq(1, State#state.cache_size - NewSize)
+ )
+ end,
+ NewState = State#state{
+ max_cache_size = NewSize,
+ cache_size = lists:min([NewSize, State#state.cache_size])
+ },
+ {reply, ok, NewState};
+
+handle_call({fetch, UserName}, _From, State) ->
+ {Credentials, NewState} = case ets:lookup(?BY_USER, UserName) of
+ [{UserName, {Creds, ATime}}] ->
+ couch_stats_collector:increment({couchdb, auth_cache_hits}),
+ cache_hit(UserName, Creds, ATime),
+ {Creds, State};
+ [] ->
+ couch_stats_collector:increment({couchdb, auth_cache_misses}),
+ Creds = get_user_props_from_db(UserName),
+ State1 = add_cache_entry(UserName, Creds, erlang:now(), State),
+ {Creds, State1}
+ end,
+ {reply, Credentials, NewState};
+
+handle_call(refresh, _From, State) ->
+ exec_if_auth_db(fun refresh_entries/1),
+ {reply, ok, State}.
+
+
+handle_cast({cache_hit, UserName}, State) ->
+ case ets:lookup(?BY_USER, UserName) of
+ [{UserName, {Credentials, ATime}}] ->
+ cache_hit(UserName, Credentials, ATime);
+ _ ->
+ ok
+ end,
+ {noreply, State}.
+
+
+handle_info(_Msg, State) ->
+ {noreply, State}.
+
+
+terminate(_Reason, #state{db_notifier = Notifier}) ->
+ couch_db_update_notifier:stop(Notifier),
+ exec_if_auth_db(fun(AuthDb) -> catch couch_db:close(AuthDb) end),
+ true = ets:delete(?BY_USER),
+ true = ets:delete(?BY_ATIME),
+ true = ets:delete(?STATE).
+
+
+code_change(_OldVsn, State, _Extra) ->
+ {ok, State}.
+
+
+clear_cache(State) ->
+ exec_if_auth_db(fun(AuthDb) -> catch couch_db:close(AuthDb) end),
+ true = ets:delete_all_objects(?BY_USER),
+ true = ets:delete_all_objects(?BY_ATIME),
+ State#state{cache_size = 0}.
+
+
+add_cache_entry(UserName, Credentials, ATime, State) ->
+ case State#state.cache_size >= State#state.max_cache_size of
+ true ->
+ free_mru_cache_entry();
+ false ->
+ ok
+ end,
+ true = ets:insert(?BY_ATIME, {ATime, UserName}),
+ true = ets:insert(?BY_USER, {UserName, {Credentials, ATime}}),
+ State#state{cache_size = couch_util:get_value(size, ets:info(?BY_USER))}.
+
+
+free_mru_cache_entry() ->
+ case ets:last(?BY_ATIME) of
+ '$end_of_table' ->
+ ok; % empty cache
+ MruTime ->
+ [{MruTime, UserName}] = ets:lookup(?BY_ATIME, MruTime),
+ true = ets:delete(?BY_ATIME, MruTime),
+ true = ets:delete(?BY_USER, UserName)
+ end.
+
+
+cache_hit(UserName, Credentials, ATime) ->
+ NewATime = erlang:now(),
+ true = ets:delete(?BY_ATIME, ATime),
+ true = ets:insert(?BY_ATIME, {NewATime, UserName}),
+ true = ets:insert(?BY_USER, {UserName, {Credentials, NewATime}}).
+
+
+refresh_entries(AuthDb) ->
+ case reopen_auth_db(AuthDb) of
+ nil ->
+ ok;
+ AuthDb2 ->
+ case AuthDb2#db.update_seq > AuthDb#db.update_seq of
+ true ->
+ {ok, _, _} = couch_db:enum_docs_since(
+ AuthDb2,
+ AuthDb#db.update_seq,
+ fun(DocInfo, _, _) -> refresh_entry(AuthDb2, DocInfo) end,
+ AuthDb#db.update_seq,
+ []
+ ),
+ true = ets:insert(?STATE, {auth_db, AuthDb2});
+ false ->
+ ok
+ end
+ end.
+
+
+refresh_entry(Db, #doc_info{high_seq = DocSeq} = DocInfo) ->
+ case is_user_doc(DocInfo) of
+ {true, UserName} ->
+ case ets:lookup(?BY_USER, UserName) of
+ [] ->
+ ok;
+ [{UserName, {_OldCreds, ATime}}] ->
+ {ok, Doc} = couch_db:open_doc(Db, DocInfo, [conflicts, deleted]),
+ NewCreds = user_creds(Doc),
+ true = ets:insert(?BY_USER, {UserName, {NewCreds, ATime}})
+ end;
+ false ->
+ ok
+ end,
+ {ok, DocSeq}.
+
+
+user_creds(#doc{deleted = true}) ->
+ nil;
+user_creds(#doc{} = Doc) ->
+ {Creds} = couch_query_servers:json_doc(Doc),
+ Creds.
+
+
+is_user_doc(#doc_info{id = <<"org.couchdb.user:", UserName/binary>>}) ->
+ {true, UserName};
+is_user_doc(_) ->
+ false.
+
+
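+% Staleness is detected by comparing the cached auth db handle's update_seq
+% against a freshly reopened handle; refresh_entries/1 then replays changed
+% user docs into the cache via couch_db:enum_docs_since/5.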
+maybe_refresh_cache() ->
+ case cache_needs_refresh() of
+ true ->
+ ok = gen_server:call(?MODULE, refresh, infinity);
+ false ->
+ ok
+ end.
+
+
+cache_needs_refresh() ->
+ exec_if_auth_db(
+ fun(AuthDb) ->
+ case reopen_auth_db(AuthDb) of
+ nil ->
+ false;
+ AuthDb2 ->
+ AuthDb2#db.update_seq > AuthDb#db.update_seq
+ end
+ end,
+ false
+ ).
+
+
+reopen_auth_db(AuthDb) ->
+ case (catch couch_db:reopen(AuthDb)) of
+ {ok, AuthDb2} ->
+ AuthDb2;
+ _ ->
+ nil
+ end.
+
+
+exec_if_auth_db(Fun) ->
+ exec_if_auth_db(Fun, ok).
+
+exec_if_auth_db(Fun, DefRes) ->
+ case ets:lookup(?STATE, auth_db) of
+ [{auth_db, #db{} = AuthDb}] ->
+ Fun(AuthDb);
+ _ ->
+ DefRes
+ end.
+
+
+open_auth_db() ->
+ [{auth_db_name, DbName}] = ets:lookup(?STATE, auth_db_name),
+ {ok, AuthDb} = ensure_users_db_exists(DbName, [sys_db]),
+ AuthDb.
+
+
+get_user_props_from_db(UserName) ->
+ exec_if_auth_db(
+ fun(AuthDb) ->
+ Db = reopen_auth_db(AuthDb),
+ DocId = <<"org.couchdb.user:", UserName/binary>>,
+ try
+ {ok, Doc} = couch_db:open_doc(Db, DocId, [conflicts]),
+ {DocProps} = couch_query_servers:json_doc(Doc),
+ DocProps
+ catch
+ _:_Error ->
+ nil
+ end
+ end,
+ nil
+ ).
+
+ensure_users_db_exists(DbName, Options) ->
+ Options1 = [{user_ctx, #user_ctx{roles=[<<"_admin">>]}} | Options],
+ case couch_db:open(DbName, Options1) of
+ {ok, Db} ->
+ ensure_auth_ddoc_exists(Db, <<"_design/_auth">>),
+ {ok, Db};
+ _Error ->
+ {ok, Db} = couch_db:create(DbName, Options1),
+ ok = ensure_auth_ddoc_exists(Db, <<"_design/_auth">>),
+ {ok, Db}
+ end.
+
+ensure_auth_ddoc_exists(Db, DDocId) ->
+ case couch_db:open_doc(Db, DDocId) of
+ {not_found, _Reason} ->
+ {ok, AuthDesign} = auth_design_doc(DDocId),
+ {ok, _Rev} = couch_db:update_doc(Db, AuthDesign, []);
+ _ ->
+ ok
+ end,
+ ok.
+
+auth_design_doc(DocId) ->
+ DocProps = [
+ {<<"_id">>, DocId},
+ {<<"language">>,<<"javascript">>},
+ {<<"validate_doc_update">>, ?AUTH_DB_DOC_VALIDATE_FUNCTION}
+ ],
+ {ok, couch_doc:from_json_obj({DocProps})}.
diff --git a/src/couchdb/couch_btree.erl b/src/couchdb/couch_btree.erl
index 73d50805..c63cd8cf 100644
--- a/src/couchdb/couch_btree.erl
+++ b/src/couchdb/couch_btree.erl
@@ -13,7 +13,7 @@
-module(couch_btree).
-export([open/2, open/3, query_modify/4, add/2, add_remove/3]).
--export([fold/4, full_reduce/1, final_reduce/2,foldl/3,foldl/4]).
+-export([fold/4, full_reduce/1, final_reduce/2, foldl/3, foldl/4]).
-export([fold_reduce/4, lookup/2, get_state/1, set_options/2]).
-define(CHUNK_THRESHOLD, 16#4ff).
@@ -70,10 +70,10 @@ final_reduce(Reduce, {KVs, Reductions}) ->
final_reduce(Reduce, {[], [Red | Reductions]}).
fold_reduce(#btree{root=Root}=Bt, Fun, Acc, Options) ->
- Dir = proplists:get_value(dir, Options, fwd),
- StartKey = proplists:get_value(start_key, Options),
- EndKey = proplists:get_value(end_key, Options),
- KeyGroupFun = proplists:get_value(key_group_fun, Options, fun(_,_) -> true end),
+ Dir = couch_util:get_value(dir, Options, fwd),
+ StartKey = couch_util:get_value(start_key, Options),
+ EndKey = couch_util:get_value(end_key, Options),
+ KeyGroupFun = couch_util:get_value(key_group_fun, Options, fun(_,_) -> true end),
{StartKey2, EndKey2} =
case Dir of
rev -> {EndKey, StartKey};
@@ -107,9 +107,9 @@ convert_fun_arity(Fun) when is_function(Fun, 3) ->
Fun. % Already arity 3
make_key_in_end_range_function(#btree{less=Less}, fwd, Options) ->
- case proplists:get_value(end_key_gt, Options) of
+ case couch_util:get_value(end_key_gt, Options) of
undefined ->
- case proplists:get_value(end_key, Options) of
+ case couch_util:get_value(end_key, Options) of
undefined ->
fun(_Key) -> true end;
LastKey ->
@@ -119,9 +119,9 @@ make_key_in_end_range_function(#btree{less=Less}, fwd, Options) ->
fun(Key) -> Less(Key, EndKey) end
end;
make_key_in_end_range_function(#btree{less=Less}, rev, Options) ->
- case proplists:get_value(end_key_gt, Options) of
+ case couch_util:get_value(end_key_gt, Options) of
undefined ->
- case proplists:get_value(end_key, Options) of
+ case couch_util:get_value(end_key, Options) of
undefined ->
fun(_Key) -> true end;
LastKey ->
@@ -142,15 +142,15 @@ foldl(Bt, Fun, Acc, Options) ->
fold(#btree{root=nil}, _Fun, Acc, _Options) ->
{ok, {[], []}, Acc};
fold(#btree{root=Root}=Bt, Fun, Acc, Options) ->
- Dir = proplists:get_value(dir, Options, fwd),
+ Dir = couch_util:get_value(dir, Options, fwd),
InRange = make_key_in_end_range_function(Bt, Dir, Options),
Result =
- case proplists:get_value(start_key, Options) of
+ case couch_util:get_value(start_key, Options) of
undefined ->
- stream_node(Bt, [], Bt#btree.root, InRange, Dir,
+ stream_node(Bt, [], Bt#btree.root, InRange, Dir,
convert_fun_arity(Fun), Acc);
StartKey ->
- stream_node(Bt, [], Bt#btree.root, StartKey, InRange, Dir,
+ stream_node(Bt, [], Bt#btree.root, StartKey, InRange, Dir,
convert_fun_arity(Fun), Acc)
end,
case Result of
@@ -191,7 +191,7 @@ query_modify(Bt, LookupKeys, InsertValues, RemoveKeys) ->
{ok, NewRoot, Bt3} = complete_root(Bt2, KeyPointers),
{ok, QueryResults, Bt3#btree{root=NewRoot}}.
-% for ordering different operatations with the same key.
+% for ordering different operations with the same key.
% fetch < remove < insert
op_order(fetch) -> 1;
op_order(remove) -> 2;
@@ -203,8 +203,7 @@ lookup(#btree{root=Root, less=Less}=Bt, Keys) ->
% We want to return the results in the same order as the keys were input
% but we may have changed the order when we sorted. So we need to put the
% order back into the results.
- KeyDict = dict:from_list(SortedResults),
- [dict:fetch(Key, KeyDict) || Key <- Keys].
+ couch_util:reorder_results(Keys, SortedResults).
lookup(_Bt, nil, Keys) ->
{ok, [{Key, not_found} || Key <- Keys]};
@@ -271,29 +270,26 @@ complete_root(Bt, KPs) ->
% written. Plus with the "case byte_size(term_to_binary(InList)) of" code
% it's probably really inefficient.
-% dialyzer says this pattern is never matched
-% chunkify(_Bt, []) ->
-% [];
-chunkify(Bt, InList) ->
+chunkify(InList) ->
case byte_size(term_to_binary(InList)) of
Size when Size > ?CHUNK_THRESHOLD ->
NumberOfChunksLikely = ((Size div ?CHUNK_THRESHOLD) + 1),
ChunkThreshold = Size div NumberOfChunksLikely,
- chunkify(Bt, InList, ChunkThreshold, [], 0, []);
+ chunkify(InList, ChunkThreshold, [], 0, []);
_Else ->
[InList]
end.
-chunkify(_Bt, [], _ChunkThreshold, [], 0, OutputChunks) ->
+chunkify([], _ChunkThreshold, [], 0, OutputChunks) ->
lists:reverse(OutputChunks);
-chunkify(_Bt, [], _ChunkThreshold, OutList, _OutListSize, OutputChunks) ->
+chunkify([], _ChunkThreshold, OutList, _OutListSize, OutputChunks) ->
lists:reverse([lists:reverse(OutList) | OutputChunks]);
-chunkify(Bt, [InElement | RestInList], ChunkThreshold, OutList, OutListSize, OutputChunks) ->
+chunkify([InElement | RestInList], ChunkThreshold, OutList, OutListSize, OutputChunks) ->
case byte_size(term_to_binary(InElement)) of
Size when (Size + OutListSize) > ChunkThreshold andalso OutList /= [] ->
- chunkify(Bt, RestInList, ChunkThreshold, [], 0, [lists:reverse([InElement | OutList]) | OutputChunks]);
+ chunkify(RestInList, ChunkThreshold, [], 0, [lists:reverse([InElement | OutList]) | OutputChunks]);
Size ->
- chunkify(Bt, RestInList, ChunkThreshold, [InElement | OutList], OutListSize + Size, OutputChunks)
+ chunkify(RestInList, ChunkThreshold, [InElement | OutList], OutListSize + Size, OutputChunks)
end.
modify_node(Bt, RootPointerInfo, Actions, QueryOutput) ->
@@ -336,7 +332,7 @@ get_node(#btree{fd = Fd}, NodePos) ->
write_node(Bt, NodeType, NodeList) ->
% split up nodes into smaller sizes
- NodeListList = chunkify(Bt, NodeList),
+ NodeListList = chunkify(NodeList),
% now write out each chunk and return the KeyPointer pairs for those nodes
ResultList = [
begin
diff --git a/src/couchdb/couch_changes.erl b/src/couchdb/couch_changes.erl
new file mode 100644
index 00000000..1e53161b
--- /dev/null
+++ b/src/couchdb/couch_changes.erl
@@ -0,0 +1,334 @@
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+% http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+-module(couch_changes).
+-include("couch_db.hrl").
+
+-export([handle_changes/3]).
+
+%% @type Req -> #httpd{} | {json_req, JsonObj()}
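+%% Returns a fun that, given a callback (or {Callback, UserAcc} pair), streams
+%% changes to it. "continuous" and "longpoll" feeds subscribe to db update
+%% notifications and keep sending until the limit or a timeout is hit; any
+%% other feed sends a single batch and stops.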
+handle_changes(#changes_args{style=Style}=Args1, Req, Db) ->
+ #changes_args{feed = Feed} = Args = Args1#changes_args{
+ filter = make_filter_fun(Args1#changes_args.filter, Style, Req, Db)
+ },
+ StartSeq = case Args#changes_args.dir of
+ rev ->
+ couch_db:get_update_seq(Db);
+ fwd ->
+ Args#changes_args.since
+ end,
+ if Feed == "continuous" orelse Feed == "longpoll" ->
+ fun(CallbackAcc) ->
+ {Callback, UserAcc} = get_callback_acc(CallbackAcc),
+ Self = self(),
+ {ok, Notify} = couch_db_update_notifier:start_link(
+ fun({_, DbName}) when DbName == Db#db.name ->
+ Self ! db_updated;
+ (_) ->
+ ok
+ end
+ ),
+ UserAcc2 = start_sending_changes(Callback, UserAcc, Feed),
+ {Timeout, TimeoutFun} = get_changes_timeout(Args, Callback),
+ try
+ keep_sending_changes(
+ Args,
+ Callback,
+ UserAcc2,
+ Db,
+ StartSeq,
+ <<"">>,
+ Timeout,
+ TimeoutFun
+ )
+ after
+ couch_db_update_notifier:stop(Notify),
+ get_rest_db_updated(ok) % clean out any remaining update messages
+ end
+ end;
+ true ->
+ fun(CallbackAcc) ->
+ {Callback, UserAcc} = get_callback_acc(CallbackAcc),
+ UserAcc2 = start_sending_changes(Callback, UserAcc, Feed),
+ {ok, {_, LastSeq, _Prepend, _, _, UserAcc3, _, _, _}} =
+ send_changes(
+ Args#changes_args{feed="normal"},
+ Callback,
+ UserAcc2,
+ Db,
+ StartSeq,
+ <<>>
+ ),
+ end_sending_changes(Callback, UserAcc3, LastSeq, Feed)
+ end
+ end.
+
+get_callback_acc({Callback, _UserAcc} = Pair) when is_function(Callback, 3) ->
+ Pair;
+get_callback_acc(Callback) when is_function(Callback, 2) ->
+ {fun(Ev, Data, _) -> Callback(Ev, Data) end, ok}.
+
+%% @type Req -> #httpd{} | {json_req, JsonObj()}
+make_filter_fun([$_ | _] = FilterName, Style, Req, Db) ->
+ builtin_filter_fun(FilterName, Style, Req, Db);
+make_filter_fun(FilterName, Style, Req, Db) ->
+ os_filter_fun(FilterName, Style, Req, Db).
+
+os_filter_fun(FilterName, Style, Req, Db) ->
+ case [list_to_binary(couch_httpd:unquote(Part))
+ || Part <- string:tokens(FilterName, "/")] of
+ [] ->
+ fun(#doc_info{revs=Revs}) ->
+ builtin_results(Style, Revs)
+ end;
+ [DName, FName] ->
+ DesignId = <<"_design/", DName/binary>>,
+ DDoc = couch_httpd_db:couch_doc_open(Db, DesignId, nil, []),
+ % validate that the ddoc has the filter fun
+ #doc{body={Props}} = DDoc,
+ couch_util:get_nested_json_value({Props}, [<<"filters">>, FName]),
+ fun(DocInfo) ->
+ DocInfos =
+ case Style of
+ main_only ->
+ [DocInfo];
+ all_docs ->
+ [DocInfo#doc_info{revs=[Rev]}|| Rev <- DocInfo#doc_info.revs]
+ end,
+ Docs = [Doc || {ok, Doc} <- [
+ couch_db:open_doc(Db, DocInfo2, [deleted, conflicts])
+ || DocInfo2 <- DocInfos]],
+ {ok, Passes} = couch_query_servers:filter_docs(
+ Req, Db, DDoc, FName, Docs
+ ),
+ [{[{<<"rev">>, couch_doc:rev_to_str({RevPos,RevId})}]}
+ || {Pass, #doc{revs={RevPos,[RevId|_]}}}
+ <- lists:zip(Passes, Docs), Pass == true]
+ end;
+ _Else ->
+ throw({bad_request,
+ "filter parameter must be of the form `designname/filtername`"})
+ end.
+
+builtin_filter_fun("_doc_ids", Style, {json_req, {Props}}, _Db) ->
+ filter_docids(couch_util:get_value(<<"doc_ids">>, Props), Style);
+builtin_filter_fun("_doc_ids", Style, #httpd{method='POST'}=Req, _Db) ->
+ {Props} = couch_httpd:json_body_obj(Req),
+ DocIds = couch_util:get_value(<<"doc_ids">>, Props, nil),
+ filter_docids(DocIds, Style);
+builtin_filter_fun("_doc_ids", Style, #httpd{method='GET'}=Req, _Db) ->
+ DocIds = ?JSON_DECODE(couch_httpd:qs_value(Req, "doc_ids", "null")),
+ filter_docids(DocIds, Style);
+builtin_filter_fun("_design", Style, _Req, _Db) ->
+ filter_designdoc(Style);
+builtin_filter_fun(_FilterName, _Style, _Req, _Db) ->
+ throw({bad_request, "unknown builtin filter name"}).
+
+filter_docids(DocIds, Style) when is_list(DocIds)->
+ fun(#doc_info{id=DocId, revs=Revs}) ->
+ case lists:member(DocId, DocIds) of
+ true ->
+ builtin_results(Style, Revs);
+ _ -> []
+ end
+ end;
+filter_docids(_, _) ->
+ throw({bad_request, "`doc_ids` filter parameter is not a list."}).
+
+filter_designdoc(Style) ->
+ fun(#doc_info{id=DocId, revs=Revs}) ->
+ case DocId of
+ <<"_design", _/binary>> ->
+ builtin_results(Style, Revs);
+ _ -> []
+ end
+ end.
+
+builtin_results(Style, [#rev_info{rev=Rev}|_]=Revs) ->
+ case Style of
+ main_only ->
+ [{[{<<"rev">>, couch_doc:rev_to_str(Rev)}]}];
+ all_docs ->
+ [{[{<<"rev">>, couch_doc:rev_to_str(R)}]}
+ || #rev_info{rev=R} <- Revs]
+ end.
+
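+% A heartbeat keeps the feed alive by invoking the callback with 'timeout'
+% and carrying on, while an explicit timeout stops the feed; both are capped
+% by the httpd/changes_timeout default (60000 ms).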
+get_changes_timeout(Args, Callback) ->
+ #changes_args{
+ heartbeat = Heartbeat,
+ timeout = Timeout,
+ feed = ResponseType
+ } = Args,
+ DefaultTimeout = list_to_integer(
+ couch_config:get("httpd", "changes_timeout", "60000")
+ ),
+ case Heartbeat of
+ undefined ->
+ case Timeout of
+ undefined ->
+ {DefaultTimeout, fun(UserAcc) -> {stop, UserAcc} end};
+ infinity ->
+ {infinity, fun(UserAcc) -> {stop, UserAcc} end};
+ _ ->
+ {lists:min([DefaultTimeout, Timeout]),
+ fun(UserAcc) -> {stop, UserAcc} end}
+ end;
+ true ->
+ {DefaultTimeout,
+ fun(UserAcc) -> {ok, Callback(timeout, ResponseType, UserAcc)} end};
+ _ ->
+ {lists:min([DefaultTimeout, Heartbeat]),
+ fun(UserAcc) -> {ok, Callback(timeout, ResponseType, UserAcc)} end}
+ end.
+
+start_sending_changes(_Callback, UserAcc, "continuous") ->
+ UserAcc;
+start_sending_changes(Callback, UserAcc, ResponseType) ->
+ Callback(start, ResponseType, UserAcc).
+
+send_changes(Args, Callback, UserAcc, Db, StartSeq, Prepend) ->
+ #changes_args{
+ style = Style,
+ include_docs = IncludeDocs,
+ limit = Limit,
+ feed = ResponseType,
+ dir = Dir,
+ filter = FilterFun
+ } = Args,
+ couch_db:changes_since(
+ Db,
+ Style,
+ StartSeq,
+ fun changes_enumerator/2,
+ [{dir, Dir}],
+ {Db, StartSeq, Prepend, FilterFun, Callback, UserAcc, ResponseType,
+ Limit, IncludeDocs}
+ ).
+
+keep_sending_changes(Args, Callback, UserAcc, Db, StartSeq, Prepend, Timeout,
+ TimeoutFun) ->
+ #changes_args{
+ feed = ResponseType,
+ limit = Limit,
+ db_open_options = DbOptions
+ } = Args,
+ % ?LOG_INFO("send_changes start ~p",[StartSeq]),
+ {ok, {_, EndSeq, Prepend2, _, _, UserAcc2, _, NewLimit, _}} = send_changes(
+ Args#changes_args{dir=fwd}, Callback, UserAcc, Db, StartSeq, Prepend
+ ),
+ % ?LOG_INFO("send_changes last ~p",[EndSeq]),
+ couch_db:close(Db),
+ if Limit > NewLimit, ResponseType == "longpoll" ->
+ end_sending_changes(Callback, UserAcc2, EndSeq, ResponseType);
+ true ->
+ case wait_db_updated(Timeout, TimeoutFun, UserAcc2) of
+ {updated, UserAcc3} ->
+ % ?LOG_INFO("wait_db_updated updated ~p",[{Db#db.name, EndSeq}]),
+ DbOptions1 = [{user_ctx, Db#db.user_ctx} | DbOptions],
+ case couch_db:open(Db#db.name, DbOptions1) of
+ {ok, Db2} ->
+ keep_sending_changes(
+ Args#changes_args{limit=NewLimit},
+ Callback,
+ UserAcc3,
+ Db2,
+ EndSeq,
+ Prepend2,
+ Timeout,
+ TimeoutFun
+ );
+ _Else ->
+ end_sending_changes(Callback, UserAcc2, EndSeq, ResponseType)
+ end;
+ {stop, UserAcc3} ->
+ % ?LOG_INFO("wait_db_updated stop ~p",[{Db#db.name, EndSeq}]),
+ end_sending_changes(Callback, UserAcc3, EndSeq, ResponseType)
+ end
+ end.
+
+end_sending_changes(Callback, UserAcc, EndSeq, ResponseType) ->
+ Callback({stop, EndSeq}, ResponseType, UserAcc).
+
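+% The enumerator threads {Db, Seq, Prepend, FilterFun, Callback, UserAcc,
+% ResponseType, Limit, IncludeDocs} through couch_db:changes_since/5; Prepend
+% holds the ",\n" separator emitted between rows on non-continuous feeds.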
+changes_enumerator(DocInfo, {Db, _, _, FilterFun, Callback, UserAcc,
+ "continuous", Limit, IncludeDocs}) ->
+
+ #doc_info{id=Id, high_seq=Seq,
+ revs=[#rev_info{deleted=Del,rev=Rev}|_]} = DocInfo,
+ Results0 = FilterFun(DocInfo),
+ Results = [Result || Result <- Results0, Result /= null],
+ Go = if Limit =< 1 -> stop; true -> ok end,
+ case Results of
+ [] ->
+ {Go, {Db, Seq, nil, FilterFun, Callback, UserAcc, "continuous", Limit,
+ IncludeDocs}
+ };
+ _ ->
+ ChangesRow = changes_row(Db, Seq, Id, Del, Results, Rev, IncludeDocs),
+ UserAcc2 = Callback({change, ChangesRow, <<>>}, "continuous", UserAcc),
+ {Go, {Db, Seq, nil, FilterFun, Callback, UserAcc2, "continuous",
+ Limit - 1, IncludeDocs}
+ }
+ end;
+changes_enumerator(DocInfo, {Db, _, Prepend, FilterFun, Callback, UserAcc,
+ ResponseType, Limit, IncludeDocs}) ->
+
+ #doc_info{id=Id, high_seq=Seq, revs=[#rev_info{deleted=Del,rev=Rev}|_]}
+ = DocInfo,
+ Results0 = FilterFun(DocInfo),
+ Results = [Result || Result <- Results0, Result /= null],
+ Go = if Limit =< 1 -> stop; true -> ok end,
+ case Results of
+ [] ->
+ {Go, {Db, Seq, Prepend, FilterFun, Callback, UserAcc, ResponseType,
+ Limit, IncludeDocs}
+ };
+ _ ->
+ ChangesRow = changes_row(Db, Seq, Id, Del, Results, Rev, IncludeDocs),
+ UserAcc2 = Callback({change, ChangesRow, Prepend}, ResponseType, UserAcc),
+ {Go, {Db, Seq, <<",\n">>, FilterFun, Callback, UserAcc2, ResponseType,
+ Limit - 1, IncludeDocs}
+ }
+ end.
+
+
+changes_row(Db, Seq, Id, Del, Results, Rev, true) ->
+ {[{<<"seq">>, Seq}, {<<"id">>, Id}, {<<"changes">>, Results}] ++
+ deleted_item(Del) ++ couch_httpd_view:doc_member(Db, {Id, Rev})};
+changes_row(_, Seq, Id, Del, Results, _, false) ->
+ {[{<<"seq">>, Seq}, {<<"id">>, Id}, {<<"changes">>, Results}] ++
+ deleted_item(Del)}.
+
+deleted_item(true) -> [{<<"deleted">>, true}];
+deleted_item(_) -> [].
+
+% Waits for a db_updated message; if several are queued, collects them all.
+wait_db_updated(Timeout, TimeoutFun, UserAcc) ->
+ receive
+ db_updated ->
+ get_rest_db_updated(UserAcc)
+ after Timeout ->
+ {Go, UserAcc2} = TimeoutFun(UserAcc),
+ case Go of
+ ok ->
+ wait_db_updated(Timeout, TimeoutFun, UserAcc2);
+ stop ->
+ {stop, UserAcc2}
+ end
+ end.
+
+get_rest_db_updated(UserAcc) ->
+ receive
+ db_updated ->
+ get_rest_db_updated(UserAcc)
+ after 0 ->
+ {updated, UserAcc}
+ end.
diff --git a/src/couchdb/couch_config.erl b/src/couchdb/couch_config.erl
index d8473e08..be53e3a3 100644
--- a/src/couchdb/couch_config.erl
+++ b/src/couchdb/couch_config.erl
@@ -44,7 +44,7 @@ stop() ->
all() ->
- lists:sort(gen_server:call(?MODULE, all)).
+ lists:sort(gen_server:call(?MODULE, all, infinity)).
get(Section) when is_binary(Section) ->
@@ -111,7 +111,7 @@ terminate(_Reason, _State) ->
handle_call(all, _From, Config) ->
Resp = lists:sort((ets:tab2list(?MODULE))),
{reply, Resp, Config};
-handle_call({set, Sec, Key, Val, Persist}, _From, Config) ->
+handle_call({set, Sec, Key, Val, Persist}, From, Config) ->
true = ets:insert(?MODULE, {{Sec, Key}, Val}),
case {Persist, Config#config.write_filename} of
{true, undefined} ->
@@ -121,9 +121,12 @@ handle_call({set, Sec, Key, Val, Persist}, _From, Config) ->
_ ->
ok
end,
- [catch F(Sec, Key, Val, Persist) || {_Pid, F} <- Config#config.notify_funs],
- {reply, ok, Config};
-handle_call({delete, Sec, Key, Persist}, _From, Config) ->
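+ % Run the notify funs from a spawned process and reply from there;
+ % presumably this keeps a callback that re-enters couch_config from
+ % deadlocking this gen_server while the caller still gets a synchronous
+ % reply once all funs have run.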
+ spawn_link(fun() ->
+ [catch F(Sec, Key, Val, Persist) || {_Pid, F} <- Config#config.notify_funs],
+ gen_server:reply(From, ok)
+ end),
+ {noreply, Config};
+handle_call({delete, Sec, Key, Persist}, From, Config) ->
true = ets:delete(?MODULE, {Sec,Key}),
case {Persist, Config#config.write_filename} of
{true, undefined} ->
@@ -133,8 +136,11 @@ handle_call({delete, Sec, Key, Persist}, _From, Config) ->
_ ->
ok
end,
- [catch F(Sec, Key, deleted, Persist) || {_Pid, F} <- Config#config.notify_funs],
- {reply, ok, Config};
+ spawn_link(fun() ->
+ [catch F(Sec, Key, deleted, Persist) || {_Pid, F} <- Config#config.notify_funs],
+ gen_server:reply(From, ok)
+ end),
+ {noreply, Config};
handle_call({register, Fun, Pid}, _From, #config{notify_funs=PidFuns}=Config) ->
erlang:monitor(process, Pid),
% convert 1 and 2 arity to 3 arity
@@ -194,8 +200,28 @@ parse_ini_file(IniFile) ->
{AccSectionName, AccValues};
Line2 ->
case re:split(Line2, "\s?=\s?", [{return, list}]) of
- [_SingleElement] -> % no "=" found, ignore this line
- {AccSectionName, AccValues};
+ [Value] ->
+ MultiLineValuePart = case re:run(Line, "^ \\S", []) of
+ {match, _} ->
+ true;
+ _ ->
+ false
+ end,
+ case {MultiLineValuePart, AccValues} of
+ {true, [{{_, ValueName}, PrevValue} | AccValuesRest]} ->
+ % remove comment
+ case re:split(Value, " ;|\t;", [{return, list}]) of
+ [[]] ->
+ % empty line
+ {AccSectionName, AccValues};
+ [LineValue | _Rest] ->
+ E = {{AccSectionName, ValueName},
+ PrevValue ++ " " ++ LineValue},
+ {AccSectionName, [E | AccValuesRest]}
+ end;
+ _ ->
+ {AccSectionName, AccValues}
+ end;
[""|_LineValues] -> % line begins with "=", ignore
{AccSectionName, AccValues};
[ValueName|LineValues] -> % yeehaw, got a line!
@@ -205,7 +231,7 @@ parse_ini_file(IniFile) ->
[[]] ->
% empty line means delete this key
ets:delete(?MODULE, {AccSectionName, ValueName}),
- {AccSectionName, AccValues};
+ {AccSectionName, AccValues};
[LineValue | _Rest] ->
{AccSectionName,
[{{AccSectionName, ValueName}, LineValue} | AccValues]}
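
The new [Value] branch above adds multi-line value support to the INI parser:
a line that contains no "=" but starts with a space is appended, with a single
joining space and comments stripped, to the most recently parsed value. For
example, assuming an ini file such as this illustrative snippet:

    [httpd_global_handlers]
    _utils = {couch_httpd_misc_handlers, handle_utils_dir_req,
     "/usr/share/couchdb/www"}   ; leading space marks the continuation

both physical lines parse into a single value for the _utils key.
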
diff --git a/src/couchdb/couch_db.erl b/src/couchdb/couch_db.erl
index 79e00ff8..4e945ad4 100644
--- a/src/couchdb/couch_db.erl
+++ b/src/couchdb/couch_db.erl
@@ -13,18 +13,21 @@
-module(couch_db).
-behaviour(gen_server).
--export([open/2,close/1,create/2,start_compact/1,get_db_info/1,get_design_docs/1]).
+-export([open/2,open_int/2,close/1,create/2,start_compact/1,get_db_info/1,get_design_docs/1]).
-export([open_ref_counted/2,is_idle/1,monitor/1,count_changes_since/2]).
-export([update_doc/3,update_doc/4,update_docs/4,update_docs/2,update_docs/3,delete_doc/3]).
-export([get_doc_info/2,open_doc/2,open_doc/3,open_doc_revs/4]).
--export([set_revs_limit/2,get_revs_limit/1,register_update_notifier/3]).
+-export([set_revs_limit/2,get_revs_limit/1]).
-export([get_missing_revs/2,name/1,doc_to_tree/1,get_update_seq/1,get_committed_update_seq/1]).
-export([enum_docs/4,enum_docs_since/5]).
-export([enum_docs_since_reduce_to_count/1,enum_docs_reduce_to_count/1]).
-export([increment_update_seq/1,get_purge_seq/1,purge_docs/2,get_last_purged/1]).
--export([start_link/3,open_doc_int/3,set_admins/2,get_admins/1,ensure_full_commit/1]).
+-export([start_link/3,open_doc_int/3,ensure_full_commit/1]).
+-export([set_security/2,get_security/1]).
-export([init/1,terminate/2,handle_call/3,handle_cast/2,code_change/3,handle_info/2]).
-export([changes_since/5,changes_since/6,read_doc/2,new_revid/1]).
+-export([check_is_admin/1, check_is_reader/1]).
+-export([reopen/1]).
-include("couch_db.hrl").
@@ -63,9 +66,39 @@ open_db_file(Filepath, Options) ->
create(DbName, Options) ->
couch_server:create(DbName, Options).
-open(DbName, Options) ->
+% This is for opening a database for internal purposes such as the
+% replicator or the view indexer; it never throws a reader error.
+open_int(DbName, Options) ->
couch_server:open(DbName, Options).
+% This should be called any time an HTTP request opens the database;
+% it ensures that the HTTP userCtx is a valid reader.
+open(DbName, Options) ->
+ case couch_server:open(DbName, Options) of
+ {ok, Db} ->
+ try
+ check_is_reader(Db),
+ {ok, Db}
+ catch
+ throw:Error ->
+ close(Db),
+ throw(Error)
+ end;
+ Else -> Else
+ end.
+
+reopen(#db{main_pid = Pid, fd_ref_counter = OldRefCntr, user_ctx = UserCtx}) ->
+ {ok, #db{fd_ref_counter = NewRefCntr} = NewDb} =
+ gen_server:call(Pid, get_db, infinity),
+ case NewRefCntr =:= OldRefCntr of
+ true ->
+ ok;
+ false ->
+ couch_ref_counter:add(NewRefCntr),
+ couch_ref_counter:drop(OldRefCntr)
+ end,
+ {ok, NewDb#db{user_ctx = UserCtx}}.
+
ensure_full_commit(#db{update_pid=UpdatePid,instance_start_time=StartTime}) ->
ok = gen_server:call(UpdatePid, full_commit, infinity),
{ok, StartTime}.
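
With this split, open/2 becomes the ACL-enforcing entry point (it closes the
db and rethrows if check_is_reader/1 fails) while open_int/2 stays available
for internal callers such as the replicator and the view indexer. A hedged
sketch of the calling convention an HTTP handler would follow (DbName,
UserCtx and do_request/1 are illustrative):

    %% HTTP paths open with the request's user_ctx so the reader check
    %% applies; internal code uses couch_db:open_int/2 and skips it.
    {ok, Db} = couch_db:open(DbName, [{user_ctx, UserCtx}]),
    try
        do_request(Db)
    after
        couch_db:close(Db)
    end.
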
@@ -73,9 +106,8 @@ ensure_full_commit(#db{update_pid=UpdatePid,instance_start_time=StartTime}) ->
close(#db{fd_ref_counter=RefCntr}) ->
couch_ref_counter:drop(RefCntr).
-open_ref_counted(MainPid, UserCtx) ->
- {ok, Db} = gen_server:call(MainPid, {open_ref_count, self()}),
- {ok, Db#db{user_ctx=UserCtx}}.
+open_ref_counted(MainPid, OpenedPid) ->
+ gen_server:call(MainPid, {open_ref_count, OpenedPid}).
is_idle(MainPid) ->
gen_server:call(MainPid, is_idle).
@@ -83,11 +115,8 @@ is_idle(MainPid) ->
monitor(#db{main_pid=MainPid}) ->
erlang:monitor(process, MainPid).
-register_update_notifier(#db{main_pid=Pid}, Seq, Fun) ->
- gen_server:call(Pid, {register_update_notifier, Seq, Fun}).
-
start_compact(#db{update_pid=Pid}) ->
- gen_server:cast(Pid, start_compact).
+ gen_server:call(Pid, start_compact).
delete_doc(Db, Id, Revisions) ->
DeletedDocs = [#doc{id=Id, revs=[Rev], deleted=true} || Rev <- Revisions],
@@ -98,23 +127,52 @@ open_doc(Db, IdOrDocInfo) ->
open_doc(Db, IdOrDocInfo, []).
open_doc(Db, Id, Options) ->
- couch_stats_collector:increment({couchdb, database_reads}),
+ increment_stat(Db, {couchdb, database_reads}),
case open_doc_int(Db, Id, Options) of
{ok, #doc{deleted=true}=Doc} ->
case lists:member(deleted, Options) of
true ->
- {ok, Doc};
+ apply_open_options({ok, Doc},Options);
false ->
{not_found, deleted}
end;
Else ->
- Else
+ apply_open_options(Else,Options)
+ end.
+
+apply_open_options({ok, Doc},Options) ->
+ apply_open_options2(Doc,Options);
+apply_open_options(Else,_Options) ->
+ Else.
+
+apply_open_options2(Doc,[]) ->
+ {ok, Doc};
+apply_open_options2(#doc{atts=Atts,revs=Revs}=Doc,
+ [{atts_since, PossibleAncestors}|Rest]) ->
+ RevPos = find_ancestor_rev_pos(Revs, PossibleAncestors),
+ apply_open_options2(Doc#doc{atts=[A#att{data=
+ if AttPos>RevPos -> Data; true -> stub end}
+ || #att{revpos=AttPos,data=Data}=A <- Atts]}, Rest);
+apply_open_options2(Doc,[_|Rest]) ->
+ apply_open_options2(Doc,Rest).
+
+
+find_ancestor_rev_pos({_, []}, _AttsSinceRevs) ->
+ 0;
+find_ancestor_rev_pos(_DocRevs, []) ->
+ 0;
+find_ancestor_rev_pos({RevPos, [RevId|Rest]}, AttsSinceRevs) ->
+ case lists:member({RevPos, RevId}, AttsSinceRevs) of
+ true ->
+ RevPos;
+ false ->
+ find_ancestor_rev_pos({RevPos - 1, Rest}, AttsSinceRevs)
end.
open_doc_revs(Db, Id, Revs, Options) ->
- couch_stats_collector:increment({couchdb, database_reads}),
- [Result] = open_doc_revs_int(Db, [{Id, Revs}], Options),
- Result.
+ increment_stat(Db, {couchdb, database_reads}),
+ [{ok, Results}] = open_doc_revs_int(Db, [{Id, Revs}], Options),
+ {ok, [apply_open_options(Result, Options) || Result <- Results]}.
% Each returned result is a list of tuples:
% {Id, MissingRevs, PossibleAncestors}
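
The atts_since machinery above decides which attachments can be sent as
stubs: the caller lists revisions it already has, find_ancestor_rev_pos/2
walks the document's rev path until it hits one of them, and any attachment
whose revpos is at or below that position is replaced by a stub. A small
worked sketch (rev ids are illustrative):

    %% Doc is at rev 3-c with path [c, b, a]; the client already has 2-b.
    %% {3, c} is not in the list, so recurse to {2, [b, a]}; {2, b} is,
    %% so the ancestor position is 2 and attachments with revpos =< 2
    %% become stubs.
    2 = find_ancestor_rev_pos({3, [<<"c">>, <<"b">>, <<"a">>]},
                              [{2, <<"b">>}]).
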
@@ -135,9 +193,9 @@ find_missing([{Id, Revs}|RestIdRevs], [{ok, FullInfo} | RestLookupInfo]) ->
% Find the revs that are possible parents of this rev
PossibleAncestors =
lists:foldl(fun({LeafPos, LeafRevId}, Acc) ->
- % this leaf is a "possible ancenstor" of the missing
+ % this leaf is a "possible ancestor" of the missing
% revs if this LeafPos lessthan any of the missing revs
- case lists:any(fun({MissingPos, _}) ->
+ case lists:any(fun({MissingPos, _}) ->
LeafPos < MissingPos end, MissingRevs) of
true ->
[{LeafPos, LeafRevId} | Acc];
@@ -194,7 +252,8 @@ get_db_info(Db) ->
update_seq=SeqNum,
name=Name,
fulldocinfo_by_id_btree=FullDocBtree,
- instance_start_time=StartTime} = Db,
+ instance_start_time=StartTime,
+ committed_update_seq=CommittedUpdateSeq} = Db,
{ok, Size} = couch_file:bytes(Fd),
{ok, {Count, DelCount}} = couch_btree:full_reduce(FullDocBtree),
InfoList = [
@@ -206,7 +265,8 @@ get_db_info(Db) ->
{compact_running, Compactor/=nil},
{disk_size, Size},
{instance_start_time, StartTime},
- {disk_format_version, DiskVersion}
+ {disk_format_version, DiskVersion},
+ {committed_update_seq, CommittedUpdateSeq}
],
{ok, InfoList}.
@@ -221,22 +281,88 @@ get_design_docs(#db{fulldocinfo_by_id_btree=Btree}=Db) ->
[], [{start_key, <<"_design/">>}, {end_key_gt, <<"_design0">>}]),
{ok, Docs}.
-check_is_admin(#db{admins=Admins, user_ctx=#user_ctx{name=Name,roles=Roles}}) ->
- DbAdmins = [<<"_admin">> | Admins],
- case DbAdmins -- [Name | Roles] of
- DbAdmins -> % same list, not an admin
- throw({unauthorized, <<"You are not a db or server admin.">>});
+check_is_admin(#db{user_ctx=#user_ctx{name=Name,roles=Roles}}=Db) ->
+ {Admins} = get_admins(Db),
+ AdminRoles = [<<"_admin">> | couch_util:get_value(<<"roles">>, Admins, [])],
+ AdminNames = couch_util:get_value(<<"names">>, Admins,[]),
+ case AdminRoles -- Roles of
+ AdminRoles -> % same list, not an admin role
+ case AdminNames -- [Name] of
+ AdminNames -> % same names, not an admin
+ throw({unauthorized, <<"You are not a db or server admin.">>});
+ _ ->
+ ok
+ end;
_ ->
ok
end.
-get_admins(#db{admins=Admins}) ->
- Admins.
+check_is_reader(#db{user_ctx=#user_ctx{name=Name,roles=Roles}=UserCtx}=Db) ->
+ case (catch check_is_admin(Db)) of
+ ok -> ok;
+ _ ->
+ {Readers} = get_readers(Db),
+ ReaderRoles = couch_util:get_value(<<"roles">>, Readers,[]),
+ WithAdminRoles = [<<"_admin">> | ReaderRoles],
+ ReaderNames = couch_util:get_value(<<"names">>, Readers,[]),
+ case ReaderRoles ++ ReaderNames of
+ [] -> ok; % no readers == public access
+ _Else ->
+ case WithAdminRoles -- Roles of
+ WithAdminRoles -> % same list, not a reader role
+ case ReaderNames -- [Name] of
+ ReaderNames -> % same names, not a reader
+ ?LOG_DEBUG("Not a reader: UserCtx ~p vs Names ~p Roles ~p", [UserCtx, ReaderNames, WithAdminRoles]),
+ throw({unauthorized, <<"You are not authorized to access this db.">>});
+ _ ->
+ ok
+ end;
+ _ ->
+ ok
+ end
+ end
+ end.
+
+get_admins(#db{security=SecProps}) ->
+ couch_util:get_value(<<"admins">>, SecProps, {[]}).
+
+get_readers(#db{security=SecProps}) ->
+ couch_util:get_value(<<"readers">>, SecProps, {[]}).
-set_admins(#db{update_pid=Pid}=Db, Admins) when is_list(Admins) ->
+get_security(#db{security=SecProps}) ->
+ {SecProps}.
+
+set_security(#db{update_pid=Pid}=Db, {NewSecProps}) when is_list(NewSecProps) ->
check_is_admin(Db),
- gen_server:call(Pid, {set_admins, Admins}, infinity).
+ ok = validate_security_object(NewSecProps),
+ ok = gen_server:call(Pid, {set_security, NewSecProps}, infinity),
+ {ok, _} = ensure_full_commit(Db),
+ ok;
+set_security(_, _) ->
+ throw(bad_request).
+
+validate_security_object(SecProps) ->
+ Admins = couch_util:get_value(<<"admins">>, SecProps, {[]}),
+ Readers = couch_util:get_value(<<"readers">>, SecProps, {[]}),
+ ok = validate_names_and_roles(Admins),
+ ok = validate_names_and_roles(Readers),
+ ok.
+% validate user input
+validate_names_and_roles({Props}) when is_list(Props) ->
+ case couch_util:get_value(<<"names">>,Props,[]) of
+ Ns when is_list(Ns) ->
+ [throw("names must be a JSON list of strings") ||N <- Ns, not is_binary(N)],
+ Ns;
+ _ -> throw("names must be a JSON list of strings")
+ end,
+ case couch_util:get_value(<<"roles">>,Props,[]) of
+ Rs when is_list(Rs) ->
+ [throw("roles must be a JSON list of strings") ||R <- Rs, not is_binary(R)],
+ Rs;
+ _ -> throw("roles must be a JSON list of strings")
+ end,
+ ok.
get_revs_limit(#db{revs_limit=Limit}) ->
Limit.
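
For reference, validate_security_object/1 accepts the new _security shape in
its EJSON form: an "admins" object and a "readers" object, each carrying
"names" and "roles" lists of binaries. A minimal sketch of a term it accepts
(values are illustrative):

    SecProps = [
        {<<"admins">>,  {[{<<"names">>, [<<"bob">>]},
                          {<<"roles">>, [<<"staff">>]}]}},
        {<<"readers">>, {[{<<"names">>, [<<"alice">>]},
                          {<<"roles">>, [<<"accounting">>]}]}}
    ],
    ok = validate_security_object(SecProps).
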
@@ -257,8 +383,14 @@ update_doc(Db, Doc, Options, UpdateType) ->
case update_docs(Db, [Doc], Options, UpdateType) of
{ok, [{ok, NewRev}]} ->
{ok, NewRev};
+ {ok, [{{_Id, _Rev}, Error}]} ->
+ throw(Error);
{ok, [Error]} ->
- throw(Error)
+ throw(Error);
+ {ok, []} ->
+ % replication success
+ {Pos, [RevId | _]} = Doc#doc.revs,
+ {ok, {Pos, RevId}}
end.
update_docs(Db, Docs) ->
@@ -285,18 +417,8 @@ group_alike_docs([Doc|Rest], [Bucket|RestBuckets]) ->
group_alike_docs(Rest, [[Doc]|[Bucket|RestBuckets]])
end.
-
-validate_doc_update(#db{user_ctx=UserCtx, admins=Admins},
- #doc{id= <<"_design/",_/binary>>}, _GetDiskDocFun) ->
- UserNames = [UserCtx#user_ctx.name | UserCtx#user_ctx.roles],
- % if the user is a server admin or db admin, allow the save
- case length(UserNames -- [<<"_admin">> | Admins]) =:= length(UserNames) of
- true ->
- % not an admin
- {unauthorized, <<"You are not a server or database admin.">>};
- false ->
- ok
- end;
+validate_doc_update(#db{}=Db, #doc{id= <<"_design/",_/binary>>}, _GetDiskDocFun) ->
+ catch check_is_admin(Db);
validate_doc_update(#db{validate_doc_funs=[]}, _Doc, _GetDiskDocFun) ->
ok;
validate_doc_update(_Db, #doc{id= <<"_local/",_/binary>>}, _GetDiskDocFun) ->
@@ -304,7 +426,8 @@ validate_doc_update(_Db, #doc{id= <<"_local/",_/binary>>}, _GetDiskDocFun) ->
validate_doc_update(Db, Doc, GetDiskDocFun) ->
DiskDoc = GetDiskDocFun(),
JsonCtx = couch_util:json_user_ctx(Db),
- try [case Fun(Doc, DiskDoc, JsonCtx) of
+ SecObj = get_security(Db),
+ try [case Fun(Doc, DiskDoc, JsonCtx, SecObj) of
ok -> ok;
Error -> throw(Error)
end || Fun <- Db#db.validate_doc_funs],
@@ -352,15 +475,15 @@ prep_and_validate_update(Db, #doc{id=Id,revs={RevStart, Revs}}=Doc,
prep_and_validate_updates(_Db, [], [], _AllowConflict, AccPrepped,
AccFatalErrors) ->
{AccPrepped, AccFatalErrors};
-prep_and_validate_updates(Db, [DocBucket|RestBuckets], [not_found|RestLookups],
+prep_and_validate_updates(Db, [DocBucket|RestBuckets], [not_found|RestLookups],
AllowConflict, AccPrepped, AccErrors) ->
[#doc{id=Id}|_]=DocBucket,
% no existing revs are known,
{PreppedBucket, AccErrors3} = lists:foldl(
- fun(#doc{revs=Revs}=Doc, {AccBucket, AccErrors2}) ->
+ fun(#doc{revs=Revs}=Doc, {AccBucket, AccErrors2}) ->
case couch_doc:has_stubs(Doc) of
true ->
- couch_doc:merge_doc(Doc, #doc{}); % will throw exception
+ couch_doc:merge_stubs(Doc, #doc{}); % will throw exception
false -> ok
end,
case Revs of
@@ -398,12 +521,12 @@ prep_and_validate_updates(Db, [DocBucket|RestBuckets],
end
end,
{[], AccErrors}, DocBucket),
- prep_and_validate_updates(Db, RestBuckets, RestLookups, AllowConflict,
+ prep_and_validate_updates(Db, RestBuckets, RestLookups, AllowConflict,
[PreppedBucket | AccPrepped], AccErrors3).
-update_docs(#db{update_pid=UpdatePid}=Db, Docs, Options) ->
- update_docs(#db{update_pid=UpdatePid}=Db, Docs, Options, interactive_edit).
+update_docs(Db, Docs, Options) ->
+ update_docs(Db, Docs, Options, interactive_edit).
prep_and_validate_replicated_updates(_Db, [], [], AccPrepped, AccErrors) ->
@@ -414,10 +537,10 @@ prep_and_validate_replicated_updates(Db, [Bucket|RestBuckets], [OldInfo|RestOldI
case OldInfo of
not_found ->
{ValidatedBucket, AccErrors3} = lists:foldl(
- fun(Doc, {AccPrepped2, AccErrors2}) ->
+ fun(Doc, {AccPrepped2, AccErrors2}) ->
case couch_doc:has_stubs(Doc) of
true ->
- couch_doc:merge_doc(Doc, #doc{}); % will throw exception
+ couch_doc:merge_stubs(Doc, #doc{}); % will throw exception
false -> ok
end,
case validate_doc_update(Db, Doc, fun() -> nil end) of
@@ -432,7 +555,7 @@ prep_and_validate_replicated_updates(Db, [Bucket|RestBuckets], [OldInfo|RestOldI
{ok, #full_doc_info{rev_tree=OldTree}} ->
NewRevTree = lists:foldl(
fun(NewDoc, AccTree) ->
- {NewTree, _} = couch_key_tree:merge(AccTree, [couch_db:doc_to_tree(NewDoc)]),
+ {NewTree, _} = couch_key_tree:merge(AccTree, couch_db:doc_to_tree(NewDoc)),
NewTree
end,
OldTree, Bucket),
@@ -487,7 +610,7 @@ new_revid(#doc{body=Body,revs={OldStart,OldRevs},
?l2b(integer_to_list(couch_util:rand32()));
Atts2 ->
OldRev = case OldRevs of [] -> 0; [OldRev0|_] -> OldRev0 end,
- erlang:md5(term_to_binary([Deleted, OldStart, OldRev, Body, Atts2]))
+ couch_util:md5(term_to_binary([Deleted, OldStart, OldRev, Body, Atts2]))
end.
new_revs([], OutBuckets, IdRevsAcc) ->
@@ -515,7 +638,7 @@ check_dup_atts2(_) ->
update_docs(Db, Docs, Options, replicated_changes) ->
- couch_stats_collector:increment({couchdb, database_writes}),
+ increment_stat(Db, {couchdb, database_writes}),
DocBuckets = group_alike_docs(Docs),
case (Db#db.validate_doc_funs /= []) orelse
@@ -541,7 +664,7 @@ update_docs(Db, Docs, Options, replicated_changes) ->
{ok, DocErrors};
update_docs(Db, Docs, Options, interactive_edit) ->
- couch_stats_collector:increment({couchdb, database_writes}),
+ increment_stat(Db, {couchdb, database_writes}),
AllOrNothing = lists:member(all_or_nothing, Options),
% go ahead and generate the new revision ids for the documents.
% separate out the NonRep documents from the rest of the documents
@@ -645,7 +768,7 @@ collect_results(UpdatePid, MRef, ResultsAcc) ->
exit(Reason)
end.
-write_and_commit(#db{update_pid=UpdatePid, user_ctx=Ctx}=Db, DocBuckets,
+write_and_commit(#db{update_pid=UpdatePid}=Db, DocBuckets,
NonRepDocs, Options0) ->
Options = set_commit_option(Options0),
MergeConflicts = lists:member(merge_conflicts, Options),
@@ -658,7 +781,7 @@ write_and_commit(#db{update_pid=UpdatePid, user_ctx=Ctx}=Db, DocBuckets,
retry ->
% This can happen if the db file we wrote to was swapped out by
% compaction. Retry by reopening the db and writing to the current file
- {ok, Db2} = open_ref_counted(Db#db.main_pid, Ctx),
+ {ok, Db2} = open_ref_counted(Db#db.main_pid, self()),
DocBuckets2 = [[doc_flush_atts(Doc, Db2#db.fd) || Doc <- Bucket] || Bucket <- DocBuckets],
% We only retry once
close(Db2),
@@ -693,18 +816,19 @@ flush_att(Fd, #att{data={Fd0, _}}=Att) when Fd0 == Fd ->
% already written to our file, nothing to write
Att;
-flush_att(Fd, #att{data={OtherFd,StreamPointer}, md5=InMd5}=Att) ->
- {NewStreamData, Len, Md5} =
+flush_att(Fd, #att{data={OtherFd,StreamPointer}, md5=InMd5,
+ disk_len=InDiskLen} = Att) ->
+ {NewStreamData, Len, _IdentityLen, Md5, IdentityMd5} =
couch_stream:copy_to_new_stream(OtherFd, StreamPointer, Fd),
- check_md5(Md5, InMd5),
- Att#att{data={Fd, NewStreamData}, md5=Md5, len=Len};
+ check_md5(IdentityMd5, InMd5),
+ Att#att{data={Fd, NewStreamData}, md5=Md5, att_len=Len, disk_len=InDiskLen};
flush_att(Fd, #att{data=Data}=Att) when is_binary(Data) ->
with_stream(Fd, Att, fun(OutputStream) ->
couch_stream:write(OutputStream, Data)
end);
-flush_att(Fd, #att{data=Fun,len=undefined}=Att) when is_function(Fun) ->
+flush_att(Fd, #att{data=Fun,att_len=undefined}=Att) when is_function(Fun) ->
with_stream(Fd, Att, fun(OutputStream) ->
% Fun(MaxChunkSize, WriterFun) must call WriterFun
% once for each chunk of the attachment,
@@ -726,9 +850,9 @@ flush_att(Fd, #att{data=Fun,len=undefined}=Att) when is_function(Fun) ->
end, ok)
end);
-flush_att(Fd, #att{data=Fun,len=Len}=Att) when is_function(Fun) ->
+flush_att(Fd, #att{data=Fun,att_len=AttLen}=Att) when is_function(Fun) ->
with_stream(Fd, Att, fun(OutputStream) ->
- write_streamed_attachment(OutputStream, Fun, Len)
+ write_streamed_attachment(OutputStream, Fun, AttLen)
end).
% From RFC 2616 3.6.1 - Chunked Transfer Coding
@@ -741,8 +865,17 @@ flush_att(Fd, #att{data=Fun,len=Len}=Att) when is_function(Fun) ->
% is present in the request, but there is no Content-MD5
% trailer, we're free to ignore this inconsistency and
% pretend that no Content-MD5 exists.
-with_stream(Fd, #att{md5=InMd5}=Att, Fun) ->
- {ok, OutputStream} = couch_stream:open(Fd),
+with_stream(Fd, #att{md5=InMd5,type=Type,encoding=Enc}=Att, Fun) ->
+ {ok, OutputStream} = case (Enc =:= identity) andalso
+ couch_util:compressible_att_type(Type) of
+ true ->
+ CompLevel = list_to_integer(
+ couch_config:get("attachments", "compression_level", "0")
+ ),
+ couch_stream:open(Fd, gzip, [{compression_level, CompLevel}]);
+ _ ->
+ couch_stream:open(Fd)
+ end,
ReqMd5 = case Fun(OutputStream) of
{md5, FooterMd5} ->
case InMd5 of
@@ -752,9 +885,36 @@ with_stream(Fd, #att{md5=InMd5}=Att, Fun) ->
_ ->
InMd5
end,
- {StreamInfo, Len, Md5} = couch_stream:close(OutputStream),
- check_md5(Md5, ReqMd5),
- Att#att{data={Fd,StreamInfo},len=Len,md5=Md5}.
+ {StreamInfo, Len, IdentityLen, Md5, IdentityMd5} =
+ couch_stream:close(OutputStream),
+ check_md5(IdentityMd5, ReqMd5),
+ {AttLen, DiskLen, NewEnc} = case Enc of
+ identity ->
+ case {Md5, IdentityMd5} of
+ {Same, Same} ->
+ {Len, IdentityLen, identity};
+ _ ->
+ {Len, IdentityLen, gzip}
+ end;
+ gzip ->
+ case {Att#att.att_len, Att#att.disk_len} of
+ {AL, DL} when AL =:= undefined orelse DL =:= undefined ->
+ % Compressed attachment uploaded through the standalone API.
+ {Len, Len, gzip};
+ {AL, DL} ->
+ % This case is used for efficient push replication, where a
+ % compressed attachment is located in the body of a multipart
+ % content-type request.
+ {AL, DL, gzip}
+ end
+ end,
+ Att#att{
+ data={Fd,StreamInfo},
+ att_len=AttLen,
+ disk_len=DiskLen,
+ md5=Md5,
+ encoding=NewEnc
+ }.
write_streamed_attachment(_Stream, _F, 0) ->
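
The with_stream/3 change above routes compressible attachment types through a
gzip-encoded stream whose level comes from the [attachments] compression_level
setting. Conceptually the stream applies standard zlib gzip framing; a hedged
sketch of the equivalent one-shot compression (IdentityBytes is a placeholder
for the raw attachment data):

    Level = list_to_integer(
        couch_config:get("attachments", "compression_level", "0")),
    Z = zlib:open(),
    %% WindowBits of 16 + 15 selects the gzip wrapper in zlib.
    ok = zlib:deflateInit(Z, Level, deflated, 16 + 15, 8, default),
    Gz = iolist_to_binary([zlib:deflate(Z, IdentityBytes),
                           zlib:deflate(Z, <<>>, finish)]),
    ok = zlib:deflateEnd(Z),
    zlib:close(Z).
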
@@ -779,17 +939,18 @@ changes_since(Db, Style, StartSeq, Fun, Acc) ->
changes_since(Db, Style, StartSeq, Fun, Options, Acc) ->
Wrapper = fun(DocInfo, _Offset, Acc2) ->
#doc_info{revs=Revs} = DocInfo,
+ DocInfo2 =
case Style of
main_only ->
- Infos = [DocInfo];
+ DocInfo;
all_docs ->
- % make each rev it's own doc info
- Infos = [DocInfo#doc_info{revs=[RevInfo]} ||
- #rev_info{seq=RevSeq}=RevInfo <- Revs, StartSeq < RevSeq]
+ % remove revs before the seq
+ DocInfo#doc_info{revs=[RevInfo ||
+ #rev_info{seq=RevSeq}=RevInfo <- Revs, StartSeq < RevSeq]}
end,
- Fun(Infos, Acc2)
+ Fun(DocInfo2, Acc2)
end,
- {ok, _LastReduction, AccOut} = couch_btree:fold(Db#db.docinfo_by_seq_btree,
+ {ok, _LastReduction, AccOut} = couch_btree:fold(Db#db.docinfo_by_seq_btree,
Wrapper, Acc, [{start_key, StartSeq + 1}] ++ Options),
{ok, AccOut}.
@@ -816,11 +977,17 @@ init({DbName, Filepath, Fd, Options}) ->
{ok, UpdaterPid} = gen_server:start_link(couch_db_updater, {self(), DbName, Filepath, Fd, Options}, []),
{ok, #db{fd_ref_counter=RefCntr}=Db} = gen_server:call(UpdaterPid, get_db),
couch_ref_counter:add(RefCntr),
- couch_stats_collector:track_process_count({couchdb, open_databases}),
+ case lists:member(sys_db, Options) of
+ true ->
+ ok;
+ false ->
+ couch_stats_collector:track_process_count({couchdb, open_databases})
+ end,
+ process_flag(trap_exit, true),
{ok, Db}.
-terminate(Reason, _Db) ->
- couch_util:terminate_linked(Reason),
+terminate(_Reason, Db) ->
+ couch_util:shutdown_sync(Db#db.update_pid),
ok.
handle_call({open_ref_count, OpenerPid}, _, #db{fd_ref_counter=RefCntr}=Db) ->
@@ -839,7 +1006,9 @@ handle_call({db_updated, NewDb}, _From, #db{fd_ref_counter=OldRefCntr}) ->
couch_ref_counter:add(NewRefCntr),
couch_ref_counter:drop(OldRefCntr)
end,
- {reply, ok, NewDb}.
+ {reply, ok, NewDb};
+handle_call(get_db, _From, Db) ->
+ {reply, {ok, Db}, Db}.
handle_cast(Msg, Db) ->
@@ -848,7 +1017,11 @@ handle_cast(Msg, Db) ->
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
-
+
+handle_info({'EXIT', _Pid, normal}, Db) ->
+ {noreply, Db};
+handle_info({'EXIT', _Pid, Reason}, Server) ->
+ {stop, Reason, Server};
handle_info(Msg, Db) ->
?LOG_ERROR("Bad message received for db ~s: ~p", [Db#db.name, Msg]),
exit({error, Msg}).
@@ -983,17 +1156,39 @@ make_doc(#db{fd=Fd}=Db, Id, Deleted, Bp, RevisionPath) ->
{ok, {BodyData0, Atts0}} = read_doc(Db, Bp),
{BodyData0,
lists:map(
- fun({Name,Type,Sp,Len,RevPos,Md5}) ->
+ fun({Name,Type,Sp,AttLen,DiskLen,RevPos,Md5,Enc}) ->
#att{name=Name,
type=Type,
- len=Len,
+ att_len=AttLen,
+ disk_len=DiskLen,
+ md5=Md5,
+ revpos=RevPos,
+ data={Fd,Sp},
+ encoding=
+ case Enc of
+ true ->
+ % 0110 UPGRADE CODE
+ gzip;
+ false ->
+ % 0110 UPGRADE CODE
+ identity;
+ _ ->
+ Enc
+ end
+ };
+ ({Name,Type,Sp,AttLen,RevPos,Md5}) ->
+ #att{name=Name,
+ type=Type,
+ att_len=AttLen,
+ disk_len=AttLen,
md5=Md5,
revpos=RevPos,
data={Fd,Sp}};
- ({Name,{Type,Sp,Len}}) ->
+ ({Name,{Type,Sp,AttLen}}) ->
#att{name=Name,
type=Type,
- len=Len,
+ att_len=AttLen,
+ disk_len=AttLen,
md5= <<>>,
revpos=0,
data={Fd,Sp}}
@@ -1008,4 +1203,7 @@ make_doc(#db{fd=Fd}=Db, Id, Deleted, Bp, RevisionPath) ->
}.
-
+increment_stat(#db{is_sys_db = true}, _Stat) ->
+ ok;
+increment_stat(#db{}, Stat) ->
+ couch_stats_collector:increment(Stat).
diff --git a/src/couchdb/couch_db.hrl b/src/couchdb/couch_db.hrl
index 99ef8997..74d2c630 100644
--- a/src/couchdb/couch_db.hrl
+++ b/src/couchdb/couch_db.hrl
@@ -20,7 +20,6 @@
-define(JSON_ENCODE(V), couch_util:json_encode(V)).
-define(JSON_DECODE(V), couch_util:json_decode(V)).
--define(b2a(V), list_to_atom(binary_to_list(V))).
-define(b2l(V), binary_to_list(V)).
-define(l2b(V), list_to_binary(V)).
@@ -28,18 +27,23 @@
-define(LOG_DEBUG(Format, Args),
case couch_log:debug_on() of
- true -> error_logger:info_report(couch_debug, {Format, Args});
+ true ->
+ gen_event:sync_notify(error_logger,
+ {self(), couch_debug, {Format, Args}});
false -> ok
end).
-define(LOG_INFO(Format, Args),
case couch_log:info_on() of
- true -> error_logger:info_report(couch_info, {Format, Args});
+ true ->
+ gen_event:sync_notify(error_logger,
+ {self(), couch_info, {Format, Args}});
false -> ok
end).
-define(LOG_ERROR(Format, Args),
- error_logger:error_report(couch_error, {Format, Args})).
+ gen_event:sync_notify(error_logger,
+ {self(), couch_error, {Format, Args}})).
-record(rev_info,
@@ -68,12 +72,15 @@
{mochi_req,
peer,
method,
+ requested_path_parts,
path_parts,
db_url_handlers,
user_ctx,
req_body = undefined,
design_url_handlers,
- auth
+ auth,
+ default_fun,
+ url_handlers
}).
@@ -99,10 +106,17 @@
{
name,
type,
- len,
+ att_len,
+ disk_len, % length of the attachment in its identity form
+ % (that is, without a content encoding applied to it)
+ % differs from att_len when encoding /= identity
md5= <<>>,
revpos=0,
- data
+ data,
+ encoding=identity % currently supported values are:
+ % identity, gzip
+ % additional values to support in the future:
+ % deflate, compress
}).
@@ -110,8 +124,7 @@
{
name=null,
roles=[],
- handler,
- user_doc
+ handler
}).
% This should be updated anytime a header change happens that requires more
@@ -124,7 +137,7 @@
% if the disk revision is incremented, then new upgrade logic will need to be
% added to couch_db_updater:init_db.
--define(LATEST_DISK_VERSION, 4).
+-define(LATEST_DISK_VERSION, 5).
-record(db_header,
{disk_version = ?LATEST_DISK_VERSION,
@@ -135,7 +148,7 @@
local_docs_btree_state = nil,
purge_seq = 0,
purged_docs = nil,
- admins_ptr = nil,
+ security_ptr = nil,
revs_limit = 1000
}).
@@ -155,12 +168,13 @@
name,
filepath,
validate_doc_funs = [],
- admins = [],
- admins_ptr = nil,
+ security = [],
+ security_ptr = nil,
user_ctx = #user_ctx{},
waiting_delayed_commit = nil,
revs_limit = 1000,
- fsync_options = []
+ fsync_options = [],
+ is_sys_db = false
}).
@@ -214,6 +228,7 @@
def_lang,
design_options=[],
views,
+ lib,
id_btree=nil,
current_seq=0,
purge_seq=0,
@@ -223,6 +238,8 @@
-record(view,
{id_num,
+ update_seq=0,
+ purge_seq=0,
map_names=[],
def,
btree=nil,
@@ -251,7 +268,13 @@
body = nil,
options = [
{response_format,binary},
- {inactivity_timeout, 30000}
+ {inactivity_timeout, 30000},
+ {max_sessions, list_to_integer(
+ couch_config:get("replicator", "max_http_sessions", "10")
+ )},
+ {max_pipeline_size, list_to_integer(
+ couch_config:get("replicator", "max_http_pipeline_size", "10")
+ )}
],
retries = 10,
pause = 500,
@@ -260,3 +283,17 @@
% small value used in revision trees to indicate the revision isn't stored
-define(REV_MISSING, []).
+
+-record(changes_args, {
+ feed = "normal",
+ dir = fwd,
+ since = 0,
+ limit = 1000000000000000,
+ style = main_only,
+ heartbeat,
+ timeout,
+ filter = "",
+ include_docs = false,
+ db_open_options = []
+}).
+
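
The new #changes_args record gathers every _changes option in one place, so
handlers can construct it once and pass it around instead of threading a dozen
loose parameters. A hedged usage sketch (field values are illustrative, and
the record requires including couch_db.hrl):

    %% A continuous, filtered feed starting after sequence 42; all other
    %% fields keep the defaults declared in the record above.
    Args = #changes_args{
        feed = "continuous",
        since = 42,
        include_docs = true,
        filter = "app/important"
    }.
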
diff --git a/src/couchdb/couch_db_updater.erl b/src/couchdb/couch_db_updater.erl
index 7292221a..633ae230 100644
--- a/src/couchdb/couch_db_updater.erl
+++ b/src/couchdb/couch_db_updater.erl
@@ -20,25 +20,38 @@
init({MainPid, DbName, Filepath, Fd, Options}) ->
+ process_flag(trap_exit, true),
case lists:member(create, Options) of
true ->
% create a new header and writes it to the file
Header = #db_header{},
ok = couch_file:write_header(Fd, Header),
% delete any old compaction files that might be hanging around
- file:delete(Filepath ++ ".compact");
+ RootDir = couch_config:get("couchdb", "database_dir", "."),
+ couch_file:delete(RootDir, Filepath ++ ".compact");
false ->
ok = couch_file:upgrade_old_header(Fd, <<$g, $m, $k, 0>>), % 09 UPGRADE CODE
- {ok, Header} = couch_file:read_header(Fd)
+ case couch_file:read_header(Fd) of
+ {ok, Header} ->
+ ok;
+ no_valid_header ->
+ % create a new header and write it to the file
+ Header = #db_header{},
+ ok = couch_file:write_header(Fd, Header),
+ % delete any old compaction files that might be hanging around
+ file:delete(Filepath ++ ".compact")
+ end
end,
Db = init_db(DbName, Filepath, Fd, Header),
Db2 = refresh_validate_doc_funs(Db),
- {ok, Db2#db{main_pid=MainPid}}.
+ {ok, Db2#db{main_pid = MainPid, is_sys_db = lists:member(sys_db, Options)}}.
-terminate(Reason, _Srv) ->
- couch_util:terminate_linked(Reason),
+terminate(_Reason, Db) ->
+ couch_file:close(Db#db.fd),
+ couch_util:shutdown_sync(Db#db.compactor_pid),
+ couch_util:shutdown_sync(Db#db.fd_ref_counter),
ok.
handle_call(get_db, _From, Db) ->
@@ -53,9 +66,9 @@ handle_call(increment_update_seq, _From, Db) ->
couch_db_update_notifier:notify({updated, Db#db.name}),
{reply, {ok, Db2#db.update_seq}, Db2};
-handle_call({set_admins, NewAdmins}, _From, Db) ->
- {ok, Ptr} = couch_file:append_term(Db#db.fd, NewAdmins),
- Db2 = commit_data(Db#db{admins=NewAdmins, admins_ptr=Ptr,
+handle_call({set_security, NewSec}, _From, Db) ->
+ {ok, Ptr} = couch_file:append_term(Db#db.fd, NewSec),
+ Db2 = commit_data(Db#db{security=NewSec, security_ptr=Ptr,
update_seq=Db#db.update_seq+1}),
ok = gen_server:call(Db2#db.main_pid, {db_updated, Db2}),
{reply, ok, Db2};
@@ -130,21 +143,22 @@ handle_call({purge_docs, IdRevs}, _From, Db) ->
ok = gen_server:call(Db2#db.main_pid, {db_updated, Db2}),
couch_db_update_notifier:notify({updated, Db#db.name}),
- {reply, {ok, Db2#db.update_seq, IdRevsPurged}, Db2}.
-
-
-handle_cast(start_compact, Db) ->
+ {reply, {ok, (Db2#db.header)#db_header.purge_seq, IdRevsPurged}, Db2};
+handle_call(start_compact, _From, Db) ->
case Db#db.compactor_pid of
nil ->
?LOG_INFO("Starting compaction for db \"~s\"", [Db#db.name]),
Pid = spawn_link(fun() -> start_copy_compact(Db) end),
Db2 = Db#db{compactor_pid=Pid},
ok = gen_server:call(Db#db.main_pid, {db_updated, Db2}),
- {noreply, Db2};
+ {reply, ok, Db2};
_ ->
% compact currently running, this is a no-op
- {noreply, Db}
- end;
+ {reply, ok, Db}
+ end.
+
+
+
handle_cast({compact_done, CompactFilepath}, #db{filepath=Filepath}=Db) ->
{ok, NewFd} = couch_file:open(CompactFilepath),
{ok, NewHeader} = couch_file:read_header(NewFd),
@@ -158,17 +172,25 @@ handle_cast({compact_done, CompactFilepath}, #db{filepath=Filepath}=Db) ->
fun(Value, _Offset, Acc) -> {ok, [Value | Acc]} end, []),
{ok, NewLocalBtree} = couch_btree:add(NewDb#db.local_docs_btree, LocalDocs),
- NewDb2 = commit_data( NewDb#db{local_docs_btree=NewLocalBtree,
- main_pid = Db#db.main_pid,filepath = Filepath}),
+ NewDb2 = commit_data(NewDb#db{
+ local_docs_btree = NewLocalBtree,
+ main_pid = Db#db.main_pid,
+ filepath = Filepath,
+ instance_start_time = Db#db.instance_start_time,
+ revs_limit = Db#db.revs_limit
+ }),
?LOG_DEBUG("CouchDB swapping files ~s and ~s.",
[Filepath, CompactFilepath]),
- file:delete(Filepath),
+ RootDir = couch_config:get("couchdb", "database_dir", "."),
+ couch_file:delete(RootDir, Filepath),
ok = file:rename(CompactFilepath, Filepath),
close_db(Db),
- ok = gen_server:call(Db#db.main_pid, {db_updated, NewDb2}),
+ NewDb3 = refresh_validate_doc_funs(NewDb2),
+ ok = gen_server:call(Db#db.main_pid, {db_updated, NewDb3}, infinity),
+ couch_db_update_notifier:notify({compacted, NewDb3#db.name}),
?LOG_INFO("Compaction for db \"~s\" completed.", [Db#db.name]),
- {noreply, NewDb2#db{compactor_pid=nil}};
+ {noreply, NewDb3#db{compactor_pid=nil}};
false ->
?LOG_INFO("Compaction file still behind main file "
"(update seq=~p. compact update seq=~p). Retrying.",
@@ -180,11 +202,11 @@ handle_cast({compact_done, CompactFilepath}, #db{filepath=Filepath}=Db) ->
end.
-handle_info({update_docs, Client, GroupedDocs, NonRepDocs, MergeConflicts,
+handle_info({update_docs, Client, GroupedDocs, NonRepDocs, MergeConflicts,
FullCommit}, Db) ->
GroupedDocs2 = [[{Client, D} || D <- DocGroup] || DocGroup <- GroupedDocs],
if NonRepDocs == [] ->
- {GroupedDocs3, Clients, FullCommit2} = collect_updates(GroupedDocs2,
+ {GroupedDocs3, Clients, FullCommit2} = collect_updates(GroupedDocs2,
[Client], MergeConflicts, FullCommit);
true ->
GroupedDocs3 = GroupedDocs2,
@@ -192,7 +214,7 @@ handle_info({update_docs, Client, GroupedDocs, NonRepDocs, MergeConflicts,
Clients = [Client]
end,
NonRepDocs2 = [{Client, NRDoc} || NRDoc <- NonRepDocs],
- try update_docs_int(Db, GroupedDocs3, NonRepDocs2, MergeConflicts,
+ try update_docs_int(Db, GroupedDocs3, NonRepDocs2, MergeConflicts,
FullCommit2) of
{ok, Db2} ->
ok = gen_server:call(Db#db.main_pid, {db_updated, Db2}),
@@ -207,6 +229,9 @@ handle_info({update_docs, Client, GroupedDocs, NonRepDocs, MergeConflicts,
[catch(ClientPid ! {retry, self()}) || ClientPid <- Clients],
{noreply, Db}
end;
+handle_info(delayed_commit, #db{waiting_delayed_commit=nil}=Db) ->
+ % no outstanding delayed commits, ignore
+ {noreply, Db};
handle_info(delayed_commit, Db) ->
case commit_data(Db) of
Db ->
@@ -214,7 +239,11 @@ handle_info(delayed_commit, Db) ->
Db2 ->
ok = gen_server:call(Db2#db.main_pid, {db_updated, Db2}),
{noreply, Db2}
- end.
+ end;
+handle_info({'EXIT', _Pid, normal}, Db) ->
+ {noreply, Db};
+handle_info({'EXIT', _Pid, Reason}, Db) ->
+ {stop, Reason, Db}.
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
@@ -243,7 +272,7 @@ collect_updates(GroupedDocsAcc, ClientsAcc, MergeConflicts, FullCommit) ->
{update_docs, Client, GroupedDocs, [], MergeConflicts, FullCommit2} ->
GroupedDocs2 = [[{Client, Doc} || Doc <- DocGroup]
|| DocGroup <- GroupedDocs],
- GroupedDocsAcc2 =
+ GroupedDocsAcc2 =
merge_updates(GroupedDocsAcc, GroupedDocs2, []),
collect_updates(GroupedDocsAcc2, [Client | ClientsAcc],
MergeConflicts, (FullCommit or FullCommit2))
@@ -326,7 +355,7 @@ btree_by_seq_reduce(rereduce, Reds) ->
simple_upgrade_record(Old, New) when tuple_size(Old) =:= tuple_size(New) ->
Old;
-simple_upgrade_record(Old, New) ->
+simple_upgrade_record(Old, New) when tuple_size(Old) < tuple_size(New) ->
OldSz = tuple_size(Old),
NewValuesTail =
lists:sublist(tuple_to_list(New), OldSz + 1, tuple_size(New) - OldSz),
@@ -337,9 +366,10 @@ init_db(DbName, Filepath, Fd, Header0) ->
Header1 = simple_upgrade_record(Header0, #db_header{}),
Header =
case element(2, Header1) of
- 1 -> Header1#db_header{unused = 0}; % 0.9
- 2 -> Header1#db_header{unused = 0}; % post 0.9 and pre 0.10
- 3 -> Header1; % post 0.9 and pre 0.10
+ 1 -> Header1#db_header{unused = 0, security_ptr = nil}; % 0.9
+ 2 -> Header1#db_header{unused = 0, security_ptr = nil}; % post 0.9 and pre 0.10
+ 3 -> Header1#db_header{security_ptr = nil}; % post 0.9 and pre 0.10
+ 4 -> Header1#db_header{security_ptr = nil}; % 0.10 and pre 0.11
?LATEST_DISK_VERSION -> Header1;
_ -> throw({database_disk_version_error, "Incorrect disk header version"})
end,
@@ -362,12 +392,12 @@ init_db(DbName, Filepath, Fd, Header0) ->
{join, fun(X,Y) -> btree_by_seq_join(X,Y) end},
{reduce, fun(X,Y) -> btree_by_seq_reduce(X,Y) end}]),
{ok, LocalDocsBtree} = couch_btree:open(Header#db_header.local_docs_btree_state, Fd),
- case Header#db_header.admins_ptr of
+ case Header#db_header.security_ptr of
nil ->
- Admins = [],
- AdminsPtr = nil;
- AdminsPtr ->
- {ok, Admins} = couch_file:pread_term(Fd, AdminsPtr)
+ Security = [],
+ SecurityPtr = nil;
+ SecurityPtr ->
+ {ok, Security} = couch_file:pread_term(Fd, SecurityPtr)
end,
% convert start time tuple to microsecs and store as a binary string
{MegaSecs, Secs, MicroSecs} = now(),
@@ -386,8 +416,8 @@ init_db(DbName, Filepath, Fd, Header0) ->
update_seq = Header#db_header.update_seq,
name = DbName,
filepath = Filepath,
- admins = Admins,
- admins_ptr = AdminsPtr,
+ security = Security,
+ security_ptr = SecurityPtr,
instance_start_time = StartTime,
revs_limit = Header#db_header.revs_limit,
fsync_options = FsyncOptions
@@ -429,8 +459,9 @@ flush_trees(#db{fd=Fd,header=Header}=Db,
case Atts of
[] -> [];
[#att{data={BinFd, _Sp}} | _ ] when BinFd == Fd ->
- [{N,T,P,L,R,M}
- || #att{name=N,type=T,data={_,P},md5=M,revpos=R,len=L}
+ [{N,T,P,AL,DL,R,M,E}
+ || #att{name=N,type=T,data={_,P},md5=M,revpos=R,
+ att_len=AL,disk_len=DL,encoding=E}
<- Atts];
_ ->
% BinFd must not equal our Fd. This can happen when a database
@@ -458,16 +489,16 @@ send_result(Client, Id, OriginalRevs, NewResult) ->
% used to send a result to the client
catch(Client ! {result, self(), {{Id, OriginalRevs}, NewResult}}).
-merge_rev_trees(_MergeConflicts, [], [], AccNewInfos, AccRemoveSeqs, AccSeq) ->
+merge_rev_trees(_Limit, _Merge, [], [], AccNewInfos, AccRemoveSeqs, AccSeq) ->
{ok, lists:reverse(AccNewInfos), AccRemoveSeqs, AccSeq};
-merge_rev_trees(MergeConflicts, [NewDocs|RestDocsList],
+merge_rev_trees(Limit, MergeConflicts, [NewDocs|RestDocsList],
[OldDocInfo|RestOldInfo], AccNewInfos, AccRemoveSeqs, AccSeq) ->
#full_doc_info{id=Id,rev_tree=OldTree,deleted=OldDeleted,update_seq=OldSeq}
= OldDocInfo,
- NewRevTree = lists:foldl(
+ NewRevTree0 = lists:foldl(
fun({Client, #doc{revs={Pos,[_Rev|PrevRevs]}}=NewDoc}, AccTree) ->
if not MergeConflicts ->
- case couch_key_tree:merge(AccTree, [couch_db:doc_to_tree(NewDoc)]) of
+ case couch_key_tree:merge(AccTree, couch_db:doc_to_tree(NewDoc)) of
{_NewTree, conflicts} when (not OldDeleted) ->
send_result(Client, Id, {Pos-1,PrevRevs}, conflict),
AccTree;
@@ -492,15 +523,15 @@ merge_rev_trees(MergeConflicts, [NewDocs|RestDocsList],
% this means we are recreating a brand new document
% into a state that already existed before.
% put the rev into a subsequent edit of the deletion
- #doc_info{revs=[#rev_info{rev={OldPos,OldRev}}|_]} =
+ #doc_info{revs=[#rev_info{rev={OldPos,OldRev}}|_]} =
couch_doc:to_doc_info(OldDocInfo),
NewRevId = couch_db:new_revid(
NewDoc#doc{revs={OldPos, [OldRev]}}),
NewDoc2 = NewDoc#doc{revs={OldPos + 1, [NewRevId, OldRev]}},
{NewTree2, _} = couch_key_tree:merge(AccTree,
- [couch_db:doc_to_tree(NewDoc2)]),
+ couch_db:doc_to_tree(NewDoc2)),
% we changed the rev id, this tells the caller we did
- send_result(Client, Id, {Pos-1,PrevRevs},
+ send_result(Client, Id, {Pos-1,PrevRevs},
{ok, {OldPos + 1, NewRevId}}),
NewTree2;
true ->
@@ -512,15 +543,16 @@ merge_rev_trees(MergeConflicts, [NewDocs|RestDocsList],
end;
true ->
{NewTree, _} = couch_key_tree:merge(AccTree,
- [couch_db:doc_to_tree(NewDoc)]),
+ couch_db:doc_to_tree(NewDoc)),
NewTree
- end
+ end
end,
OldTree, NewDocs),
+ NewRevTree = couch_key_tree:stem(NewRevTree0, Limit),
if NewRevTree == OldTree ->
% nothing changed
- merge_rev_trees(MergeConflicts, RestDocsList, RestOldInfo, AccNewInfos,
- AccRemoveSeqs, AccSeq);
+ merge_rev_trees(Limit, MergeConflicts, RestDocsList, RestOldInfo,
+ AccNewInfos, AccRemoveSeqs, AccSeq);
true ->
% we have updated the document, give it a new seq #
NewInfo = #full_doc_info{id=Id,update_seq=AccSeq+1,rev_tree=NewRevTree},
@@ -528,8 +560,8 @@ merge_rev_trees(MergeConflicts, [NewDocs|RestDocsList],
0 -> AccRemoveSeqs;
_ -> [OldSeq | AccRemoveSeqs]
end,
- merge_rev_trees(MergeConflicts, RestDocsList, RestOldInfo,
- [NewInfo|AccNewInfos], RemoveSeqs, AccSeq+1)
+ merge_rev_trees(Limit, MergeConflicts, RestDocsList, RestOldInfo,
+ [NewInfo|AccNewInfos], RemoveSeqs, AccSeq+1)
end.
@@ -552,7 +584,8 @@ update_docs_int(Db, DocsList, NonRepDocs, MergeConflicts, FullCommit) ->
#db{
fulldocinfo_by_id_btree = DocInfoByIdBTree,
docinfo_by_seq_btree = DocInfoBySeqBTree,
- update_seq = LastSeq
+ update_seq = LastSeq,
+ revs_limit = RevsLimit
} = Db,
Ids = [Id || [{_Client, #doc{id=Id}}|_] <- DocsList],
% lookup up the old documents, if they exist.
@@ -565,11 +598,9 @@ update_docs_int(Db, DocsList, NonRepDocs, MergeConflicts, FullCommit) ->
end,
Ids, OldDocLookups),
% Merge the new docs into the revision trees.
- {ok, NewDocInfos0, RemoveSeqs, NewSeq} = merge_rev_trees(
+ {ok, NewFullDocInfos, RemoveSeqs, NewSeq} = merge_rev_trees(RevsLimit,
MergeConflicts, DocsList, OldDocInfos, [], [], LastSeq),
- NewFullDocInfos = stem_full_doc_infos(Db, NewDocInfos0),
-
% All documents are now ready to write.
{ok, Db2} = update_local_docs(Db, NonRepDocs),
@@ -654,35 +685,35 @@ db_to_header(Db, Header) ->
docinfo_by_seq_btree_state = couch_btree:get_state(Db#db.docinfo_by_seq_btree),
fulldocinfo_by_id_btree_state = couch_btree:get_state(Db#db.fulldocinfo_by_id_btree),
local_docs_btree_state = couch_btree:get_state(Db#db.local_docs_btree),
- admins_ptr = Db#db.admins_ptr,
+ security_ptr = Db#db.security_ptr,
revs_limit = Db#db.revs_limit}.
-commit_data(#db{fd=Fd,header=OldHeader,fsync_options=FsyncOptions}=Db, Delay) ->
- Header = db_to_header(Db, OldHeader),
- if OldHeader == Header ->
- Db;
- Delay and (Db#db.waiting_delayed_commit == nil) ->
- Db#db{waiting_delayed_commit=
- erlang:send_after(1000, self(), delayed_commit)};
- Delay ->
- Db;
- true ->
- if Db#db.waiting_delayed_commit /= nil ->
- case erlang:cancel_timer(Db#db.waiting_delayed_commit) of
- false -> receive delayed_commit -> ok after 0 -> ok end;
- _ -> ok
- end;
- true -> ok
- end,
+commit_data(#db{waiting_delayed_commit=nil} = Db, true) ->
+ Db#db{waiting_delayed_commit=erlang:send_after(1000,self(),delayed_commit)};
+commit_data(Db, true) ->
+ Db;
+commit_data(Db, _) ->
+ #db{
+ fd = Fd,
+ filepath = Filepath,
+ header = OldHeader,
+ fsync_options = FsyncOptions,
+ waiting_delayed_commit = Timer
+ } = Db,
+ if is_reference(Timer) -> erlang:cancel_timer(Timer); true -> ok end,
+ case db_to_header(Db, OldHeader) of
+ OldHeader ->
+ Db#db{waiting_delayed_commit=nil};
+ Header ->
case lists:member(before_header, FsyncOptions) of
- true -> ok = couch_file:sync(Fd);
+ true -> ok = couch_file:sync(Filepath);
_ -> ok
end,
ok = couch_file:write_header(Fd, Header),
case lists:member(after_header, FsyncOptions) of
- true -> ok = couch_file:sync(Fd);
+ true -> ok = couch_file:sync(Filepath);
_ -> ok
end,
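
The rewritten commit_data/2 makes the delayed-commit timer discipline
explicit: the first delayed request arms a one-second timer, subsequent
delayed requests while the timer is armed are no-ops, and an immediate commit
cancels any pending timer before writing the header. A condensed sketch of
that discipline (do_commit/0 is a hypothetical placeholder; the function
returns the new timer state):

    maybe_commit(nil, delayed) ->
        erlang:send_after(1000, self(), delayed_commit);
    maybe_commit(Timer, delayed) ->
        Timer;                                  % already armed, no-op
    maybe_commit(Timer, now) ->
        if is_reference(Timer) -> erlang:cancel_timer(Timer);
           true -> ok
        end,
        do_commit(),
        nil.
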
@@ -696,21 +727,41 @@ copy_doc_attachments(#db{fd=SrcFd}=SrcDb, {Pos,_RevId}, SrcSp, DestFd) ->
{ok, {BodyData, BinInfos}} = couch_db:read_doc(SrcDb, SrcSp),
% copy the bin values
NewBinInfos = lists:map(
- fun({Name, {Type, BinSp, Len}}) when is_tuple(BinSp) orelse BinSp == null ->
+ fun({Name, {Type, BinSp, AttLen}}) when is_tuple(BinSp) orelse BinSp == null ->
% 09 UPGRADE CODE
- {NewBinSp, Len, Md5} = couch_stream:old_copy_to_new_stream(SrcFd, BinSp, Len, DestFd),
- {Name, Type, NewBinSp, Len, Pos, Md5};
- ({Name, {Type, BinSp, Len}}) ->
+ {NewBinSp, AttLen, AttLen, Md5, _IdentityMd5} =
+ couch_stream:old_copy_to_new_stream(SrcFd, BinSp, AttLen, DestFd),
+ {Name, Type, NewBinSp, AttLen, AttLen, Pos, Md5, identity};
+ ({Name, {Type, BinSp, AttLen}}) ->
% 09 UPGRADE CODE
- {NewBinSp, Len, Md5} = couch_stream:copy_to_new_stream(SrcFd, BinSp, DestFd),
- {Name, Type, NewBinSp, Len, Pos, Md5};
- ({Name, Type, BinSp, Len, RevPos, <<>>}) when is_tuple(BinSp) orelse BinSp == null ->
+ {NewBinSp, AttLen, AttLen, Md5, _IdentityMd5} =
+ couch_stream:copy_to_new_stream(SrcFd, BinSp, DestFd),
+ {Name, Type, NewBinSp, AttLen, AttLen, Pos, Md5, identity};
+ ({Name, Type, BinSp, AttLen, _RevPos, <<>>}) when
+ is_tuple(BinSp) orelse BinSp == null ->
% 09 UPGRADE CODE
- {NewBinSp, Len, Md5} = couch_stream:old_copy_to_new_stream(SrcFd, BinSp, Len, DestFd),
- {Name, Type, NewBinSp, Len, Len, Md5};
- ({Name, Type, BinSp, Len, RevPos, Md5}) ->
- {NewBinSp, Len, Md5} = couch_stream:copy_to_new_stream(SrcFd, BinSp, DestFd),
- {Name, Type, NewBinSp, Len, RevPos, Md5}
+ {NewBinSp, AttLen, AttLen, Md5, _IdentityMd5} =
+ couch_stream:old_copy_to_new_stream(SrcFd, BinSp, AttLen, DestFd),
+ {Name, Type, NewBinSp, AttLen, AttLen, AttLen, Md5, identity};
+ ({Name, Type, BinSp, AttLen, RevPos, Md5}) ->
+ % 010 UPGRADE CODE
+ {NewBinSp, AttLen, AttLen, Md5, _IdentityMd5} =
+ couch_stream:copy_to_new_stream(SrcFd, BinSp, DestFd),
+ {Name, Type, NewBinSp, AttLen, AttLen, RevPos, Md5, identity};
+ ({Name, Type, BinSp, AttLen, DiskLen, RevPos, Md5, Enc1}) ->
+ {NewBinSp, AttLen, _, Md5, _IdentityMd5} =
+ couch_stream:copy_to_new_stream(SrcFd, BinSp, DestFd),
+ Enc = case Enc1 of
+ true ->
+ % 0110 UPGRADE CODE
+ gzip;
+ false ->
+ % 0110 UPGRADE CODE
+ identity;
+ _ ->
+ Enc1
+ end,
+ {Name, Type, NewBinSp, AttLen, DiskLen, RevPos, Md5, Enc}
end, BinInfos),
{BodyData, NewBinInfos}.
@@ -724,7 +775,10 @@ copy_rev_tree_attachments(SrcDb, DestFd, Tree) ->
end, Tree).
-copy_docs(Db, #db{fd=DestFd}=NewDb, InfoBySeq, Retry) ->
+copy_docs(Db, #db{fd=DestFd}=NewDb, InfoBySeq0, Retry) ->
+ % COUCHDB-968, make sure we prune duplicates during compaction
+ InfoBySeq = lists:usort(fun(#doc_info{id=A}, #doc_info{id=B}) -> A =< B end,
+ InfoBySeq0),
Ids = [Id || #doc_info{id=Id} <- InfoBySeq],
LookupResults = couch_btree:lookup(Db#db.fulldocinfo_by_id_btree, Ids),
@@ -790,7 +844,7 @@ copy_compact(Db, NewDb0, Retry) ->
couch_task_status:set_update_frequency(500),
{ok, _, {NewDb2, Uncopied, TotalChanges}} =
- couch_btree:foldl(Db#db.docinfo_by_seq_btree, EnumBySeqFun,
+ couch_btree:foldl(Db#db.docinfo_by_seq_btree, EnumBySeqFun,
{NewDb, [], 0},
[{start_key, NewDb#db.update_seq + 1}]),
@@ -799,9 +853,9 @@ copy_compact(Db, NewDb0, Retry) ->
NewDb3 = copy_docs(Db, NewDb2, lists:reverse(Uncopied), Retry),
% copy misc header values
- if NewDb3#db.admins /= Db#db.admins ->
- {ok, Ptr} = couch_file:append_term(NewDb3#db.fd, Db#db.admins),
- NewDb4 = NewDb3#db{admins=Db#db.admins, admins_ptr=Ptr};
+ if NewDb3#db.security /= Db#db.security ->
+ {ok, Ptr} = couch_file:append_term(NewDb3#db.fd, Db#db.security),
+ NewDb4 = NewDb3#db{security=Db#db.security, security_ptr=Ptr};
true ->
NewDb4 = NewDb3
end,
@@ -815,7 +869,12 @@ start_copy_compact(#db{name=Name,filepath=Filepath}=Db) ->
{ok, Fd} ->
couch_task_status:add_task(<<"Database Compaction">>, <<Name/binary, " retry">>, <<"Starting">>),
Retry = true,
- {ok, Header} = couch_file:read_header(Fd);
+ case couch_file:read_header(Fd) of
+ {ok, Header} ->
+ ok;
+ no_valid_header ->
+ ok = couch_file:write_header(Fd, Header=#db_header{})
+ end;
{error, enoent} ->
couch_task_status:add_task(<<"Database Compaction">>, Name, <<"Starting">>),
{ok, Fd} = couch_file:open(CompactFile, [create]),
@@ -823,8 +882,8 @@ start_copy_compact(#db{name=Name,filepath=Filepath}=Db) ->
ok = couch_file:write_header(Fd, Header=#db_header{})
end,
NewDb = init_db(Name, CompactFile, Fd, Header),
+ unlink(Fd),
NewDb2 = copy_compact(Db, NewDb, Retry),
-
- gen_server:cast(Db#db.update_pid, {compact_done, CompactFile}),
- close_db(NewDb2).
+ close_db(NewDb2),
+ gen_server:cast(Db#db.update_pid, {compact_done, CompactFile}).
diff --git a/src/couchdb/couch_doc.erl b/src/couchdb/couch_doc.erl
index ba5c7450..7c64b8a9 100644
--- a/src/couchdb/couch_doc.erl
+++ b/src/couchdb/couch_doc.erl
@@ -13,7 +13,7 @@
-module(couch_doc).
-export([to_doc_info/1,to_doc_info_path/1,parse_rev/1,parse_revs/1,rev_to_str/1,revs_to_strs/1]).
--export([att_foldl/3,get_validate_doc_fun/1]).
+-export([att_foldl/3,range_att_foldl/5,att_foldl_decode/3,get_validate_doc_fun/1]).
-export([from_json_obj/1,to_json_obj/2,has_stubs/1, merge_stubs/2]).
-export([validate_docid/1]).
-export([doc_from_multi_part_stream/2]).
@@ -27,8 +27,8 @@ to_json_rev(0, []) ->
to_json_rev(Start, [FirstRevId|_]) ->
[{<<"_rev">>, ?l2b([integer_to_list(Start),"-",revid_to_str(FirstRevId)])}].
-to_json_body(true, _Body) ->
- [{<<"_deleted">>, true}];
+to_json_body(true, {Body}) ->
+ Body ++ [{<<"_deleted">>, true}];
to_json_body(false, {Body}) ->
Body.
@@ -73,35 +73,48 @@ to_json_meta(Meta) ->
end, Meta).
to_json_attachments(Attachments, Options) ->
- case lists:member(attachments, Options) of
- true -> % return all the binaries
- to_json_attachments(Attachments, 0, lists:member(follows, Options));
- false ->
- % note the default is [], because this sorts higher than all numbers.
- % and will return all the binaries.
- RevPos = proplists:get_value(atts_after_revpos, Options, []),
- to_json_attachments(Attachments, RevPos, lists:member(follows, Options))
- end.
-
-to_json_attachments([], _RevPosIncludeAfter, _DataToFollow) ->
+ to_json_attachments(
+ Attachments,
+ lists:member(attachments, Options),
+ lists:member(follows, Options),
+ lists:member(att_encoding_info, Options)
+ ).
+
+to_json_attachments([], _OutputData, _DataToFollow, _ShowEncInfo) ->
[];
-to_json_attachments(Atts, RevPosIncludeAfter, DataToFollow) ->
+to_json_attachments(Atts, OutputData, DataToFollow, ShowEncInfo) ->
AttProps = lists:map(
- fun(#att{len=Len}=Att) ->
+ fun(#att{disk_len=DiskLen, att_len=AttLen, encoding=Enc}=Att) ->
{Att#att.name, {[
{<<"content_type">>, Att#att.type},
{<<"revpos">>, Att#att.revpos}
] ++
- if Att#att.revpos > RevPosIncludeAfter ->
+ if not OutputData orelse Att#att.data == stub ->
+ [{<<"length">>, DiskLen}, {<<"stub">>, true}];
+ true ->
if DataToFollow ->
- [{<<"length">>, Len}, {<<"follows">>, true}];
+ [{<<"length">>, DiskLen}, {<<"follows">>, true}];
true ->
- [{<<"data">>,
- couch_util:encodeBase64(att_to_iolist(Att))}]
- end;
- true ->
- [{<<"length">>, Len}, {<<"stub">>, true}]
- end
+ AttData = case Enc of
+ gzip ->
+ zlib:gunzip(att_to_bin(Att));
+ identity ->
+ att_to_bin(Att)
+ end,
+ [{<<"data">>, base64:encode(AttData)}]
+ end
+ end ++
+ case {ShowEncInfo, Enc} of
+ {false, _} ->
+ [];
+ {true, identity} ->
+ [];
+ {true, _} ->
+ [
+ {<<"encoding">>, couch_util:to_binary(Enc)},
+ {<<"encoded_length">>, AttLen}
+ ]
+ end
}}
end, Atts),
[{<<"_attachments">>, {AttProps}}].
@@ -182,34 +195,38 @@ transfer_fields([{<<"_rev">>, _Rev} | Rest], Doc) ->
transfer_fields([{<<"_attachments">>, {JsonBins}} | Rest], Doc) ->
Atts = lists:map(fun({Name, {BinProps}}) ->
- case proplists:get_value(<<"stub">>, BinProps) of
+ case couch_util:get_value(<<"stub">>, BinProps) of
true ->
- Type = proplists:get_value(<<"content_type">>, BinProps),
- Length = proplists:get_value(<<"length">>, BinProps),
- RevPos = proplists:get_value(<<"revpos">>, BinProps, 0),
- #att{name=Name, data=stub, type=Type, len=Length, revpos=RevPos};
+ Type = couch_util:get_value(<<"content_type">>, BinProps),
+ RevPos = couch_util:get_value(<<"revpos">>, BinProps, nil),
+ DiskLen = couch_util:get_value(<<"length">>, BinProps),
+ {Enc, EncLen} = att_encoding_info(BinProps),
+ #att{name=Name, data=stub, type=Type, att_len=EncLen,
+ disk_len=DiskLen, encoding=Enc, revpos=RevPos};
_ ->
- Type = proplists:get_value(<<"content_type">>, BinProps,
+ Type = couch_util:get_value(<<"content_type">>, BinProps,
?DEFAULT_ATTACHMENT_CONTENT_TYPE),
- RevPos = proplists:get_value(<<"revpos">>, BinProps, 0),
- case proplists:get_value(<<"follows">>, BinProps) of
+ RevPos = couch_util:get_value(<<"revpos">>, BinProps, 0),
+ case couch_util:get_value(<<"follows">>, BinProps) of
true ->
- #att{name=Name, data=follows, type=Type,
- len=proplists:get_value(<<"length">>, BinProps),
- revpos=RevPos};
+ DiskLen = couch_util:get_value(<<"length">>, BinProps),
+ {Enc, EncLen} = att_encoding_info(BinProps),
+ #att{name=Name, data=follows, type=Type, encoding=Enc,
+ att_len=EncLen, disk_len=DiskLen, revpos=RevPos};
_ ->
- Value = proplists:get_value(<<"data">>, BinProps),
- Bin = couch_util:decodeBase64(Value),
- #att{name=Name, data=Bin, type=Type, len=size(Bin),
- revpos=RevPos}
+ Value = couch_util:get_value(<<"data">>, BinProps),
+ Bin = base64:decode(Value),
+ LenBin = size(Bin),
+ #att{name=Name, data=Bin, type=Type, att_len=LenBin,
+ disk_len=LenBin, revpos=RevPos}
end
end
end, JsonBins),
transfer_fields(Rest, Doc#doc{atts=Atts});
transfer_fields([{<<"_revisions">>, {Props}} | Rest], Doc) ->
- RevIds = proplists:get_value(<<"ids">>, Props),
- Start = proplists:get_value(<<"start">>, Props),
+ RevIds = couch_util:get_value(<<"ids">>, Props),
+ Start = couch_util:get_value(<<"start">>, Props),
if not is_integer(Start) ->
throw({doc_validation, "_revisions.start isn't an integer."});
not is_list(RevIds) ->
@@ -235,6 +252,17 @@ transfer_fields([{<<"_conflicts">>, _} | Rest], Doc) ->
transfer_fields([{<<"_deleted_conflicts">>, _} | Rest], Doc) ->
transfer_fields(Rest, Doc);
+% special fields for replication documents
+transfer_fields([{<<"_replication_state">>, _} = Field | Rest],
+ #doc{body=Fields} = Doc) ->
+ transfer_fields(Rest, Doc#doc{body=[Field|Fields]});
+transfer_fields([{<<"_replication_state_time">>, _} = Field | Rest],
+ #doc{body=Fields} = Doc) ->
+ transfer_fields(Rest, Doc#doc{body=[Field|Fields]});
+transfer_fields([{<<"_replication_id">>, _} = Field | Rest],
+ #doc{body=Fields} = Doc) ->
+ transfer_fields(Rest, Doc#doc{body=[Field|Fields]});
+
% unknown special field
transfer_fields([{<<"_",Name/binary>>, _} | _], _) ->
throw({doc_validation,
@@ -243,6 +271,16 @@ transfer_fields([{<<"_",Name/binary>>, _} | _], _) ->
transfer_fields([Field | Rest], #doc{body=Fields}=Doc) ->
transfer_fields(Rest, Doc#doc{body=[Field|Fields]}).
+att_encoding_info(BinProps) ->
+ DiskLen = couch_util:get_value(<<"length">>, BinProps),
+ case couch_util:get_value(<<"encoding">>, BinProps) of
+ undefined ->
+ {identity, DiskLen};
+ Enc ->
+ EncodedLen = couch_util:get_value(<<"encoded_length">>, BinProps, DiskLen),
+ {list_to_existing_atom(?b2l(Enc)), EncodedLen}
+ end.
+
to_doc_info(FullDocInfo) ->
{DocInfo, _Path} = to_doc_info_path(FullDocInfo),
DocInfo.
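
With the att_encoding_info option set, a stubbed gzip attachment now reports
both its identity length and the encoded length actually stored on disk. An
illustrative serialization (all values invented for the example):

    "_attachments": {
      "readme.txt": {
        "content_type": "text/plain",
        "revpos": 2,
        "length": 9031,
        "stub": true,
        "encoding": "gzip",
        "encoded_length": 3112
      }
    }

Here "length" is the decoded (disk_len) size clients receive by default,
while "encoded_length" is the att_len of the gzip stream on disk.
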
@@ -272,33 +310,51 @@ to_doc_info_path(#full_doc_info{id=Id,rev_tree=Tree}) ->
att_foldl(#att{data=Bin}, Fun, Acc) when is_binary(Bin) ->
Fun(Bin, Acc);
-att_foldl(#att{data={Fd,Sp},len=Len}, Fun, Acc) when is_tuple(Sp) orelse Sp == null ->
+att_foldl(#att{data={Fd,Sp},att_len=Len}, Fun, Acc) when is_tuple(Sp) orelse Sp == null ->
% 09 UPGRADE CODE
couch_stream:old_foldl(Fd, Sp, Len, Fun, Acc);
att_foldl(#att{data={Fd,Sp},md5=Md5}, Fun, Acc) ->
couch_stream:foldl(Fd, Sp, Md5, Fun, Acc);
-att_foldl(#att{data=DataFun,len=Len}, Fun, Acc) when is_function(DataFun) ->
+att_foldl(#att{data=DataFun,att_len=Len}, Fun, Acc) when is_function(DataFun) ->
fold_streamed_data(DataFun, Len, Fun, Acc).
+range_att_foldl(#att{data={Fd,Sp}}, From, To, Fun, Acc) ->
+ couch_stream:range_foldl(Fd, Sp, From, To, Fun, Acc).
-att_to_iolist(#att{data=Bin}) when is_binary(Bin) ->
+att_foldl_decode(#att{data={Fd,Sp},md5=Md5,encoding=Enc}, Fun, Acc) ->
+ couch_stream:foldl_decode(Fd, Sp, Md5, Enc, Fun, Acc);
+att_foldl_decode(#att{data=Fun2,att_len=Len, encoding=identity}, Fun, Acc) ->
+ fold_streamed_data(Fun2, Len, Fun, Acc).
+
+att_to_bin(#att{data=Bin}) when is_binary(Bin) ->
Bin;
-att_to_iolist(#att{data=Iolist}) when is_list(Iolist) ->
- Iolist;
-att_to_iolist(#att{data={Fd,Sp}}=Att) ->
- lists:reverse(att_foldl(Att,
- fun(Bin,Acc) -> [Bin|Acc] end, []));
-att_to_iolist(#att{data=DataFun, len=Len}) when is_function(DataFun)->
- lists:reverse(fold_streamed_data(DataFun, Len,
- fun(Data, Acc) -> [Data | Acc] end, [])).
+att_to_bin(#att{data=Iolist}) when is_list(Iolist) ->
+ iolist_to_binary(Iolist);
+att_to_bin(#att{data={_Fd,_Sp}}=Att) ->
+ iolist_to_binary(
+ lists:reverse(att_foldl(
+ Att,
+ fun(Bin,Acc) -> [Bin|Acc] end,
+ []
+ ))
+ );
+att_to_bin(#att{data=DataFun, att_len=Len}) when is_function(DataFun)->
+ iolist_to_binary(
+ lists:reverse(fold_streamed_data(
+ DataFun,
+ Len,
+ fun(Data, Acc) -> [Data | Acc] end,
+ []
+ ))
+ ).
get_validate_doc_fun(#doc{body={Props}}=DDoc) ->
- case proplists:get_value(<<"validate_doc_update">>, Props) of
+ case couch_util:get_value(<<"validate_doc_update">>, Props) of
undefined ->
nil;
_Else ->
- fun(EditDoc, DiskDoc, Ctx) ->
- couch_query_servers:validate_doc_update(DDoc, EditDoc, DiskDoc, Ctx)
+ fun(EditDoc, DiskDoc, Ctx, SecObj) ->
+ couch_query_servers:validate_doc_update(DDoc, EditDoc, DiskDoc, Ctx, SecObj)
end
end.
@@ -315,9 +371,10 @@ has_stubs([_Att|Rest]) ->
merge_stubs(#doc{id=Id,atts=MemBins}=StubsDoc, #doc{atts=DiskBins}) ->
BinDict = dict:from_list([{Name, Att} || #att{name=Name}=Att <- DiskBins]),
MergedBins = lists:map(
- fun(#att{name=Name, data=stub, revpos=RevPos}) ->
+ fun(#att{name=Name, data=stub, revpos=StubRevPos}) ->
case dict:find(Name, BinDict) of
- {ok, #att{revpos=RevPos}=DiskAtt} ->
+ {ok, #att{revpos=DiskRevPos}=DiskAtt}
+ when DiskRevPos == StubRevPos orelse StubRevPos == nil ->
DiskAtt;
_ ->
throw({missing_stub,
@@ -335,70 +392,103 @@ fold_streamed_data(RcvFun, LenLeft, Fun, Acc) when LenLeft > 0->
ResultAcc = Fun(Bin, Acc),
fold_streamed_data(RcvFun, LenLeft - size(Bin), Fun, ResultAcc).
-len_doc_to_multi_part_stream(Boundary,JsonBytes,Atts,AttsSinceRevPos) ->
- 2 + % "--"
- size(Boundary) +
- 36 + % "\r\ncontent-type: application/json\r\n\r\n"
- iolist_size(JsonBytes) +
- 4 + % "\r\n--"
- size(Boundary) +
- + lists:foldl(fun(#att{revpos=RevPos,len=Len}, AccAttsSize) ->
- if RevPos > AttsSinceRevPos ->
- AccAttsSize +
+len_doc_to_multi_part_stream(Boundary, JsonBytes, Atts, SendEncodedAtts) ->
+ AttsSize = lists:foldl(fun(#att{data=Data} = Att, AccAttsSize) ->
+ case Data of
+ stub ->
+ AccAttsSize;
+ _ ->
+ AccAttsSize +
4 + % "\r\n\r\n"
- Len +
+ case SendEncodedAtts of
+ true ->
+ Att#att.att_len;
+ _ ->
+ Att#att.disk_len
+ end +
4 + % "\r\n--"
- size(Boundary);
- true ->
- AccAttsSize
+ size(Boundary)
end
- end, 0, Atts) +
- 2. % "--"
-
-doc_to_multi_part_stream(Boundary,JsonBytes,Atts,AttsSinceRevPos,WriteFun) ->
- WriteFun([<<"--", Boundary/binary,
- "\r\ncontent-type: application/json\r\n\r\n">>,
- JsonBytes, <<"\r\n--", Boundary/binary>>]),
- atts_to_mp(Atts, Boundary, WriteFun, AttsSinceRevPos).
-
-atts_to_mp([], _Boundary, WriteFun, _AttsSinceRevPos) ->
+ end, 0, Atts),
+ if AttsSize == 0 ->
+ {<<"application/json">>, iolist_size(JsonBytes)};
+ true ->
+ {<<"multipart/related; boundary=\"", Boundary/binary, "\"">>,
+ 2 + % "--"
+ size(Boundary) +
+ 36 + % "\r\ncontent-type: application/json\r\n\r\n"
+ iolist_size(JsonBytes) +
+ 4 + % "\r\n--"
+ size(Boundary) +
+ + AttsSize +
+ 2 % "--"
+ }
+ end.
+
+doc_to_multi_part_stream(Boundary, JsonBytes, Atts, WriteFun,
+ SendEncodedAtts) ->
+ case lists:any(fun(#att{data=Data})-> Data /= stub end, Atts) of
+ true ->
+ WriteFun([<<"--", Boundary/binary,
+ "\r\ncontent-type: application/json\r\n\r\n">>,
+ JsonBytes, <<"\r\n--", Boundary/binary>>]),
+ atts_to_mp(Atts, Boundary, WriteFun, SendEncodedAtts);
+ false ->
+ WriteFun(JsonBytes)
+ end.
+
+atts_to_mp([], _Boundary, WriteFun, _SendEncAtts) ->
WriteFun(<<"--">>);
-atts_to_mp([#att{revpos=RevPos} = Att | RestAtts], Boundary, WriteFun,
- AttsSinceRevPos) when RevPos > AttsSinceRevPos ->
+atts_to_mp([#att{data=stub} | RestAtts], Boundary, WriteFun,
+ SendEncodedAtts) ->
+ atts_to_mp(RestAtts, Boundary, WriteFun, SendEncodedAtts);
+atts_to_mp([Att | RestAtts], Boundary, WriteFun,
+ SendEncodedAtts) ->
WriteFun(<<"\r\n\r\n">>),
- att_foldl(Att, fun(Data, ok) -> WriteFun(Data) end, ok),
+ AttFun = case SendEncodedAtts of
+ false ->
+ fun att_foldl_decode/3;
+ true ->
+ fun att_foldl/3
+ end,
+ AttFun(Att, fun(Data, _) -> WriteFun(Data) end, ok),
WriteFun(<<"\r\n--", Boundary/binary>>),
- atts_to_mp(RestAtts, Boundary, WriteFun, AttsSinceRevPos);
-atts_to_mp([_ | RestAtts], Boundary, WriteFun, AttsSinceRevPos) ->
- atts_to_mp(RestAtts, Boundary, WriteFun, AttsSinceRevPos).
+ atts_to_mp(RestAtts, Boundary, WriteFun, SendEncodedAtts).
doc_from_multi_part_stream(ContentType, DataFun) ->
- Parser = spawn_link(fun() ->
+ Self = self(),
+ Parser = spawn_link(fun() ->
couch_httpd:parse_multipart_request(ContentType, DataFun,
- fun(Next)-> mp_parse_doc(Next, []) end)
+ fun(Next)-> mp_parse_doc(Next, []) end),
+ unlink(Self)
end),
Parser ! {get_doc_bytes, self()},
- receive {doc_bytes, DocBytes} -> ok end,
- Doc = from_json_obj(?JSON_DECODE(DocBytes)),
- % go through the attachments looking for 'follows' in the data,
- % replace with function that reads the data from MIME stream.
- ReadAttachmentDataFun = fun() ->
- Parser ! {get_bytes, self()},
- receive {bytes, Bytes} -> Bytes end
- end,
- Atts2 = lists:map(
- fun(#att{data=follows}=A) ->
- A#att{data=ReadAttachmentDataFun};
- (A) ->
- A
- end, Doc#doc.atts),
- Doc#doc{atts=Atts2}.
+ receive
+ {doc_bytes, DocBytes} ->
+ erlang:put(mochiweb_request_recv, true),
+ Doc = from_json_obj(?JSON_DECODE(DocBytes)),
+        % Go through the attachments looking for 'follows' in the data and
+        % replace it with a function that reads the data from the MIME stream.
+ ReadAttachmentDataFun = fun() ->
+ Parser ! {get_bytes, self()},
+ receive {bytes, Bytes} -> Bytes end
+ end,
+ Atts2 = lists:map(
+ fun(#att{data=follows}=A) ->
+ A#att{data=ReadAttachmentDataFun};
+ (A) ->
+ A
+ end, Doc#doc.atts),
+ {ok, Doc#doc{atts=Atts2}}
+ end.
mp_parse_doc({headers, H}, []) ->
- {"application/json", _} = proplists:get_value("content-type", H),
- fun (Next) ->
- mp_parse_doc(Next, [])
+ case couch_util:get_value("content-type", H) of
+ {"application/json", _} ->
+ fun (Next) ->
+ mp_parse_doc(Next, [])
+ end
end;
mp_parse_doc({body, Bytes}, AccBytes) ->
fun (Next) ->
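
The rewritten len_doc_to_multi_part_stream/4 has to predict, byte for byte,
what doc_to_multi_part_stream/5 will later write, because its result becomes
the Content-Length of the response before any body bytes are streamed. A
minimal sketch of that invariant for a single non-stub attachment (an
illustrative module, not part of the patch; 36 is the size of the fixed
"\r\ncontent-type: application/json\r\n\r\n" prefix counted above):

    -module(mp_len_check).
    -export([check/0]).

    %% Crashes with a badmatch if the two accountings ever drift apart.
    check() ->
        Boundary = <<"abc123">>,
        Json = <<"{\"_id\":\"doc1\"}">>,
        AttData = <<"attachment bytes">>,
        %% predicted length, mirroring len_doc_to_multi_part_stream/4
        Predicted = 2 + byte_size(Boundary) + 36 + byte_size(Json)
            + 4 + byte_size(Boundary)
            + (4 + byte_size(AttData) + 4 + byte_size(Boundary))
            + 2,
        %% actual bytes, assembled the way doc_to_multi_part_stream/5 streams them
        Actual = iolist_size([
            <<"--">>, Boundary, <<"\r\ncontent-type: application/json\r\n\r\n">>,
            Json, <<"\r\n--">>, Boundary,
            <<"\r\n\r\n">>, AttData, <<"\r\n--">>, Boundary,
            <<"--">>
        ]),
        Predicted = Actual,
        ok.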
diff --git a/src/couchdb/couch_external_server.erl b/src/couchdb/couch_external_server.erl
index 8e495320..045fcee9 100644
--- a/src/couchdb/couch_external_server.erl
+++ b/src/couchdb/couch_external_server.erl
@@ -50,9 +50,11 @@ terminate(_Reason, {_Name, _Command, Pid}) ->
handle_call({execute, JsonReq}, _From, {Name, Command, Pid}) ->
{reply, couch_os_process:prompt(Pid, JsonReq), {Name, Command, Pid}}.
+handle_info({'EXIT', _Pid, normal}, State) ->
+ {noreply, State};
handle_info({'EXIT', Pid, Reason}, {Name, Command, Pid}) ->
?LOG_INFO("EXTERNAL: Process for ~s exiting. (reason: ~w)", [Name, Reason]),
- {stop, normal, {Name, Command, Pid}}.
+ {stop, Reason, {Name, Command, Pid}}.
handle_cast(stop, {Name, Command, Pid}) ->
?LOG_INFO("EXTERNAL: Shutting down ~s", [Name]),
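
A note on the handle_info/2 change above, as a minimal sketch assuming a
generic gen_server that traps exits (module and names are illustrative): once
trap_exit is set, a linked process that finishes cleanly delivers an
{'EXIT', Pid, normal} message, so a clause that ignores normal exits keeps the
server alive, while abnormal reasons are propagated so the supervisor's
restart strategy sees the real cause instead of a blanket normal stop.

    -module(exit_aware).
    -behaviour(gen_server).
    -export([init/1, handle_call/3, handle_cast/2, handle_info/2,
             terminate/2, code_change/3]).

    init([]) ->
        process_flag(trap_exit, true),
        {ok, undefined}.

    handle_call(_Msg, _From, State) -> {reply, ok, State}.
    handle_cast(_Msg, State) -> {noreply, State}.

    %% linked helpers that finish cleanly must not take the server down
    handle_info({'EXIT', _Pid, normal}, State) ->
        {noreply, State};
    %% any other exit stops the server with the original reason, letting the
    %% supervisor decide whether to restart
    handle_info({'EXIT', _Pid, Reason}, State) ->
        {stop, Reason, State}.

    terminate(_Reason, _State) -> ok.
    code_change(_OldVsn, State, _Extra) -> {ok, State}.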
diff --git a/src/couchdb/couch_file.erl b/src/couchdb/couch_file.erl
index 5904260c..56ebc4f2 100644
--- a/src/couchdb/couch_file.erl
+++ b/src/couchdb/couch_file.erl
@@ -19,7 +19,8 @@
-record(file, {
fd,
- tail_append_begin=0 % 09 UPGRADE CODE
+ tail_append_begin = 0, % 09 UPGRADE CODE
+ eof = 0
}).
-export([open/1, open/2, close/1, bytes/1, sync/1, append_binary/2,old_pread/3]).
@@ -27,6 +28,7 @@
-export([pread_binary/2, read_header/1, truncate/2, upgrade_old_header/2]).
-export([append_term_md5/2,append_binary_md5/2]).
-export([init/1, terminate/2, handle_call/3, handle_cast/2, code_change/3, handle_info/2]).
+-export([delete/2,delete/3,init_delete_dir/1]).
%%----------------------------------------------------------------------
%% Args: Valid Options are [create] and [create,overwrite].
@@ -88,8 +90,8 @@ append_binary(Fd, Bin) ->
append_binary_md5(Fd, Bin) ->
Size = iolist_size(Bin),
- gen_server:call(Fd, {append_bin,
- [<<1:1/integer,Size:31/integer>>, erlang:md5(Bin), Bin]}, infinity).
+ gen_server:call(Fd, {append_bin,
+ [<<1:1/integer,Size:31/integer>>, couch_util:md5(Bin), Bin]}, infinity).
%%----------------------------------------------------------------------
@@ -118,33 +120,18 @@ pread_binary(Fd, Pos) ->
pread_iolist(Fd, Pos) ->
- {ok, LenIolist, NextPos} = read_raw_iolist(Fd, Pos, 4),
- case iolist_to_binary(LenIolist) of
- <<1:1/integer,Len:31/integer>> ->
- {ok, Md5List, ValPos} = read_raw_iolist(Fd, NextPos, 16),
- Md5 = iolist_to_binary(Md5List),
- {ok, IoList, _} = read_raw_iolist(Fd,ValPos,Len),
- case erlang:md5(IoList) of
- Md5 -> ok;
- _ -> throw(file_corruption)
- end,
+ case gen_server:call(Fd, {pread_iolist, Pos}, infinity) of
+ {ok, IoList, <<>>} ->
{ok, IoList};
- <<0:1/integer,Len:31/integer>> ->
- {ok, Iolist, _} = read_raw_iolist(Fd, NextPos, Len),
- {ok, Iolist}
- end.
-
-
-read_raw_iolist(Fd, Pos, Len) ->
- BlockOffset = Pos rem ?SIZE_BLOCK,
- TotalBytes = calculate_total_read_len(BlockOffset, Len),
- {ok, <<RawBin:TotalBytes/binary>>, HasPrefixes} = gen_server:call(Fd, {pread, Pos, TotalBytes}, infinity),
- if HasPrefixes ->
- {ok, remove_block_prefixes(BlockOffset, RawBin), Pos + TotalBytes};
- true ->
- % 09 UPGRADE CODE
- <<ReturnBin:Len/binary, _/binary>> = RawBin,
- {ok, [ReturnBin], Pos + Len}
+ {ok, IoList, Md5} ->
+ case couch_util:md5(IoList) of
+ Md5 ->
+ {ok, IoList};
+ _ ->
+ exit({file_corruption, <<"file corruption">>})
+ end;
+ Error ->
+ Error
end.
%%----------------------------------------------------------------------
@@ -172,17 +159,49 @@ truncate(Fd, Pos) ->
%% or {error, Reason}.
%%----------------------------------------------------------------------
+sync(Filepath) when is_list(Filepath) ->
+ {ok, Fd} = file:open(Filepath, [append, raw]),
+ try file:sync(Fd) after file:close(Fd) end;
sync(Fd) ->
gen_server:call(Fd, sync, infinity).
%%----------------------------------------------------------------------
-%% Purpose: Close the file. Is performed asynchronously.
+%% Purpose: Close the file.
%% Returns: ok
%%----------------------------------------------------------------------
close(Fd) ->
- Result = gen_server:cast(Fd, close),
- catch unlink(Fd),
- Result.
+ couch_util:shutdown_sync(Fd).
+
+
+delete(RootDir, Filepath) ->
+ delete(RootDir, Filepath, true).
+
+
+delete(RootDir, Filepath, Async) ->
+ DelFile = filename:join([RootDir,".delete", ?b2l(couch_uuids:random())]),
+ case file:rename(Filepath, DelFile) of
+ ok ->
+ if (Async) ->
+ spawn(file, delete, [DelFile]),
+ ok;
+ true ->
+ file:delete(DelFile)
+ end;
+ Error ->
+ Error
+ end.
+
+
+init_delete_dir(RootDir) ->
+ Dir = filename:join(RootDir,".delete"),
+    % note: ensure_dir requires an actual filename component, which is the
+    % reason for "foo".
+ filelib:ensure_dir(filename:join(Dir,"foo")),
+ filelib:fold_files(Dir, ".*", true,
+ fun(Filename, _) ->
+ ok = file:delete(Filename)
+ end, ok).
+
% 09 UPGRADE CODE
old_pread(Fd, Pos, Len) ->
@@ -204,7 +223,7 @@ read_header(Fd) ->
write_header(Fd, Data) ->
Bin = term_to_binary(Data),
- Md5 = erlang:md5(Bin),
+ Md5 = couch_util:md5(Bin),
% now we assemble the final header binary and write to disk
FinalBin = <<Md5/binary, Bin/binary>>,
gen_server:call(Fd, {write_header, FinalBin}, infinity).
@@ -219,10 +238,11 @@ init_status_error(ReturnPid, Ref, Error) ->
% server functions
init({Filepath, Options, ReturnPid, Ref}) ->
+ process_flag(trap_exit, true),
case lists:member(create, Options) of
true ->
filelib:ensure_dir(Filepath),
- case file:open(Filepath, [read, write, raw, binary]) of
+ case file:open(Filepath, [read, append, raw, binary]) of
{ok, Fd} ->
{ok, Length} = file:position(Fd, eof),
case Length > 0 of
@@ -235,16 +255,14 @@ init({Filepath, Options, ReturnPid, Ref}) ->
{ok, 0} = file:position(Fd, 0),
ok = file:truncate(Fd),
ok = file:sync(Fd),
- couch_stats_collector:track_process_count(
- {couchdb, open_os_files}),
+ maybe_track_open_os_files(Options),
{ok, #file{fd=Fd}};
false ->
ok = file:close(Fd),
init_status_error(ReturnPid, Ref, file_exists)
end;
false ->
- couch_stats_collector:track_process_count(
- {couchdb, open_os_files}),
+ maybe_track_open_os_files(Options),
{ok, #file{fd=Fd}}
end;
Error ->
@@ -254,41 +272,71 @@ init({Filepath, Options, ReturnPid, Ref}) ->
% open in read mode first, so we don't create the file if it doesn't exist.
case file:open(Filepath, [read, raw]) of
{ok, Fd_Read} ->
- {ok, Fd} = file:open(Filepath, [read, write, raw, binary]),
+ {ok, Fd} = file:open(Filepath, [read, append, raw, binary]),
ok = file:close(Fd_Read),
- couch_stats_collector:track_process_count({couchdb, open_os_files}),
- {ok, #file{fd=Fd}};
+ maybe_track_open_os_files(Options),
+ {ok, Length} = file:position(Fd, eof),
+ {ok, #file{fd=Fd, eof=Length}};
Error ->
init_status_error(ReturnPid, Ref, Error)
end
end.
+maybe_track_open_os_files(FileOptions) ->
+ case lists:member(sys_db, FileOptions) of
+ true ->
+ ok;
+ false ->
+ couch_stats_collector:track_process_count({couchdb, open_os_files})
+ end.
-terminate(_Reason, _Fd) ->
- ok.
+terminate(_Reason, #file{fd = Fd}) ->
+ ok = file:close(Fd).
+handle_call({pread_iolist, Pos}, _From, File) ->
+ {RawData, NextPos} = try
+ % up to 8Kbs of read ahead
+        % up to 8KB of read-ahead
+ catch
+ _:_ ->
+ read_raw_iolist_int(File, Pos, 4)
+ end,
+ <<Prefix:1/integer, Len:31/integer, RestRawData/binary>> =
+ iolist_to_binary(RawData),
+ case Prefix of
+ 1 ->
+ {Md5, IoList} = extract_md5(
+ maybe_read_more_iolist(RestRawData, 16 + Len, NextPos, File)),
+ {reply, {ok, IoList, Md5}, File};
+ 0 ->
+ IoList = maybe_read_more_iolist(RestRawData, Len, NextPos, File),
+ {reply, {ok, IoList, <<>>}, File}
+ end;
handle_call({pread, Pos, Bytes}, _From, #file{fd=Fd,tail_append_begin=TailAppendBegin}=File) ->
{ok, Bin} = file:pread(Fd, Pos, Bytes),
{reply, {ok, Bin, Pos >= TailAppendBegin}, File};
-handle_call(bytes, _From, #file{fd=Fd}=File) ->
- {reply, file:position(Fd, eof), File};
+handle_call(bytes, _From, #file{eof=Length}=File) ->
+ {reply, {ok, Length}, File};
handle_call(sync, _From, #file{fd=Fd}=File) ->
{reply, file:sync(Fd), File};
handle_call({truncate, Pos}, _From, #file{fd=Fd}=File) ->
{ok, Pos} = file:position(Fd, Pos),
- {reply, file:truncate(Fd), File};
-handle_call({append_bin, Bin}, _From, #file{fd=Fd}=File) ->
- {ok, Pos} = file:position(Fd, eof),
+ case file:truncate(Fd) of
+ ok ->
+ {reply, ok, File#file{eof=Pos}};
+ Error ->
+ {reply, Error, File}
+ end;
+handle_call({append_bin, Bin}, _From, #file{fd=Fd, eof=Pos}=File) ->
Blocks = make_blocks(Pos rem ?SIZE_BLOCK, Bin),
- case file:pwrite(Fd, Pos, Blocks) of
+ case file:write(Fd, Blocks) of
ok ->
- {reply, {ok, Pos}, File};
+ {reply, {ok, Pos}, File#file{eof=Pos+iolist_size(Blocks)}};
Error ->
{reply, Error, File}
end;
-handle_call({write_header, Bin}, _From, #file{fd=Fd}=File) ->
- {ok, Pos} = file:position(Fd, eof),
+handle_call({write_header, Bin}, _From, #file{fd=Fd, eof=Pos}=File) ->
BinSize = size(Bin),
case Pos rem ?SIZE_BLOCK of
0 ->
@@ -296,16 +344,21 @@ handle_call({write_header, Bin}, _From, #file{fd=Fd}=File) ->
BlockOffset ->
Padding = <<0:(8*(?SIZE_BLOCK-BlockOffset))>>
end,
- FinalBin = [Padding, <<1, BinSize:32/integer>> | make_blocks(1, [Bin])],
- {reply, file:pwrite(Fd, Pos, FinalBin), File};
+ FinalBin = [Padding, <<1, BinSize:32/integer>> | make_blocks(5, [Bin])],
+ case file:write(Fd, FinalBin) of
+ ok ->
+ {reply, ok, File#file{eof=Pos+iolist_size(FinalBin)}};
+ Error ->
+ {reply, Error, File}
+ end;
handle_call({upgrade_old_header, Prefix}, _From, #file{fd=Fd}=File) ->
case (catch read_old_header(Fd, Prefix)) of
{ok, Header} ->
- {ok, TailAppendBegin} = file:position(Fd, eof),
+ TailAppendBegin = File#file.eof,
Bin = term_to_binary(Header),
- Md5 = erlang:md5(Bin),
+ Md5 = couch_util:md5(Bin),
% now we assemble the final header binary and write to disk
FinalBin = <<Md5/binary, Bin/binary>>,
{reply, ok, _} = handle_call({write_header, FinalBin}, ok, File),
@@ -321,8 +374,7 @@ handle_call({upgrade_old_header, Prefix}, _From, #file{fd=Fd}=File) ->
end;
-handle_call(find_header, _From, #file{fd=Fd}=File) ->
- {ok, Pos} = file:position(Fd, eof),
+handle_call(find_header, _From, #file{fd=Fd, eof=Pos}=File) ->
{reply, find_header(Fd, Pos div ?SIZE_BLOCK), File}.
% 09 UPGRADE CODE
@@ -386,7 +438,7 @@ extract_header(Prefix, Bin) ->
case HeaderPrefix of
Prefix ->
% check the integrity signature
- case erlang:md5(TermBin) == Sig of
+ case couch_util:md5(TermBin) == Sig of
true ->
Header = binary_to_term(TermBin),
{ok, Header};
@@ -416,7 +468,7 @@ write_old_header(Fd, Prefix, Data) ->
ok = file:sync(Fd),
% pad out the header with zeros, then take the md5 hash
PadZeros = <<0:(8*(?HEADER_SIZE - FilledSize2))>>,
- Sig = erlang:md5([TermBin2, PadZeros]),
+ Sig = couch_util:md5([TermBin2, PadZeros]),
% now we assemble the final header binary and write to disk
WriteBin = <<Prefix/binary, TermBin2/binary, PadZeros/binary, Sig/binary>>,
?HEADER_SIZE = size(WriteBin), % sanity check
@@ -432,6 +484,8 @@ handle_cast(close, Fd) ->
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
+handle_info({'EXIT', _, normal}, Fd) ->
+ {noreply, Fd};
handle_info({'EXIT', _, Reason}, Fd) ->
{stop, Reason, Fd}.
@@ -447,16 +501,53 @@ find_header(Fd, Block) ->
end.
load_header(Fd, Block) ->
- {ok, <<1>>} = file:pread(Fd, Block*?SIZE_BLOCK, 1),
- {ok, <<HeaderLen:32/integer>>} = file:pread(Fd, (Block*?SIZE_BLOCK) + 1, 4),
+ {ok, <<1, HeaderLen:32/integer, RestBlock/binary>>} =
+ file:pread(Fd, Block * ?SIZE_BLOCK, ?SIZE_BLOCK),
TotalBytes = calculate_total_read_len(1, HeaderLen),
- {ok, <<RawBin:TotalBytes/binary>>} =
- file:pread(Fd, (Block*?SIZE_BLOCK) + 5, TotalBytes),
+ case TotalBytes > byte_size(RestBlock) of
+ false ->
+ <<RawBin:TotalBytes/binary, _/binary>> = RestBlock;
+ true ->
+ {ok, Missing} = file:pread(
+ Fd, (Block * ?SIZE_BLOCK) + 5 + byte_size(RestBlock),
+ TotalBytes - byte_size(RestBlock)),
+ RawBin = <<RestBlock/binary, Missing/binary>>
+ end,
<<Md5Sig:16/binary, HeaderBin/binary>> =
iolist_to_binary(remove_block_prefixes(1, RawBin)),
- Md5Sig = erlang:md5(HeaderBin),
+ Md5Sig = couch_util:md5(HeaderBin),
{ok, HeaderBin}.
+maybe_read_more_iolist(Buffer, DataSize, _, _)
+ when DataSize =< byte_size(Buffer) ->
+ <<Data:DataSize/binary, _/binary>> = Buffer,
+ [Data];
+maybe_read_more_iolist(Buffer, DataSize, NextPos, File) ->
+ {Missing, _} =
+ read_raw_iolist_int(File, NextPos, DataSize - byte_size(Buffer)),
+ [Buffer, Missing].
+
+-spec read_raw_iolist_int(#file{}, Pos::non_neg_integer(), Len::non_neg_integer()) ->
+ {Data::iolist(), CurPos::non_neg_integer()}.
+read_raw_iolist_int(Fd, {Pos, _Size}, Len) -> % 0110 UPGRADE CODE
+ read_raw_iolist_int(Fd, Pos, Len);
+read_raw_iolist_int(#file{fd=Fd, tail_append_begin=TAB}, Pos, Len) ->
+ BlockOffset = Pos rem ?SIZE_BLOCK,
+ TotalBytes = calculate_total_read_len(BlockOffset, Len),
+ {ok, <<RawBin:TotalBytes/binary>>} = file:pread(Fd, Pos, TotalBytes),
+ if Pos >= TAB ->
+ {remove_block_prefixes(BlockOffset, RawBin), Pos + TotalBytes};
+ true ->
+ % 09 UPGRADE CODE
+ <<ReturnBin:Len/binary, _/binary>> = RawBin,
+ {[ReturnBin], Pos + Len}
+ end.
+
+-spec extract_md5(iolist()) -> {binary(), iolist()}.
+extract_md5(FullIoList) ->
+ {Md5List, IoList} = split_iolist(FullIoList, 16, []),
+ {iolist_to_binary(Md5List), IoList}.
+
calculate_total_read_len(0, FinalLen) ->
calculate_total_read_len(1, FinalLen) + 1;
calculate_total_read_len(BlockOffset, FinalLen) ->
@@ -495,6 +586,11 @@ make_blocks(BlockOffset, IoList) ->
IoList
end.
+%% @doc Returns a tuple where the first element contains the leading SplitAt
+%% bytes of the original iolist and the second element is the tail. If SplitAt
+%% is larger than byte_size(IoList), returns the number of bytes still missing.
+-spec split_iolist(IoList::iolist(), SplitAt::non_neg_integer(), Acc::list()) ->
+ {iolist(), iolist()} | non_neg_integer().
split_iolist(List, 0, BeginAcc) ->
{lists:reverse(BeginAcc), List};
split_iolist([], SplitAt, _BeginAcc) ->
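
The new delete/3 and init_delete_dir/1 above implement a rename-then-unlink
scheme: renaming into a dedicated ".delete" directory is atomic as long as it
lives on the same filesystem as the database files, so the file vanishes from
its old path immediately while the potentially slow unlink happens in a
throwaway process (or is swept up by init_delete_dir/1 after a crash). A
minimal sketch of the pattern, with erlang:unique_integer/1 standing in for
couch_uuids:random/0 to keep the example self-contained:

    -module(deferred_delete).
    -export([delete/2]).

    delete(RootDir, Filepath) ->
        Unique = integer_to_list(erlang:unique_integer([positive])),
        DelFile = filename:join([RootDir, ".delete", Unique]),
        %% ensure_dir/1 creates the parent ".delete" directory if needed
        ok = filelib:ensure_dir(DelFile),
        case file:rename(Filepath, DelFile) of
            ok ->
                %% the actual unlink happens asynchronously
                spawn(file, delete, [DelFile]),
                ok;
            {error, _} = Error ->
                Error
        end.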
diff --git a/src/couchdb/couch_httpd.erl b/src/couchdb/couch_httpd.erl
index 252ecdb7..6bd5c893 100644
--- a/src/couchdb/couch_httpd.erl
+++ b/src/couchdb/couch_httpd.erl
@@ -13,27 +13,51 @@
-module(couch_httpd).
-include("couch_db.hrl").
--export([start_link/0, stop/0, handle_request/5]).
+-export([start_link/0, start_link/1, stop/0, handle_request/5]).
--export([header_value/2,header_value/3,qs_value/2,qs_value/3,qs/1,path/1,absolute_uri/2,body_length/1]).
+-export([header_value/2,header_value/3,qs_value/2,qs_value/3,qs/1,qs_json_value/3]).
+-export([path/1,absolute_uri/2,body_length/1]).
-export([verify_is_server_admin/1,unquote/1,quote/1,recv/2,recv_chunked/4,error_info/1]).
-export([make_fun_spec_strs/1]).
+-export([make_arity_1_fun/1, make_arity_2_fun/1, make_arity_3_fun/1]).
-export([parse_form/1,json_body/1,json_body_obj/1,body/1,doc_etag/1, make_etag/1, etag_respond/3]).
-export([primary_header_value/2,partition/1,serve_file/3,serve_file/4, server_header/0]).
-export([start_chunked_response/3,send_chunk/2,log_request/2]).
--export([start_response_length/4, send/2]).
+-export([start_response_length/4, start_response/3, send/2]).
-export([start_json_response/2, start_json_response/3, end_json_response/1]).
-export([send_response/4,send_method_not_allowed/2,send_error/4, send_redirect/2,send_chunked_error/2]).
-export([send_json/2,send_json/3,send_json/4,last_chunk/1,parse_multipart_request/3]).
+-export([accepted_encodings/1,handle_request_int/5,validate_referer/1,validate_ctype/2]).
start_link() ->
+ start_link(http).
+start_link(http) ->
+ Port = couch_config:get("httpd", "port", "5984"),
+ start_link(?MODULE, [{port, Port}]);
+start_link(https) ->
+ Port = couch_config:get("ssl", "port", "5984"),
+ CertFile = couch_config:get("ssl", "cert_file", nil),
+ KeyFile = couch_config:get("ssl", "key_file", nil),
+ Options = case CertFile /= nil andalso KeyFile /= nil of
+ true ->
+ [{port, Port},
+ {ssl, true},
+ {ssl_opts, [
+ {certfile, CertFile},
+ {keyfile, KeyFile}]}];
+ false ->
+ io:format("SSL enabled but PEM certificates are missing.", []),
+ throw({error, missing_certs})
+ end,
+ start_link(https, Options).
+start_link(Name, Options) ->
% read config and register for configuration changes
% just stop if one of the config settings change. couch_server_sup
% will restart us and then we will pick up the new settings.
BindAddress = couch_config:get("httpd", "bind_address", any),
- Port = couch_config:get("httpd", "port", "5984"),
+ NoDelay = "true" == couch_config:get("httpd", "nodelay", "false"),
DefaultSpec = "{couch_httpd_db, handle_request}",
DefaultFun = make_arity_1_fun(
@@ -66,11 +90,11 @@ start_link() ->
% and off we go
- {ok, Pid} = case mochiweb_http:start([
+ {ok, Pid} = case mochiweb_http:start(Options ++ [
{loop, Loop},
- {name, ?MODULE},
+ {name, Name},
{ip, BindAddress},
- {port, Port}
+ {nodelay,NoDelay}
]) of
{ok, MochiPid} -> {ok, MochiPid};
{error, Reason} ->
@@ -83,11 +107,17 @@ start_link() ->
?MODULE:stop();
("httpd", "port") ->
?MODULE:stop();
+ ("httpd", "max_connections") ->
+ ?MODULE:stop();
("httpd", "default_handler") ->
?MODULE:stop();
("httpd_global_handlers", _) ->
?MODULE:stop();
("httpd_db_handlers", _) ->
+ ?MODULE:stop();
+ ("vhosts", _) ->
+ ?MODULE:stop();
+ ("ssl", _) ->
?MODULE:stop()
end, Pid),
@@ -121,14 +151,21 @@ make_arity_3_fun(SpecStr) ->
% SpecStr is "{my_module, my_fun}, {my_module2, my_fun2}"
make_fun_spec_strs(SpecStr) ->
- [FunSpecStr || FunSpecStr <- re:split(SpecStr, "(?<=})\\s*,\\s*(?={)", [{return, list}])].
+ re:split(SpecStr, "(?<=})\\s*,\\s*(?={)", [{return, list}]).
stop() ->
mochiweb_http:stop(?MODULE).
-handle_request(MochiReq, DefaultFun,
- UrlHandlers, DbUrlHandlers, DesignUrlHandlers) ->
+handle_request(MochiReq, DefaultFun, UrlHandlers, DbUrlHandlers,
+ DesignUrlHandlers) ->
+
+ MochiReq1 = couch_httpd_vhost:match_vhost(MochiReq),
+ handle_request_int(MochiReq1, DefaultFun,
+ UrlHandlers, DbUrlHandlers, DesignUrlHandlers).
+
+handle_request_int(MochiReq, DefaultFun,
+ UrlHandlers, DbUrlHandlers, DesignUrlHandlers) ->
Begin = now(),
AuthenticationSrcs = make_fun_spec_strs(
couch_config:get("httpd", "authentication_handlers")),
@@ -137,6 +174,14 @@ handle_request(MochiReq, DefaultFun,
RawUri = MochiReq:get(raw_path),
{"/" ++ Path, _, _} = mochiweb_util:urlsplit_path(RawUri),
+ Headers = MochiReq:get(headers),
+
+ % get requested path
+ RequestedPath = case MochiReq:get_header_value("x-couchdb-vhost-path") of
+ undefined -> RawUri;
+ P -> P
+ end,
+
HandlerKey =
case mochiweb_util:partition(Path, "/") of
{"", "", ""} ->
@@ -144,10 +189,11 @@ handle_request(MochiReq, DefaultFun,
{FirstPart, _, _} ->
list_to_binary(FirstPart)
end,
- ?LOG_DEBUG("~p ~s ~p~nHeaders: ~p", [
+ ?LOG_DEBUG("~p ~s ~p from ~p~nHeaders: ~p", [
MochiReq:get(method),
RawUri,
MochiReq:get(version),
+ MochiReq:get(peer),
mochiweb_headers:to_list(MochiReq:get(headers))
]),
@@ -161,9 +207,26 @@ handle_request(MochiReq, DefaultFun,
Meth -> couch_util:to_existing_atom(Meth)
end,
increment_method_stats(Method1),
+
+ % allow broken HTTP clients to fake a full method vocabulary with an X-HTTP-METHOD-OVERRIDE header
+ MethodOverride = MochiReq:get_primary_header_value("X-HTTP-Method-Override"),
+ Method2 = case lists:member(MethodOverride, ["GET", "HEAD", "POST", "PUT", "DELETE", "TRACE", "CONNECT", "COPY"]) of
+ true ->
+ ?LOG_INFO("MethodOverride: ~s (real method was ~s)", [MethodOverride, Method1]),
+ case Method1 of
+ 'POST' -> couch_util:to_existing_atom(MethodOverride);
+ _ ->
+ % Ignore X-HTTP-Method-Override when the original verb isn't POST.
+ % I'd like to send a 406 error to the client, but that'd require a nasty refactor.
+ % throw({not_acceptable, <<"X-HTTP-Method-Override may only be used with POST requests.">>})
+ Method1
+ end;
+ _ -> Method1
+ end,
+
% alias HEAD to GET as mochiweb takes care of stripping the body
- Method = case Method1 of
- 'HEAD' -> 'GET';
+ Method = case Method2 of
+ 'HEAD' -> 'GET';
Other -> Other
end,
@@ -171,10 +234,14 @@ handle_request(MochiReq, DefaultFun,
mochi_req = MochiReq,
peer = MochiReq:get(peer),
method = Method,
+ requested_path_parts = [list_to_binary(couch_httpd:unquote(Part))
+ || Part <- string:tokens(RequestedPath, "/")],
path_parts = [list_to_binary(couch_httpd:unquote(Part))
|| Part <- string:tokens(Path, "/")],
db_url_handlers = DbUrlHandlers,
- design_url_handlers = DesignUrlHandlers
+ design_url_handlers = DesignUrlHandlers,
+ default_fun = DefaultFun,
+ url_handlers = UrlHandlers
},
HandlerFun = couch_util:dict_find(HandlerKey, UrlHandlers, DefaultFun),
@@ -191,8 +258,15 @@ handle_request(MochiReq, DefaultFun,
throw:{http_head_abort, Resp0} ->
{ok, Resp0};
throw:{invalid_json, S} ->
- ?LOG_ERROR("attempted upload of invalid JSON ~s", [S]),
- send_error(HttpReq, {bad_request, "invalid UTF-8 JSON"});
+ ?LOG_ERROR("attempted upload of invalid JSON (set log_level to debug to log it)", []),
+ ?LOG_DEBUG("Invalid JSON: ~p",[S]),
+ send_error(HttpReq, {bad_request, io_lib:format("invalid UTF-8 JSON: ~p",[S])});
+ throw:unacceptable_encoding ->
+ ?LOG_ERROR("unsupported encoding method for the response", []),
+ send_error(HttpReq, {not_acceptable, "unsupported encoding"});
+ throw:bad_accept_encoding_value ->
+ ?LOG_ERROR("received invalid Accept-Encoding header", []),
+ send_error(HttpReq, bad_request);
exit:normal ->
exit(normal);
throw:Error ->
@@ -243,6 +317,33 @@ authenticate_request(Response, _AuthSrcs) ->
increment_method_stats(Method) ->
couch_stats_collector:increment({httpd_request_methods, Method}).
+validate_referer(Req) ->
+ Host = host_for_request(Req),
+ Referer = header_value(Req, "Referer", fail),
+ case Referer of
+ fail ->
+ throw({bad_request, <<"Referer header required.">>});
+ Referer ->
+ {_,RefererHost,_,_,_} = mochiweb_util:urlsplit(Referer),
+ if
+ RefererHost =:= Host -> ok;
+ true -> throw({bad_request, <<"Referer header must match host.">>})
+ end
+ end.
+
+validate_ctype(Req, Ctype) ->
+ case couch_httpd:header_value(Req, "Content-Type") of
+ undefined ->
+ throw({bad_ctype, "Content-Type must be "++Ctype});
+ ReqCtype ->
+ % ?LOG_ERROR("Ctype ~p ReqCtype ~p",[Ctype,ReqCtype]),
+ case re:split(ReqCtype, ";", [{return, list}]) of
+ [Ctype] -> ok;
+ [Ctype, _Rest] -> ok;
+ _Else ->
+ throw({bad_ctype, "Content-Type must be "++Ctype})
+ end
+ end.
% Utilities
@@ -261,7 +362,17 @@ header_value(#httpd{mochi_req=MochiReq}, Key, Default) ->
primary_header_value(#httpd{mochi_req=MochiReq}, Key) ->
MochiReq:get_primary_header_value(Key).
-serve_file(#httpd{mochi_req=MochiReq}=Req, RelativePath, DocumentRoot) ->
+accepted_encodings(#httpd{mochi_req=MochiReq}) ->
+ case MochiReq:accepted_encodings(["gzip", "identity"]) of
+ bad_accept_encoding_value ->
+ throw(bad_accept_encoding_value);
+ [] ->
+ throw(unacceptable_encoding);
+ EncList ->
+ EncList
+ end.
+
+serve_file(Req, RelativePath, DocumentRoot) ->
serve_file(Req, RelativePath, DocumentRoot, []).
serve_file(#httpd{mochi_req=MochiReq}=Req, RelativePath, DocumentRoot, ExtraHeaders) ->
@@ -272,7 +383,15 @@ qs_value(Req, Key) ->
qs_value(Req, Key, undefined).
qs_value(Req, Key, Default) ->
- proplists:get_value(Key, qs(Req), Default).
+ couch_util:get_value(Key, qs(Req), Default).
+
+qs_json_value(Req, Key, Default) ->
+ case qs_value(Req, Key, Default) of
+ Default ->
+ Default;
+ Result ->
+ ?JSON_DECODE(Result)
+ end.
qs(#httpd{mochi_req=MochiReq}) ->
MochiReq:parse_qs().
@@ -280,30 +399,36 @@ qs(#httpd{mochi_req=MochiReq}) ->
path(#httpd{mochi_req=MochiReq}) ->
MochiReq:get(path).
-absolute_uri(#httpd{mochi_req=MochiReq}, Path) ->
+host_for_request(#httpd{mochi_req=MochiReq}) ->
XHost = couch_config:get("httpd", "x_forwarded_host", "X-Forwarded-Host"),
- Host = case MochiReq:get_header_value(XHost) of
+ case MochiReq:get_header_value(XHost) of
undefined ->
case MochiReq:get_header_value("Host") of
- undefined ->
+ undefined ->
{ok, {Address, Port}} = inet:sockname(MochiReq:get(socket)),
inet_parse:ntoa(Address) ++ ":" ++ integer_to_list(Port);
Value1 ->
Value1
end;
Value -> Value
- end,
+ end.
+
+absolute_uri(#httpd{mochi_req=MochiReq}=Req, Path) ->
+ Host = host_for_request(Req),
XSsl = couch_config:get("httpd", "x_forwarded_ssl", "X-Forwarded-Ssl"),
Scheme = case MochiReq:get_header_value(XSsl) of
- "on" -> "https";
- _ ->
- XProto = couch_config:get("httpd", "x_forwarded_proto", "X-Forwarded-Proto"),
- case MochiReq:get_header_value(XProto) of
- % Restrict to "https" and "http" schemes only
- "https" -> "https";
- _ -> "http"
- end
- end,
+ "on" -> "https";
+ _ ->
+ XProto = couch_config:get("httpd", "x_forwarded_proto", "X-Forwarded-Proto"),
+ case MochiReq:get_header_value(XProto) of
+ %% Restrict to "https" and "http" schemes only
+ "https" -> "https";
+ _ -> case MochiReq:get(scheme) of
+ https -> "https";
+ http -> "http"
+ end
+ end
+ end,
Scheme ++ "://" ++ Host ++ Path.
unquote(UrlEncodedString) ->
@@ -362,7 +487,7 @@ doc_etag(#doc{revs={Start, [DiskRev|_]}}) ->
"\"" ++ ?b2l(couch_doc:rev_to_str({Start, DiskRev})) ++ "\"".
make_etag(Term) ->
- <<SigInt:128/integer>> = erlang:md5(term_to_binary(Term)),
+ <<SigInt:128/integer>> = couch_util:md5(term_to_binary(Term)),
list_to_binary("\"" ++ lists:flatten(io_lib:format("~.36B",[SigInt])) ++ "\"").
etag_match(Req, CurrentEtag) when is_binary(CurrentEtag) ->
@@ -391,10 +516,10 @@ verify_is_server_admin(#user_ctx{roles=Roles}) ->
false -> throw({unauthorized, <<"You are not a server admin.">>})
end.
-log_request(#httpd{mochi_req=MochiReq,peer=Peer,method=Method}, Code) ->
+log_request(#httpd{mochi_req=MochiReq,peer=Peer}, Code) ->
?LOG_INFO("~s - - ~p ~s ~B", [
Peer,
- Method,
+ couch_util:to_existing_atom(MochiReq:get(method)),
MochiReq:get(raw_path),
couch_util:to_integer(Code)
]).
@@ -410,6 +535,18 @@ start_response_length(#httpd{mochi_req=MochiReq}=Req, Code, Headers, Length) ->
end,
{ok, Resp}.
+start_response(#httpd{mochi_req=MochiReq}=Req, Code, Headers) ->
+ log_request(Req, Code),
+    couch_stats_collector:increment({httpd_status_codes, Code}),
+ CookieHeader = couch_httpd_auth:cookie_auth_header(Req, Headers),
+ Headers2 = Headers ++ server_header() ++ CookieHeader,
+ Resp = MochiReq:start_response({Code, Headers2}),
+ case MochiReq:get(method) of
+ 'HEAD' -> throw({http_head_abort, Resp});
+ _ -> ok
+ end,
+ {ok, Resp}.
+
send(Resp, Data) ->
Resp:send(Data),
{ok, Resp}.
@@ -513,8 +650,18 @@ start_jsonp(Req) ->
[] -> [];
CallBack ->
try
- validate_callback(CallBack),
- CallBack ++ "("
+ % make sure jsonp is configured on (default off)
+ case couch_config:get("httpd", "allow_jsonp", "false") of
+ "true" ->
+ validate_callback(CallBack),
+ CallBack ++ "(";
+ _Else ->
+ % this could throw an error message, but instead we just ignore the
+ % jsonp parameter
+ % throw({bad_request, <<"JSONP must be configured before using.">>})
+ put(jsonp, no_jsonp),
+ []
+ end
catch
Error ->
put(jsonp, no_jsonp),
@@ -578,10 +725,12 @@ error_info(file_exists) ->
"created, the file already exists.">>};
error_info({bad_ctype, Reason}) ->
{415, <<"bad_content_type">>, Reason};
+error_info(requested_range_not_satisfiable) ->
+ {416, <<"requested_range_not_satisfiable">>, <<"Requested range not satisfiable">>};
error_info({error, illegal_database_name}) ->
{400, <<"illegal_database_name">>, <<"Only lowercase characters (a-z), "
"digits (0-9), and any of the characters _, $, (, ), +, -, and / "
- "are allowed">>};
+ "are allowed. Must begin with a letter.">>};
error_info({missing_stub, Reason}) ->
{412, <<"missing_stub">>, Reason};
error_info({Error, Reason}) ->
@@ -589,28 +738,66 @@ error_info({Error, Reason}) ->
error_info(Error) ->
{500, <<"unknown_error">>, couch_util:to_binary(Error)}.
-send_error(_Req, {already_sent, Resp, _Error}) ->
- {ok, Resp};
-
-send_error(#httpd{mochi_req=MochiReq}=Req, Error) ->
- {Code, ErrorStr, ReasonStr} = error_info(Error),
- Headers = if Code == 401 ->
+error_headers(#httpd{mochi_req=MochiReq}=Req, Code, ErrorStr, ReasonStr) ->
+ if Code == 401 ->
% this is where the basic auth popup is triggered
case MochiReq:get_header_value("X-CouchDB-WWW-Authenticate") of
undefined ->
case couch_config:get("httpd", "WWW-Authenticate", nil) of
nil ->
- [];
+ % If the client is a browser and the basic auth popup isn't turned on
+ % redirect to the session page.
+ case ErrorStr of
+ <<"unauthorized">> ->
+ case couch_config:get("couch_httpd_auth", "authentication_redirect", nil) of
+ nil -> {Code, []};
+ AuthRedirect ->
+ case couch_config:get("couch_httpd_auth", "require_valid_user", "false") of
+ "true" ->
+                        % always send the browser popup header when require_valid_user is set
+ {Code, [{"WWW-Authenticate", "Basic realm=\"server\""}]};
+ _False ->
+ case MochiReq:accepts_content_type("text/html") of
+ false ->
+ {Code, []};
+ true ->
+ % Redirect to the path the user requested, not
+ % the one that is used internally.
+ UrlReturnRaw = case MochiReq:get_header_value("x-couchdb-vhost-path") of
+ undefined ->
+ MochiReq:get(path);
+ VHostPath ->
+ VHostPath
+ end,
+ RedirectLocation = lists:flatten([
+ AuthRedirect,
+ "?return=", couch_util:url_encode(UrlReturnRaw),
+ "&reason=", couch_util:url_encode(ReasonStr)
+ ]),
+ {302, [{"Location", absolute_uri(Req, RedirectLocation)}]}
+ end
+ end
+ end;
+ _Else ->
+ {Code, []}
+ end;
Type ->
- [{"WWW-Authenticate", Type}]
+ {Code, [{"WWW-Authenticate", Type}]}
end;
Type ->
- [{"WWW-Authenticate", Type}]
+ {Code, [{"WWW-Authenticate", Type}]}
end;
true ->
- []
- end,
- send_error(Req, Code, Headers, ErrorStr, ReasonStr).
+ {Code, []}
+ end.
+
+send_error(_Req, {already_sent, Resp, _Error}) ->
+ {ok, Resp};
+
+send_error(Req, Error) ->
+ {Code, ErrorStr, ReasonStr} = error_info(Error),
+ {Code1, Headers} = error_headers(Req, Code, ErrorStr, ReasonStr),
+ send_error(Req, Code1, Headers, ErrorStr, ReasonStr).
send_error(Req, Code, ErrorStr, ReasonStr) ->
send_error(Req, Code, [], ErrorStr, ReasonStr).
@@ -667,22 +854,24 @@ parse_multipart_request(ContentType, DataFun, Callback) ->
buffer= <<>>,
data_fun=DataFun,
callback=Callback},
- {Mp2, _NilCallback} = read_until(Mp, <<"--", Boundary0/binary>>,
+ {Mp2, _NilCallback} = read_until(Mp, <<"--", Boundary0/binary>>,
fun(Next)-> nil_callback(Next) end),
- #mp{buffer=Buffer, data_fun=DataFun2, callback=Callback2} =
+ #mp{buffer=Buffer, data_fun=DataFun2, callback=Callback2} =
parse_part_header(Mp2),
{Buffer, DataFun2, Callback2}.
nil_callback(_Data)->
fun(Next) -> nil_callback(Next) end.
-get_boundary(ContentType) ->
- {"multipart/" ++ _, Opts} = mochiweb_util:parse_header(ContentType),
- case proplists:get_value("boundary", Opts) of
+get_boundary({"multipart/" ++ _, Opts}) ->
+ case couch_util:get_value("boundary", Opts) of
S when is_list(S) ->
S
- end.
-
+ end;
+get_boundary(ContentType) ->
+ {"multipart/" ++ _ , Opts} = mochiweb_util:parse_header(ContentType),
+ get_boundary({"multipart/", Opts}).
+
split_header(<<>>) ->
@@ -700,6 +889,11 @@ read_until(#mp{data_fun=DataFun, buffer=Buffer}=Mp, Pattern, Callback) ->
{Buffer2, DataFun2} = DataFun(),
Buffer3 = iolist_to_binary(Buffer2),
read_until(Mp#mp{data_fun=DataFun2,buffer=Buffer3}, Pattern, Callback2);
+ {partial, 0} ->
+ {NewData, DataFun2} = DataFun(),
+ read_until(Mp#mp{data_fun=DataFun2,
+ buffer= iolist_to_binary([Buffer,NewData])},
+ Pattern, Callback);
{partial, Skip} ->
<<DataChunk:Skip/binary, Rest/binary>> = Buffer,
Callback2 = Callback(DataChunk),
@@ -707,6 +901,10 @@ read_until(#mp{data_fun=DataFun, buffer=Buffer}=Mp, Pattern, Callback) ->
read_until(Mp#mp{data_fun=DataFun2,
buffer= iolist_to_binary([Rest | NewData])},
Pattern, Callback2);
+ {exact, 0} ->
+ PatternLen = size(Pattern),
+ <<_:PatternLen/binary, Rest/binary>> = Buffer,
+ {Mp#mp{buffer= Rest}, Callback};
{exact, Skip} ->
PatternLen = size(Pattern),
<<DataChunk:Skip/binary, _:PatternLen/binary, Rest/binary>> = Buffer,
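
The validate_ctype/2 helper added above accepts a media type with at most one
parameter, because the header value is split on ";". A rough sketch of the
accepted shapes (an illustrative module; the behaviour is inferred from the
clause patterns in the patch):

    -module(ctype_check).
    -export([valid/2]).

    %% valid("application/json; charset=utf-8", "application/json") -> true
    %% valid("text/plain", "application/json") -> false
    valid(ReqCtype, Expected) ->
        case re:split(ReqCtype, ";", [{return, list}]) of
            [Expected] -> true;          % exact match, no parameters
            [Expected, _Params] -> true; % one parameter, e.g. a charset
            _ -> false
        end.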
diff --git a/src/couchdb/couch_httpd_auth.erl b/src/couchdb/couch_httpd_auth.erl
index 554886ca..a370ebdf 100644
--- a/src/couchdb/couch_httpd_auth.erl
+++ b/src/couchdb/couch_httpd_auth.erl
@@ -16,9 +16,9 @@
-export([default_authentication_handler/1,special_test_authentication_handler/1]).
-export([cookie_authentication_handler/1]).
-export([null_authentication_handler/1]).
+-export([proxy_authentification_handler/1]).
-export([cookie_auth_header/2]).
-export([handle_session_req/1]).
--export([ensure_users_db_exists/1, get_user/1]).
-import(couch_httpd, [header_value/2, send_json/2,send_json/4, send_method_not_allowed/2]).
@@ -43,11 +43,11 @@ special_test_authentication_handler(Req) ->
Req#httpd{user_ctx=#user_ctx{roles=[<<"_admin">>]}}
end.
-basic_username_pw(Req) ->
+basic_name_pw(Req) ->
AuthorizationHeader = header_value(Req, "Authorization"),
case AuthorizationHeader of
"Basic " ++ Base64Value ->
- case string:tokens(?b2l(couch_util:decodeBase64(Base64Value)),":") of
+ case string:tokens(?b2l(base64:decode(Base64Value)),":") of
["_", "_"] ->
% special name and pass to be logged out
nil;
@@ -63,20 +63,20 @@ basic_username_pw(Req) ->
end.
default_authentication_handler(Req) ->
- case basic_username_pw(Req) of
+ case basic_name_pw(Req) of
{User, Pass} ->
- case get_user(?l2b(User)) of
+ case couch_auth_cache:get_user_creds(User) of
nil ->
throw({unauthorized, <<"Name or password is incorrect.">>});
UserProps ->
- UserSalt = proplists:get_value(<<"salt">>, UserProps, <<>>),
+ UserSalt = couch_util:get_value(<<"salt">>, UserProps, <<>>),
PasswordHash = hash_password(?l2b(Pass), UserSalt),
- case proplists:get_value(<<"password_sha">>, UserProps, nil) of
- ExpectedHash when ExpectedHash == PasswordHash ->
+ ExpectedHash = couch_util:get_value(<<"password_sha">>, UserProps, nil),
+ case couch_util:verify(ExpectedHash, PasswordHash) of
+ true ->
Req#httpd{user_ctx=#user_ctx{
name=?l2b(User),
- roles=proplists:get_value(<<"roles">>, UserProps, []),
- user_doc={UserProps}
+ roles=couch_util:get_value(<<"roles">>, UserProps, [])
}};
_Else ->
throw({unauthorized, <<"Name or password is incorrect.">>})
@@ -99,176 +99,106 @@ default_authentication_handler(Req) ->
null_authentication_handler(Req) ->
Req#httpd{user_ctx=#user_ctx{roles=[<<"_admin">>]}}.
-% maybe we can use hovercraft to simplify running this view query
-% rename to get_user_from_users_db
-get_user(UserName) ->
- case couch_config:get("admins", ?b2l(UserName)) of
- "-hashed-" ++ HashedPwdAndSalt ->
- % the username is an admin, now check to see if there is a user doc
- % which has a matching username, salt, and password_sha
- [HashedPwd, Salt] = string:tokens(HashedPwdAndSalt, ","),
- case get_user_props_from_db(UserName) of
- nil ->
- [{<<"roles">>, [<<"_admin">>]},
- {<<"salt">>, ?l2b(Salt)},
- {<<"password_sha">>, ?l2b(HashedPwd)}];
- UserProps when is_list(UserProps) ->
- DocRoles = proplists:get_value(<<"roles">>, UserProps),
- [{<<"roles">>, [<<"_admin">> | DocRoles]},
- {<<"salt">>, ?l2b(Salt)},
- {<<"password_sha">>, ?l2b(HashedPwd)},
- {<<"user_doc">>, {UserProps}}]
- end;
- Else ->
- get_user_props_from_db(UserName)
- end.
-
-get_user_props_from_db(UserName) ->
- DbName = couch_config:get("couch_httpd_auth", "authentication_db"),
- {ok, Db} = ensure_users_db_exists(?l2b(DbName)),
- DocId = <<"org.couchdb.user:", UserName/binary>>,
- try couch_httpd_db:couch_doc_open(Db, DocId, nil, []) of
- #doc{}=Doc ->
- {DocProps} = couch_query_servers:json_doc(Doc),
- DocProps
- catch
- throw:Throw ->
- nil
- end.
-
-% this should handle creating the ddoc
-ensure_users_db_exists(DbName) ->
- case couch_db:open(DbName, [{user_ctx, #user_ctx{roles=[<<"_admin">>]}}]) of
- {ok, Db} ->
- ensure_auth_ddoc_exists(Db, <<"_design/_auth">>),
- {ok, Db};
- _Error ->
- {ok, Db} = couch_db:create(DbName, [{user_ctx, #user_ctx{roles=[<<"_admin">>]}}]),
- ensure_auth_ddoc_exists(Db, <<"_design/_auth">>),
- {ok, Db}
+%% @doc Proxy authentication handler.
+%
+% This handler creates a userCtx object for a user who has been authenticated
+% remotely. The client passes specific headers to CouchDB and the handler
+% builds the userCtx from them. The header names can be configured in
+% local.ini; by default they are:
+%
+% * X-Auth-CouchDB-UserName : the username (x_auth_username in the
+%   couch_httpd_auth section)
+% * X-Auth-CouchDB-Roles : the user's roles, as a comma-separated list
+%   (x_auth_roles in the couch_httpd_auth section)
+% * X-Auth-CouchDB-Token : a token authenticating the request (x_auth_token in
+%   the couch_httpd_auth section). The token is an HMAC-SHA1 of the username,
+%   keyed with the secret from the couch_httpd_auth section; the secret must
+%   be the same on the client and the CouchDB node. The token is optional
+%   unless proxy_use_secret in the couch_httpd_auth section is true.
+%
+proxy_authentification_handler(Req) ->
+ case proxy_auth_user(Req) of
+ nil -> Req;
+ Req2 -> Req2
end.
-ensure_auth_ddoc_exists(Db, DDocId) ->
- try couch_httpd_db:couch_doc_open(Db, DDocId, nil, []) of
- _Foo -> ok
- catch
- _:Error ->
- % create the design document
- {ok, AuthDesign} = auth_design_doc(DDocId),
- {ok, _Rev} = couch_db:update_doc(Db, AuthDesign, []),
- ok
+proxy_auth_user(Req) ->
+ XHeaderUserName = couch_config:get("couch_httpd_auth", "x_auth_username",
+ "X-Auth-CouchDB-UserName"),
+ XHeaderRoles = couch_config:get("couch_httpd_auth", "x_auth_roles",
+ "X-Auth-CouchDB-Roles"),
+ XHeaderToken = couch_config:get("couch_httpd_auth", "x_auth_token",
+ "X-Auth-CouchDB-Token"),
+ case header_value(Req, XHeaderUserName) of
+ undefined -> nil;
+ UserName ->
+ Roles = case header_value(Req, XHeaderRoles) of
+ undefined -> [];
+ Else ->
+ [?l2b(R) || R <- string:tokens(Else, ",")]
+ end,
+ case couch_config:get("couch_httpd_auth", "proxy_use_secret", "false") of
+ "true" ->
+ case couch_config:get("couch_httpd_auth", "secret", nil) of
+ nil ->
+ Req#httpd{user_ctx=#user_ctx{name=?l2b(UserName), roles=Roles}};
+ Secret ->
+ ExpectedToken = couch_util:to_hex(crypto:sha_mac(Secret, UserName)),
+ case header_value(Req, XHeaderToken) of
+ Token when Token == ExpectedToken ->
+ Req#httpd{user_ctx=#user_ctx{name=?l2b(UserName),
+ roles=Roles}};
+ _ -> nil
+ end
+ end;
+ _ ->
+ Req#httpd{user_ctx=#user_ctx{name=?l2b(UserName), roles=Roles}}
+ end
end.
-% add the validation function here
-auth_design_doc(DocId) ->
- DocProps = [
- {<<"_id">>, DocId},
- {<<"language">>,<<"javascript">>},
- {<<"views">>,
- {[{<<"users">>,
- {[{<<"map">>,
- <<"function (doc) {\n if (doc.type == \"user\") {\n emit(doc.username, doc);\n}\n}">>
- }]}
- }]}
- },
- {
- <<"validate_doc_update">>,
- <<"function(newDoc, oldDoc, userCtx) {
- if (newDoc.type != 'user') {
- return;
- } // we only validate user docs for now
- if (!newDoc.username) {
- throw({forbidden : 'doc.username is required'});
- }
- if (!(newDoc.roles && (typeof newDoc.roles.length != 'undefined') )) {
- throw({forbidden : 'doc.roles must be an array'});
- }
- if (newDoc._id != 'org.couchdb.user:'+newDoc.username) {
- throw({forbidden : 'Docid must be of the form org.couchdb.user:username'});
- }
- if (oldDoc) { // validate all updates
- if (oldDoc.username != newDoc.username) {
- throw({forbidden : 'Usernames may not be changed.'});
- }
- }
- if (newDoc.password_sha && !newDoc.salt) {
- throw({forbidden : 'Users with password_sha must have a salt. See /_utils/script/couch.js for example code.'});
- }
- if (userCtx.roles.indexOf('_admin') == -1) { // not an admin
- if (oldDoc) { // validate non-admin updates
- if (userCtx.name != newDoc.username) {
- throw({forbidden : 'You may only update your own user document.'});
- }
- // validate role updates
- var oldRoles = oldDoc.roles.sort();
- var newRoles = newDoc.roles.sort();
- if (oldRoles.length != newRoles.length) {
- throw({forbidden : 'Only _admin may edit roles'});
- }
- for (var i=0; i < oldRoles.length; i++) {
- if (oldRoles[i] != newRoles[i]) {
- throw({forbidden : 'Only _admin may edit roles'});
- }
- };
- } else if (newDoc.roles.length > 0) {
- throw({forbidden : 'Only _admin may set roles'});
- }
- }
- // no system roles in users db
- for (var i=0; i < newDoc.roles.length; i++) {
- if (newDoc.roles[i][0] == '_') {
- throw({forbidden : 'No system roles (starting with underscore) in users db.'});
- }
- };
- // no system names as usernames
- if (newDoc.username[0] == '_') {
- throw({forbidden : 'Username may not start with underscore.'});
- }
- }">>
- }],
- {ok, couch_doc:from_json_obj({DocProps})}.
cookie_authentication_handler(#httpd{mochi_req=MochiReq}=Req) ->
case MochiReq:get_cookie_value("AuthSession") of
undefined -> Req;
[] -> Req;
- Cookie ->
+ Cookie ->
[User, TimeStr | HashParts] = try
AuthSession = couch_util:decodeBase64Url(Cookie),
- [A, B | Cs] = string:tokens(?b2l(AuthSession), ":")
+ [_A, _B | _Cs] = string:tokens(?b2l(AuthSession), ":")
catch
- _:Error ->
+ _:_Error ->
Reason = <<"Malformed AuthSession cookie. Please clear your cookies.">>,
throw({bad_request, Reason})
end,
% Verify expiry and hash
- {NowMS, NowS, _} = erlang:now(),
- CurrentTime = NowMS * 1000000 + NowS,
+ CurrentTime = make_cookie_time(),
case couch_config:get("couch_httpd_auth", "secret", nil) of
- nil ->
- ?LOG_ERROR("cookie auth secret is not set",[]),
+ nil ->
+ ?LOG_DEBUG("cookie auth secret is not set",[]),
Req;
SecretStr ->
Secret = ?l2b(SecretStr),
- case get_user(?l2b(User)) of
+ case couch_auth_cache:get_user_creds(User) of
nil -> Req;
UserProps ->
- UserSalt = proplists:get_value(<<"salt">>, UserProps, <<"">>),
+ UserSalt = couch_util:get_value(<<"salt">>, UserProps, <<"">>),
FullSecret = <<Secret/binary, UserSalt/binary>>,
ExpectedHash = crypto:sha_mac(FullSecret, User ++ ":" ++ TimeStr),
Hash = ?l2b(string:join(HashParts, ":")),
Timeout = to_int(couch_config:get("couch_httpd_auth", "timeout", 600)),
?LOG_DEBUG("timeout ~p", [Timeout]),
case (catch erlang:list_to_integer(TimeStr, 16)) of
- TimeStamp when CurrentTime < TimeStamp + Timeout
- andalso ExpectedHash == Hash ->
- TimeLeft = TimeStamp + Timeout - CurrentTime,
- ?LOG_DEBUG("Successful cookie auth as: ~p", [User]),
- Req#httpd{user_ctx=#user_ctx{
- name=?l2b(User),
- roles=proplists:get_value(<<"roles">>, UserProps, []),
- user_doc=proplists:get_value(<<"user_doc">>, UserProps, null)
- }, auth={FullSecret, TimeLeft < Timeout*0.9}};
+ TimeStamp when CurrentTime < TimeStamp + Timeout ->
+ case couch_util:verify(ExpectedHash, Hash) of
+ true ->
+ TimeLeft = TimeStamp + Timeout - CurrentTime,
+ ?LOG_DEBUG("Successful cookie auth as: ~p", [User]),
+ Req#httpd{user_ctx=#user_ctx{
+ name=?l2b(User),
+ roles=couch_util:get_value(<<"roles">>, UserProps, [])
+ }, auth={FullSecret, TimeLeft < Timeout*0.9}};
+ _Else ->
+ Req
+ end;
_Else ->
Req
end
@@ -285,12 +215,11 @@ cookie_auth_header(#httpd{user_ctx=#user_ctx{name=User}, auth={Secret, true}}, H
% or logout handler.
% The login and logout handlers need to set the AuthSession cookie
% themselves.
- CookieHeader = proplists:get_value("Set-Cookie", Headers, ""),
+ CookieHeader = couch_util:get_value("Set-Cookie", Headers, ""),
Cookies = mochiweb_cookies:parse_cookie(CookieHeader),
- AuthSession = proplists:get_value("AuthSession", Cookies),
+ AuthSession = couch_util:get_value("AuthSession", Cookies),
if AuthSession == undefined ->
- {NowMS, NowS, _} = erlang:now(),
- TimeStamp = NowMS * 1000000 + NowS,
+ TimeStamp = make_cookie_time(),
[cookie_auth_cookie(?b2l(User), Secret, TimeStamp)];
true ->
[]
@@ -318,30 +247,35 @@ ensure_cookie_auth_secret() ->
% session handlers
% Login handler with user db
-% TODO this should also allow a JSON POST
handle_session_req(#httpd{method='POST', mochi_req=MochiReq}=Req) ->
ReqBody = MochiReq:recv_body(),
Form = case MochiReq:get_primary_header_value("content-type") of
+ % content type should be json
"application/x-www-form-urlencoded" ++ _ ->
mochiweb_util:parse_qs(ReqBody);
+ "application/json" ++ _ ->
+ {Pairs} = ?JSON_DECODE(ReqBody),
+ lists:map(fun({Key, Value}) ->
+ {?b2l(Key), ?b2l(Value)}
+ end, Pairs);
_ ->
[]
end,
- UserName = ?l2b(proplists:get_value("username", Form, "")),
- Password = ?l2b(proplists:get_value("password", Form, "")),
+ UserName = ?l2b(couch_util:get_value("name", Form, "")),
+ Password = ?l2b(couch_util:get_value("password", Form, "")),
?LOG_DEBUG("Attempt Login: ~s",[UserName]),
- User = case get_user(UserName) of
+ User = case couch_auth_cache:get_user_creds(UserName) of
nil -> [];
Result -> Result
end,
- UserSalt = proplists:get_value(<<"salt">>, User, <<>>),
+ UserSalt = couch_util:get_value(<<"salt">>, User, <<>>),
PasswordHash = hash_password(Password, UserSalt),
- case proplists:get_value(<<"password_sha">>, User, nil) of
- ExpectedHash when ExpectedHash == PasswordHash ->
+ ExpectedHash = couch_util:get_value(<<"password_sha">>, User, nil),
+ case couch_util:verify(ExpectedHash, PasswordHash) of
+ true ->
% setup the session cookie
Secret = ?l2b(ensure_cookie_auth_secret()),
- {NowMS, NowS, _} = erlang:now(),
- CurrentTime = NowMS * 1000000 + NowS,
+ CurrentTime = make_cookie_time(),
Cookie = cookie_auth_cookie(?b2l(UserName), <<Secret/binary, UserSalt/binary>>, CurrentTime),
% TODO document the "next" feature in Futon
{Code, Headers} = case couch_httpd:qs_value(Req, "next", nil) of
@@ -353,9 +287,8 @@ handle_session_req(#httpd{method='POST', mochi_req=MochiReq}=Req) ->
send_json(Req#httpd{req_body=ReqBody}, Code, Headers,
{[
{ok, true},
- {name, proplists:get_value(<<"username">>, User, null)},
- {roles, proplists:get_value(<<"roles">>, User, [])},
- {user_doc, proplists:get_value(<<"user_doc">>, User, null)}
+ {name, couch_util:get_value(<<"name">>, User, null)},
+ {roles, couch_util:get_value(<<"roles">>, User, [])}
]});
_Else ->
% clear the session
@@ -363,6 +296,7 @@ handle_session_req(#httpd{method='POST', mochi_req=MochiReq}=Req) ->
send_json(Req, 401, [Cookie], {[{error, <<"unauthorized">>},{reason, <<"Name or password is incorrect.">>}]})
end;
% get user info
+% GET /_session
handle_session_req(#httpd{method='GET', user_ctx=UserCtx}=Req) ->
Name = UserCtx#user_ctx.name,
ForceLogin = couch_httpd:qs_value(Req, "basic", "false"),
@@ -371,15 +305,20 @@ handle_session_req(#httpd{method='GET', user_ctx=UserCtx}=Req) ->
throw({unauthorized, <<"Please login.">>});
{Name, _} ->
send_json(Req, {[
+ % remove this ok
{ok, true},
- {name, Name},
- {roles, UserCtx#user_ctx.roles},
+ {<<"userCtx">>, {[
+ {name, Name},
+ {roles, UserCtx#user_ctx.roles}
+ ]}},
{info, {[
- {user_db, ?l2b(couch_config:get("couch_httpd_auth", "authentication_db"))},
- {handlers, [?l2b(H) || H <- couch_httpd:make_fun_spec_strs(
+ {authentication_db, ?l2b(couch_config:get("couch_httpd_auth", "authentication_db"))},
+ {authentication_handlers, [auth_name(H) || H <- couch_httpd:make_fun_spec_strs(
couch_config:get("httpd", "authentication_handlers"))]}
- ] ++ maybe_value(authenticated, UserCtx#user_ctx.handler)}}
- ] ++ maybe_value(user_doc, UserCtx#user_ctx.user_doc)})
+ ] ++ maybe_value(authenticated, UserCtx#user_ctx.handler, fun(Handler) ->
+ auth_name(?b2l(Handler))
+ end)}}
+ ]})
end;
% logout by deleting the session
handle_session_req(#httpd{method='DELETE'}=Req) ->
@@ -394,12 +333,21 @@ handle_session_req(#httpd{method='DELETE'}=Req) ->
handle_session_req(Req) ->
send_method_not_allowed(Req, "GET,HEAD,POST,DELETE").
-maybe_value(Key, undefined) -> [];
-maybe_value(Key, Else) -> [{Key, Else}].
+maybe_value(_Key, undefined, _Fun) -> [];
+maybe_value(Key, Else, Fun) ->
+ [{Key, Fun(Else)}].
+
+auth_name(String) when is_list(String) ->
+ [_,_,_,_,_,Name|_] = re:split(String, "[\\W_]", [{return, list}]),
+ ?l2b(Name).
to_int(Value) when is_binary(Value) ->
- to_int(?b2l(Value));
+ to_int(?b2l(Value));
to_int(Value) when is_list(Value) ->
list_to_integer(Value);
to_int(Value) when is_integer(Value) ->
Value.
+
+make_cookie_time() ->
+ {NowMS, NowS, _} = erlang:now(),
+ NowMS * 1000000 + NowS.
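
Given the proxy handler documented earlier in this file, a trusted frontend
needs to compute the X-Auth-CouchDB-Token itself. A minimal sketch of the
client side (illustrative module; to_hex/1 is a stand-in for the
couch_util:to_hex/1 the server uses, and crypto:sha_mac/2 is the same call the
handler makes):

    -module(proxy_token).
    -export([token/2]).

    %% token("the shared secret", "jan") returns the hex HMAC-SHA1 the
    %% handler compares against the X-Auth-CouchDB-Token header.
    token(Secret, UserName) when is_list(Secret), is_list(UserName) ->
        Mac = crypto:sha_mac(Secret, UserName),
        to_hex(Mac).

    to_hex(Bin) ->
        lists:flatten([io_lib:format("~2.16.0b", [B]) || <<B>> <= Bin]).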
diff --git a/src/couchdb/couch_httpd_db.erl b/src/couchdb/couch_httpd_db.erl
index 9233e953..c11d2aef 100644
--- a/src/couchdb/couch_httpd_db.erl
+++ b/src/couchdb/couch_httpd_db.erl
@@ -20,8 +20,8 @@
-import(couch_httpd,
[send_json/2,send_json/3,send_json/4,send_method_not_allowed/2,
- start_json_response/2,start_json_response/3,
- send_chunk/2,end_json_response/1,
+ send_response/4,start_json_response/2,start_json_response/3,
+ send_chunk/2,last_chunk/1,end_json_response/1,
start_chunked_response/3, absolute_uri/2, send/2,
start_response_length/4]).
@@ -55,205 +55,80 @@ handle_request(#httpd{path_parts=[DbName|RestParts],method=Method,
do_db_req(Req, Handler)
end.
-get_changes_timeout(Req, Resp) ->
- DefaultTimeout = list_to_integer(
- couch_config:get("httpd", "changes_timeout", "60000")),
- case couch_httpd:qs_value(Req, "heartbeat") of
- undefined ->
- case couch_httpd:qs_value(Req, "timeout") of
- undefined ->
- {DefaultTimeout, fun() -> stop end};
- TimeoutList ->
- {lists:min([DefaultTimeout, list_to_integer(TimeoutList)]),
- fun() -> stop end}
- end;
- "true" ->
- {DefaultTimeout, fun() -> send_chunk(Resp, "\n"), ok end};
- TimeoutList ->
- {lists:min([DefaultTimeout, list_to_integer(TimeoutList)]),
- fun() -> send_chunk(Resp, "\n"), ok end}
- end.
-
-
-start_sending_changes(_Resp, "continuous") ->
- ok;
-start_sending_changes(Resp, _Else) ->
- send_chunk(Resp, "{\"results\":[\n").
-
-handle_changes_req(#httpd{method='GET',path_parts=[DbName|_]}=Req, Db) ->
- FilterFun = make_filter_fun(Req, Db),
- {ok, Info} = couch_db:get_db_info(Db),
- Seq = proplists:get_value(update_seq, Info),
- {Dir, StartSeq} = case couch_httpd:qs_value(Req, "descending", "false") of
- "false" ->
- {fwd, list_to_integer(couch_httpd:qs_value(Req, "since", "0"))};
- "true" ->
- {rev, Seq};
- _Bad -> throw({bad_request, "descending must be true or false"})
- end,
- Limit = list_to_integer(couch_httpd:qs_value(Req, "limit", "1000000000000000")),
- ResponseType = couch_httpd:qs_value(Req, "feed", "normal"),
- if ResponseType == "continuous" orelse ResponseType == "longpoll" ->
- {ok, Resp} = start_json_response(Req, 200),
- start_sending_changes(Resp, ResponseType),
-
- Self = self(),
- {ok, Notify} = couch_db_update_notifier:start_link(
- fun({_, DbName0}) when DbName0 == DbName ->
- Self ! db_updated;
- (_) ->
- ok
- end),
- {Timeout, TimeoutFun} = get_changes_timeout(Req, Resp),
- couch_stats_collector:track_process_count(Self,
- {httpd, clients_requesting_changes}),
- try
- keep_sending_changes(Req, Resp, Db, StartSeq, <<"">>, Timeout,
- TimeoutFun, ResponseType, Limit, FilterFun)
- after
- couch_db_update_notifier:stop(Notify),
- get_rest_db_updated() % clean out any remaining update messages
- end;
- true ->
- CurrentEtag = couch_httpd:make_etag(Info),
- couch_httpd:etag_respond(Req, CurrentEtag, fun() ->
- % send the etag
- {ok, Resp} = start_json_response(Req, 200, [{"Etag", CurrentEtag}]),
- start_sending_changes(Resp, ResponseType),
- {ok, {_, LastSeq, _Prepend, _, _, _, _, _}} =
- send_changes(Req, Resp, Db, Dir, StartSeq, <<"">>, "normal",
- Limit, FilterFun),
- end_sending_changes(Resp, LastSeq, ResponseType)
- end)
- end;
-
+handle_changes_req(#httpd{method='POST'}=Req, Db) ->
+ couch_httpd:validate_ctype(Req, "application/json"),
+ handle_changes_req1(Req, Db);
+handle_changes_req(#httpd{method='GET'}=Req, Db) ->
+ handle_changes_req1(Req, Db);
handle_changes_req(#httpd{path_parts=[_,<<"_changes">>]}=Req, _Db) ->
- send_method_not_allowed(Req, "GET,HEAD").
-
-% waits for a db_updated msg, if there are multiple msgs, collects them.
-wait_db_updated(Timeout, TimeoutFun) ->
- receive db_updated -> get_rest_db_updated()
- after Timeout ->
- case TimeoutFun() of
- ok -> wait_db_updated(Timeout, TimeoutFun);
- stop -> stop
+ send_method_not_allowed(Req, "GET,HEAD,POST").
+
+handle_changes_req1(Req, Db) ->
+ MakeCallback = fun(Resp) ->
+ fun({change, Change, _}, "continuous") ->
+ send_chunk(Resp, [?JSON_ENCODE(Change) | "\n"]);
+ ({change, Change, Prepend}, _) ->
+ send_chunk(Resp, [Prepend, ?JSON_ENCODE(Change)]);
+ (start, "continuous") ->
+ ok;
+ (start, _) ->
+ send_chunk(Resp, "{\"results\":[\n");
+ ({stop, EndSeq}, "continuous") ->
+ send_chunk(
+ Resp,
+ [?JSON_ENCODE({[{<<"last_seq">>, EndSeq}]}) | "\n"]
+ ),
+ end_json_response(Resp);
+ ({stop, EndSeq}, _) ->
+ send_chunk(
+ Resp,
+ io_lib:format("\n],\n\"last_seq\":~w}\n", [EndSeq])
+ ),
+ end_json_response(Resp);
+ (timeout, _) ->
+ send_chunk(Resp, "\n")
end
- end.
-
-get_rest_db_updated() ->
- receive db_updated -> get_rest_db_updated()
- after 0 -> updated
- end.
-
-end_sending_changes(Resp, EndSeq, "continuous") ->
- send_chunk(Resp, [?JSON_ENCODE({[{<<"last_seq">>, EndSeq}]}) | "\n"]),
- end_json_response(Resp);
-end_sending_changes(Resp, EndSeq, _Else) ->
- send_chunk(Resp, io_lib:format("\n],\n\"last_seq\":~w}\n", [EndSeq])),
- end_json_response(Resp).
-
-keep_sending_changes(#httpd{user_ctx=UserCtx,path_parts=[DbName|_]}=Req, Resp,
- Db, StartSeq, Prepend, Timeout, TimeoutFun, ResponseType, Limit, Filter) ->
- {ok, {_, EndSeq, Prepend2, _, _, _, NewLimit, _}} = send_changes(Req, Resp, Db, fwd, StartSeq,
- Prepend, ResponseType, Limit, Filter),
- couch_db:close(Db),
- if
- Limit > NewLimit, ResponseType == "longpoll" ->
- end_sending_changes(Resp, EndSeq, ResponseType);
- true ->
- case wait_db_updated(Timeout, TimeoutFun) of
- updated ->
- case couch_db:open(DbName, [{user_ctx, UserCtx}]) of
- {ok, Db2} ->
- keep_sending_changes(Req, Resp, Db2, EndSeq, Prepend2, Timeout,
- TimeoutFun, ResponseType, NewLimit, Filter);
- _Else ->
- end_sending_changes(Resp, EndSeq, ResponseType)
- end;
- stop ->
- end_sending_changes(Resp, EndSeq, ResponseType)
- end
- end.
-
-changes_enumerator(DocInfos, {Db, _, _, FilterFun, Resp, "continuous", Limit, IncludeDocs}) ->
- [#doc_info{id=Id, high_seq=Seq, revs=[#rev_info{deleted=Del,rev=Rev}|_]}|_] = DocInfos,
- Results0 = FilterFun(DocInfos),
- Results = [Result || Result <- Results0, Result /= null],
- Go = if Limit =< 1 -> stop; true -> ok end,
- case Results of
- [] ->
- {Go, {Db, Seq, nil, FilterFun, Resp, "continuous", Limit, IncludeDocs}};
- _ ->
- send_chunk(Resp, [?JSON_ENCODE(changes_row(Db, Seq, Id, Del, Results, Rev, IncludeDocs))
- |"\n"]),
- {Go, {Db, Seq, nil, FilterFun, Resp, "continuous", Limit-1, IncludeDocs}}
- end;
-changes_enumerator(DocInfos, {Db, _, Prepend, FilterFun, Resp, _, Limit, IncludeDocs}) ->
- [#doc_info{id=Id, high_seq=Seq, revs=[#rev_info{deleted=Del,rev=Rev}|_]}|_] = DocInfos,
- Results0 = FilterFun(DocInfos),
- Results = [Result || Result <- Results0, Result /= null],
- Go = if Limit =< 1 -> stop; true -> ok end,
- case Results of
- [] ->
- {Go, {Db, Seq, Prepend, FilterFun, Resp, nil, Limit, IncludeDocs}};
+ end,
+ ChangesArgs = parse_changes_query(Req),
+ ChangesFun = couch_changes:handle_changes(ChangesArgs, Req, Db),
+ WrapperFun = case ChangesArgs#changes_args.feed of
+ "normal" ->
+ {ok, Info} = couch_db:get_db_info(Db),
+ CurrentEtag = couch_httpd:make_etag(Info),
+ fun(FeedChangesFun) ->
+ couch_httpd:etag_respond(
+ Req,
+ CurrentEtag,
+ fun() ->
+ {ok, Resp} = couch_httpd:start_json_response(
+ Req, 200, [{"Etag", CurrentEtag}]
+ ),
+ FeedChangesFun(MakeCallback(Resp))
+ end
+ )
+ end;
_ ->
- send_chunk(Resp, [Prepend, ?JSON_ENCODE(
- changes_row(Db, Seq, Id, Del, Results, Rev, IncludeDocs))]),
- {Go, {Db, Seq, <<",\n">>, FilterFun, Resp, nil, Limit-1, IncludeDocs}}
- end.
+ % "longpoll" or "continuous"
+ {ok, Resp} = couch_httpd:start_json_response(Req, 200),
+ fun(FeedChangesFun) ->
+ FeedChangesFun(MakeCallback(Resp))
+ end
+ end,
+ couch_stats_collector:track_process_count(
+ {httpd, clients_requesting_changes}
+ ),
+ WrapperFun(ChangesFun).
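+% The callback built by MakeCallback receives start | {change, _, _} |
+% {stop, EndSeq} | timeout events from couch_changes. For "continuous"
+% feeds each change is written as one JSON object per line; otherwise the
+% rows are wrapped in a single {"results":[...], "last_seq":N} object.
+% Heartbeat timeouts are sent as bare newlines in both cases.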
-changes_row(Db, Seq, Id, Del, Results, Rev, true) ->
- {[{seq,Seq},{id,Id},{changes,Results}] ++ deleted_item(Del) ++
- couch_httpd_view:doc_member(Db, {Id, Rev})};
-changes_row(_, Seq, Id, Del, Results, _, false) ->
- {[{seq,Seq},{id,Id},{changes,Results}] ++ deleted_item(Del)}.
-
-deleted_item(true) -> [{deleted,true}];
-deleted_item(_) -> [].
-
-send_changes(Req, Resp, Db, Dir, StartSeq, Prepend, ResponseType, Limit, FilterFun) ->
- Style = list_to_existing_atom(
- couch_httpd:qs_value(Req, "style", "main_only")),
- IncludeDocs = list_to_existing_atom(
- couch_httpd:qs_value(Req, "include_docs", "false")),
- couch_db:changes_since(Db, Style, StartSeq, fun changes_enumerator/2,
- [{dir, Dir}], {Db, StartSeq, Prepend, FilterFun, Resp, ResponseType, Limit, IncludeDocs}).
-
-make_filter_fun(Req, Db) ->
- Filter = couch_httpd:qs_value(Req, "filter", ""),
- case [list_to_binary(couch_httpd:unquote(Part))
- || Part <- string:tokens(Filter, "/")] of
- [] ->
- fun(DocInfos) ->
- % doing this as a batch is more efficient for external filters
- [{[{rev, couch_doc:rev_to_str(Rev)}]} ||
- #doc_info{revs=[#rev_info{rev=Rev}|_]} <- DocInfos]
- end;
- [DName, FName] ->
- DesignId = <<"_design/", DName/binary>>,
- DDoc = couch_httpd_db:couch_doc_open(Db, DesignId, nil, []),
- % validate that the ddoc has the filter fun
- #doc{body={Props}} = DDoc,
- couch_util:get_nested_json_value({Props}, [<<"filters">>, FName]),
- fun(DocInfos) ->
- Docs = [Doc || {ok, Doc} <- [
- {ok, Doc} = couch_db:open_doc(Db, DInfo, [deleted])
- || DInfo <- DocInfos]],
- {ok, Passes} = couch_query_servers:filter_docs(Req, Db, DDoc, FName, Docs),
- [{[{rev, couch_doc:rev_to_str(Rev)}]}
- || #doc_info{revs=[#rev_info{rev=Rev}|_]} <- DocInfos,
- Pass <- Passes, Pass == true]
- end;
- _Else ->
- throw({bad_request,
- "filter parameter must be of the form `designname/filtername`"})
- end.
-handle_compact_req(#httpd{method='POST',path_parts=[DbName,_,Id|_]}=Req, _Db) ->
+handle_compact_req(#httpd{method='POST',path_parts=[DbName,_,Id|_]}=Req, Db) ->
+ ok = couch_db:check_is_admin(Db),
+ couch_httpd:validate_ctype(Req, "application/json"),
ok = couch_view_compactor:start_compact(DbName, Id),
send_json(Req, 202, {[{ok, true}]});
handle_compact_req(#httpd{method='POST'}=Req, Db) ->
+ ok = couch_db:check_is_admin(Db),
+ couch_httpd:validate_ctype(Req, "application/json"),
ok = couch_db:start_compact(Db),
send_json(Req, 202, {[{ok, true}]});
@@ -262,6 +137,8 @@ handle_compact_req(Req, _Db) ->
handle_view_cleanup_req(#httpd{method='POST'}=Req, Db) ->
% delete unreferenced index files
+ ok = couch_db:check_is_admin(Db),
+ couch_httpd:validate_ctype(Req, "application/json"),
ok = couch_view:cleanup_index_files(Db),
send_json(Req, 202, {[{ok, true}]});
@@ -276,7 +153,9 @@ handle_design_req(#httpd{
% load ddoc
DesignId = <<"_design/", DesignName/binary>>,
DDoc = couch_httpd_db:couch_doc_open(Db, DesignId, nil, []),
- Handler = couch_util:dict_find(Action, DesignUrlHandlers, fun db_req/2),
+ Handler = couch_util:dict_find(Action, DesignUrlHandlers, fun(_, _, _) ->
+ throw({not_found, <<"missing handler: ", Action/binary>>})
+ end),
Handler(Req, Db, DDoc);
handle_design_req(Req, Db) ->
@@ -298,23 +177,13 @@ handle_design_info_req(Req, _Db, _DDoc) ->
create_db_req(#httpd{user_ctx=UserCtx}=Req, DbName) ->
ok = couch_httpd:verify_is_server_admin(Req),
- LDbName = ?b2l(DbName),
- case couch_config:get("couch_httpd_auth", "authentication_db") of
- LDbName ->
- % make sure user's db always has the auth ddoc
- {ok, Db} = couch_httpd_auth:ensure_users_db_exists(DbName),
- couch_db:close(Db),
- DbUrl = absolute_uri(Req, "/" ++ couch_util:url_encode(DbName)),
- send_json(Req, 201, [{"Location", DbUrl}], {[{ok, true}]});
- _Else ->
- case couch_server:create(DbName, [{user_ctx, UserCtx}]) of
- {ok, Db} ->
- couch_db:close(Db),
- DbUrl = absolute_uri(Req, "/" ++ couch_util:url_encode(DbName)),
- send_json(Req, 201, [{"Location", DbUrl}], {[{ok, true}]});
- Error ->
- throw(Error)
- end
+ case couch_server:create(DbName, [{user_ctx, UserCtx}]) of
+ {ok, Db} ->
+ couch_db:close(Db),
+ DbUrl = absolute_uri(Req, "/" ++ couch_util:url_encode(DbName)),
+ send_json(Req, 201, [{"Location", DbUrl}], {[{ok, true}]});
+ Error ->
+ throw(Error)
end.
delete_db_req(#httpd{user_ctx=UserCtx}=Req, DbName) ->
@@ -327,15 +196,6 @@ delete_db_req(#httpd{user_ctx=UserCtx}=Req, DbName) ->
end.
do_db_req(#httpd{user_ctx=UserCtx,path_parts=[DbName|_]}=Req, Fun) ->
- LDbName = ?b2l(DbName),
- % I hope this lookup is cheap.
- case couch_config:get("couch_httpd_auth", "authentication_db") of
- LDbName ->
- % make sure user's db always has the auth ddoc
- {ok, ADb} = couch_httpd_auth:ensure_users_db_exists(DbName),
- couch_db:close(ADb);
- _Else -> ok
- end,
case couch_db:open(DbName, [{user_ctx, UserCtx}]) of
{ok, Db} ->
try
@@ -352,6 +212,7 @@ db_req(#httpd{method='GET',path_parts=[_DbName]}=Req, Db) ->
send_json(Req, {DbInfo});
db_req(#httpd{method='POST',path_parts=[DbName]}=Req, Db) ->
+ couch_httpd:validate_ctype(Req, "application/json"),
Doc = couch_doc:from_json_obj(couch_httpd:json_body(Req)),
Doc2 = case Doc#doc.id of
<<"">> ->
@@ -392,6 +253,7 @@ db_req(#httpd{path_parts=[_DbName]}=Req, _Db) ->
send_method_not_allowed(Req, "DELETE,GET,HEAD,POST");
db_req(#httpd{method='POST',path_parts=[_,<<"_ensure_full_commit">>]}=Req, Db) ->
+ couch_httpd:validate_ctype(Req, "application/json"),
UpdateSeq = couch_db:get_update_seq(Db),
CommittedSeq = couch_db:get_committed_update_seq(Db),
{ok, StartTime} =
@@ -404,7 +266,6 @@ db_req(#httpd{method='POST',path_parts=[_,<<"_ensure_full_commit">>]}=Req, Db) -
throw({bad_request,
"can't do a full commit ahead of current update_seq"});
RequiredSeq > CommittedSeq ->
- % user asked for an explicit sequence, don't commit any batches
couch_db:ensure_full_commit(Db);
true ->
{ok, Db#db.instance_start_time}
@@ -420,8 +281,9 @@ db_req(#httpd{path_parts=[_,<<"_ensure_full_commit">>]}=Req, _Db) ->
db_req(#httpd{method='POST',path_parts=[_,<<"_bulk_docs">>]}=Req, Db) ->
couch_stats_collector:increment({httpd, bulk_requests}),
+ couch_httpd:validate_ctype(Req, "application/json"),
{JsonProps} = couch_httpd:json_body_obj(Req),
- DocsArray = proplists:get_value(<<"docs">>, JsonProps),
+ DocsArray = couch_util:get_value(<<"docs">>, JsonProps),
case couch_httpd:header_value(Req, "X-Couch-Full-Commit") of
"true" ->
Options = [full_commit];
@@ -430,7 +292,7 @@ db_req(#httpd{method='POST',path_parts=[_,<<"_bulk_docs">>]}=Req, Db) ->
_ ->
Options = []
end,
- case proplists:get_value(<<"new_edits">>, JsonProps, true) of
+ case couch_util:get_value(<<"new_edits">>, JsonProps, true) of
true ->
Docs = lists:map(
fun({ObjProps} = JsonObj) ->
@@ -440,7 +302,7 @@ db_req(#httpd{method='POST',path_parts=[_,<<"_bulk_docs">>]}=Req, Db) ->
<<>> -> couch_uuids:new();
Id0 -> Id0
end,
- case proplists:get_value(<<"_rev">>, ObjProps) of
+ case couch_util:get_value(<<"_rev">>, ObjProps) of
undefined ->
Revs = {0, []};
Rev ->
@@ -451,7 +313,7 @@ db_req(#httpd{method='POST',path_parts=[_,<<"_bulk_docs">>]}=Req, Db) ->
end,
DocsArray),
Options2 =
- case proplists:get_value(<<"all_or_nothing">>, JsonProps) of
+ case couch_util:get_value(<<"all_or_nothing">>, JsonProps) of
true -> [all_or_nothing|Options];
_ -> Options
end,
@@ -467,7 +329,11 @@ db_req(#httpd{method='POST',path_parts=[_,<<"_bulk_docs">>]}=Req, Db) ->
send_json(Req, 417, ErrorsJson)
end;
false ->
- Docs = [couch_doc:from_json_obj(JsonObj) || JsonObj <- DocsArray],
+ Docs = lists:map(fun(JsonObj) ->
+ Doc = couch_doc:from_json_obj(JsonObj),
+ validate_attachment_names(Doc),
+ Doc
+ end, DocsArray),
{ok, Errors} = couch_db:update_docs(Db, Docs, Options, replicated_changes),
ErrorsJson =
lists:map(fun update_doc_result_to_json/1, Errors),
@@ -477,6 +343,7 @@ db_req(#httpd{path_parts=[_,<<"_bulk_docs">>]}=Req, _Db) ->
send_method_not_allowed(Req, "POST");
db_req(#httpd{method='POST',path_parts=[_,<<"_purge">>]}=Req, Db) ->
+ couch_httpd:validate_ctype(Req, "application/json"),
{IdsRevs} = couch_httpd:json_body_obj(Req),
IdsRevs2 = [{Id, couch_doc:parse_revs(Revs)} || {Id, Revs} <- IdsRevs],
@@ -492,11 +359,13 @@ db_req(#httpd{path_parts=[_,<<"_purge">>]}=Req, _Db) ->
send_method_not_allowed(Req, "POST");
db_req(#httpd{method='GET',path_parts=[_,<<"_all_docs">>]}=Req, Db) ->
- all_docs_view(Req, Db, nil);
+ Keys = couch_httpd:qs_json_value(Req, "keys", nil),
+ all_docs_view(Req, Db, Keys);
db_req(#httpd{method='POST',path_parts=[_,<<"_all_docs">>]}=Req, Db) ->
+ couch_httpd:validate_ctype(Req, "application/json"),
{Fields} = couch_httpd:json_body_obj(Req),
- case proplists:get_value(<<"keys">>, Fields, nil) of
+ case couch_util:get_value(<<"keys">>, Fields, nil) of
nil ->
?LOG_DEBUG("POST to _all_docs with no keys member.", []),
all_docs_view(Req, Db, nil);
@@ -521,39 +390,36 @@ db_req(#httpd{method='POST',path_parts=[_,<<"_missing_revs">>]}=Req, Db) ->
db_req(#httpd{path_parts=[_,<<"_missing_revs">>]}=Req, _Db) ->
send_method_not_allowed(Req, "POST");
-
db_req(#httpd{method='POST',path_parts=[_,<<"_revs_diff">>]}=Req, Db) ->
{JsonDocIdRevs} = couch_httpd:json_body_obj(Req),
- JsonDocIdRevs2 =
+ JsonDocIdRevs2 =
[{Id, couch_doc:parse_revs(RevStrs)} || {Id, RevStrs} <- JsonDocIdRevs],
{ok, Results} = couch_db:get_missing_revs(Db, JsonDocIdRevs2),
- Results2 =
+ Results2 =
lists:map(fun({Id, MissingRevs, PossibleAncestors}) ->
{Id,
- {[{missing, couch_doc:revs_to_strs(MissingRevs)}] ++
+ {[{missing, couch_doc:revs_to_strs(MissingRevs)}] ++
if PossibleAncestors == [] ->
[];
- true ->
- [{possible_ancestors,
+ true ->
+ [{possible_ancestors,
couch_doc:revs_to_strs(PossibleAncestors)}]
end}}
end, Results),
- send_json(Req, {[{missing, {Results2}}]});
+ send_json(Req, {Results2});
db_req(#httpd{path_parts=[_,<<"_revs_diff">>]}=Req, _Db) ->
send_method_not_allowed(Req, "POST");
-
-db_req(#httpd{method='PUT',path_parts=[_,<<"_admins">>]}=Req,
- Db) ->
- Admins = couch_httpd:json_body(Req),
- ok = couch_db:set_admins(Db, Admins),
+db_req(#httpd{method='PUT',path_parts=[_,<<"_security">>]}=Req, Db) ->
+ SecObj = couch_httpd:json_body(Req),
+ ok = couch_db:set_security(Db, SecObj),
send_json(Req, {[{<<"ok">>, true}]});
-db_req(#httpd{method='GET',path_parts=[_,<<"_admins">>]}=Req, Db) ->
- send_json(Req, couch_db:get_admins(Db));
+db_req(#httpd{method='GET',path_parts=[_,<<"_security">>]}=Req, Db) ->
+ send_json(Req, couch_db:get_security(Db));
-db_req(#httpd{path_parts=[_,<<"_admins">>]}=Req, _Db) ->
+db_req(#httpd{path_parts=[_,<<"_security">>]}=Req, _Db) ->
send_method_not_allowed(Req, "PUT,GET");
db_req(#httpd{method='PUT',path_parts=[_,<<"_revs_limit">>]}=Req,
@@ -620,7 +486,7 @@ all_docs_view(Req, Db, Keys) ->
CurrentEtag = couch_httpd:make_etag(Info),
couch_httpd:etag_respond(Req, CurrentEtag, fun() ->
- TotalRowCount = proplists:get_value(doc_count, Info),
+ TotalRowCount = couch_util:get_value(doc_count, Info),
StartId = if is_binary(StartKey) -> StartKey;
true -> StartDocId
end,
@@ -628,10 +494,16 @@ all_docs_view(Req, Db, Keys) ->
true -> EndDocId
end,
FoldAccInit = {Limit, SkipCount, undefined, []},
-
+ UpdateSeq = couch_db:get_update_seq(Db),
+ JsonParams = case couch_httpd:qs_value(Req, "update_seq") of
+ "true" ->
+ [{update_seq, UpdateSeq}];
+ _Else ->
+ []
+ end,
case Keys of
nil ->
- FoldlFun = couch_httpd_view:make_view_fold_fun(Req, QueryArgs, CurrentEtag, Db,
+ FoldlFun = couch_httpd_view:make_view_fold_fun(Req, QueryArgs, CurrentEtag, Db, UpdateSeq,
TotalRowCount, #view_fold_helper_funs{
reduce_count = fun couch_db:enum_docs_reduce_to_count/1
}),
@@ -646,9 +518,9 @@ all_docs_view(Req, Db, Keys) ->
{ok, LastOffset, FoldResult} = couch_db:enum_docs(Db,
AdapterFun, FoldAccInit, [{start_key, StartId}, {dir, Dir},
{if Inclusive -> end_key; true -> end_key_gt end, EndId}]),
- couch_httpd_view:finish_view_fold(Req, TotalRowCount, LastOffset, FoldResult);
+ couch_httpd_view:finish_view_fold(Req, TotalRowCount, LastOffset, FoldResult, JsonParams);
_ ->
- FoldlFun = couch_httpd_view:make_view_fold_fun(Req, QueryArgs, CurrentEtag, Db,
+ FoldlFun = couch_httpd_view:make_view_fold_fun(Req, QueryArgs, CurrentEtag, Db, UpdateSeq,
TotalRowCount, #view_fold_helper_funs{
reduce_count = fun(Offset) -> Offset end
}),
@@ -675,7 +547,7 @@ all_docs_view(Req, Db, Keys) ->
{_, FoldAcc2} = FoldlFun(Doc, 0, FoldAcc),
FoldAcc2
end, FoldAccInit, Keys),
- couch_httpd_view:finish_view_fold(Req, TotalRowCount, 0, FoldResult)
+ couch_httpd_view:finish_view_fold(Req, TotalRowCount, 0, FoldResult, JsonParams)
end
end).
@@ -684,72 +556,73 @@ db_doc_req(#httpd{method='DELETE'}=Req, Db, DocId) ->
couch_doc_open(Db, DocId, nil, []),
case couch_httpd:qs_value(Req, "rev") of
undefined ->
- update_doc(Req, Db, DocId,
+ update_doc(Req, Db, DocId,
couch_doc_from_req(Req, DocId, {[{<<"_deleted">>,true}]}));
Rev ->
- update_doc(Req, Db, DocId,
- couch_doc_from_req(Req, DocId,
+ update_doc(Req, Db, DocId,
+ couch_doc_from_req(Req, DocId,
{[{<<"_rev">>, ?l2b(Rev)},{<<"_deleted">>,true}]}))
end;
-db_doc_req(#httpd{method='GET'}=Req, Db, DocId) ->
+db_doc_req(#httpd{method = 'GET', mochi_req = MochiReq} = Req, Db, DocId) ->
#doc_query_args{
rev = Rev,
open_revs = Revs,
- options = Options,
+ options = Options1,
atts_since = AttsSince
} = parse_doc_query(Req),
+ Options = case AttsSince of
+ nil ->
+ Options1;
+ RevList when is_list(RevList) ->
+ [{atts_since, RevList}, attachments | Options1]
+ end,
case Revs of
[] ->
Doc = couch_doc_open(Db, DocId, Rev, Options),
- Options2 =
- if AttsSince /= nil ->
- RevPos = find_ancestor_rev_pos(Doc#doc.revs, AttsSince),
- [{atts_after_revpos, RevPos} | Options];
- true -> Options
- end,
- send_doc(Req, Doc, Options2);
+ send_doc(Req, Doc, Options);
_ ->
{ok, Results} = couch_db:open_doc_revs(Db, DocId, Revs, Options),
- {ok, Resp} = start_json_response(Req, 200),
- send_chunk(Resp, "["),
- % We loop through the docs. The first time through the separator
- % is whitespace, then a comma on subsequent iterations.
- lists:foldl(
- fun(Result, AccSeparator) ->
- case Result of
- {ok, Doc} ->
- JsonDoc = couch_doc:to_json_obj(Doc, Options),
- Json = ?JSON_ENCODE({[{ok, JsonDoc}]}),
- send_chunk(Resp, AccSeparator ++ Json);
- {{not_found, missing}, RevId} ->
- RevStr = couch_doc:rev_to_str(RevId),
- Json = ?JSON_ENCODE({[{"missing", RevStr}]}),
- send_chunk(Resp, AccSeparator ++ Json)
+ case MochiReq:accepts_content_type("multipart/mixed") of
+ false ->
+ {ok, Resp} = start_json_response(Req, 200),
+ send_chunk(Resp, "["),
+ % We loop through the docs. The first time through the separator
+ % is whitespace, then a comma on subsequent iterations.
+ lists:foldl(
+ fun(Result, AccSeparator) ->
+ case Result of
+ {ok, Doc} ->
+ JsonDoc = couch_doc:to_json_obj(Doc, Options),
+ Json = ?JSON_ENCODE({[{ok, JsonDoc}]}),
+ send_chunk(Resp, AccSeparator ++ Json);
+ {{not_found, missing}, RevId} ->
+ RevStr = couch_doc:rev_to_str(RevId),
+ Json = ?JSON_ENCODE({[{"missing", RevStr}]}),
+ send_chunk(Resp, AccSeparator ++ Json)
+ end,
+ "," % AccSeparator now has a comma
end,
- "," % AccSeparator now has a comma
- end,
- "", Results),
- send_chunk(Resp, "]"),
- end_json_response(Resp)
+ "", Results),
+ send_chunk(Resp, "]"),
+ end_json_response(Resp);
+ true ->
+ send_docs_multipart(Req, Results, Options)
+ end
end;
+
db_doc_req(#httpd{method='POST'}=Req, Db, DocId) ->
+ couch_httpd:validate_referer(Req),
couch_doc:validate_docid(DocId),
- case couch_httpd:header_value(Req, "Content-Type") of
- "multipart/form-data" ++ _Rest ->
- ok;
- _Else ->
- throw({bad_ctype, <<"Invalid Content-Type header for form upload">>})
- end,
+ couch_httpd:validate_ctype(Req, "multipart/form-data"),
Form = couch_httpd:parse_form(Req),
- case proplists:is_defined("_doc", Form) of
- true ->
- Json = ?JSON_DECODE(proplists:get_value("_doc", Form)),
- Doc = couch_doc_from_req(Req, DocId, Json);
- false ->
- Rev = couch_doc:parse_rev(list_to_binary(proplists:get_value("_rev", Form))),
- {ok, [{ok, Doc}]} = couch_db:open_doc_revs(Db, DocId, [Rev], [])
+ case couch_util:get_value("_doc", Form) of
+ undefined ->
+ Rev = couch_doc:parse_rev(couch_util:get_value("_rev", Form)),
+ {ok, [{ok, Doc}]} = couch_db:open_doc_revs(Db, DocId, [Rev], []);
+ Json ->
+ Doc = couch_doc_from_req(Req, DocId, ?JSON_DECODE(Json))
end,
UpdatedAtts = [
#att{name=validate_attachment_name(Name),
@@ -785,13 +658,13 @@ db_doc_req(#httpd{method='PUT'}=Req, Db, DocId) ->
Loc = absolute_uri(Req, "/" ++ ?b2l(Db#db.name) ++ "/" ++ ?b2l(DocId)),
RespHeaders = [{"Location", Loc}],
- case couch_httpd:header_value(Req, "Content-Type") of
- ("multipart/related" ++ _Rest) = ContentType->
- Doc0 = couch_doc:doc_from_multi_part_stream(ContentType,
+ case couch_util:to_list(couch_httpd:header_value(Req, "Content-Type")) of
+ ("multipart/related;" ++ _) = ContentType ->
+ {ok, Doc0} = couch_doc:doc_from_multi_part_stream(ContentType,
fun() -> receive_request_data(Req) end),
Doc = couch_doc_from_req(Req, DocId, Doc0),
update_doc(Req, Db, DocId, Doc, RespHeaders, UpdateType);
- _ ->
+ _Else ->
case couch_httpd:qs_value(Req, "batch") of
"ok" ->
% batch
@@ -810,7 +683,8 @@ db_doc_req(#httpd{method='PUT'}=Req, Db, DocId) ->
]});
_Normal ->
% normal
- Doc = couch_doc_from_req(Req, DocId, couch_httpd:json_body(Req)),
+ Body = couch_httpd:json_body(Req),
+ Doc = couch_doc_from_req(Req, DocId, Body),
update_doc(Req, Db, DocId, Doc, RespHeaders, UpdateType)
end
end;
@@ -835,24 +709,13 @@ db_doc_req(#httpd{method='COPY'}=Req, Db, SourceDocId) ->
db_doc_req(Req, _Db, _DocId) ->
send_method_not_allowed(Req, "DELETE,GET,HEAD,POST,PUT,COPY").
-find_ancestor_rev_pos({_, []}, _AttsSinceRevs) ->
- 0;
-find_ancestor_rev_pos(_DocRevs, []) ->
- 0;
-find_ancestor_rev_pos({RevPos, [RevId|Rest]}, AttsSinceRevs) ->
- case lists:member({RevPos, RevId}, AttsSinceRevs) of
- true ->
- RevPos;
- false ->
- find_ancestor_rev_pos({RevPos - 1, Rest}, AttsSinceRevs)
- end.
send_doc(Req, Doc, Options) ->
case Doc#doc.meta of
[] ->
DiskEtag = couch_httpd:doc_etag(Doc),
% output etag only when we have no meta
- couch_httpd:etag_respond(Req, DiskEtag, fun() ->
+ couch_httpd:etag_respond(Req, DiskEtag, fun() ->
send_doc_efficiently(Req, Doc, [{"Etag", DiskEtag}], Options)
end);
_ ->
@@ -862,39 +725,84 @@ send_doc(Req, Doc, Options) ->
send_doc_efficiently(Req, #doc{atts=[]}=Doc, Headers, Options) ->
send_json(Req, 200, Headers, couch_doc:to_json_obj(Doc, Options));
-send_doc_efficiently(Req, #doc{atts=Atts}=Doc, Headers, Options) ->
- case lists:member(attachments, Options) orelse
- proplists:is_defined(atts_after_revpos, Options) of
+send_doc_efficiently(#httpd{mochi_req = MochiReq} = Req,
+ #doc{atts = Atts} = Doc, Headers, Options) ->
+ case lists:member(attachments, Options) of
true ->
- AcceptedTypes = case couch_httpd:header_value(Req, "Accept") of
- undefined -> [];
- AcceptHeader -> string:tokens(AcceptHeader, ", ")
- end,
- case lists:member("multipart/related", AcceptedTypes) of
+ case MochiReq:accepts_content_type("multipart/related") of
false ->
- send_json(Req, 200, [], couch_doc:to_json_obj(Doc, Options));
+ send_json(Req, 200, Headers, couch_doc:to_json_obj(Doc, Options));
true ->
Boundary = couch_uuids:random(),
- JsonBytes = ?JSON_ENCODE(couch_doc:to_json_obj(Doc, [follows|Options])),
- AttsSinceRevPos = proplists:get_value(atts_after_revpos, Options, 0),
- Len = couch_doc:len_doc_to_multi_part_stream(Boundary,JsonBytes,Atts,
- AttsSinceRevPos),
- CType = {<<"Content-Type">>,
- <<"multipart/related; boundary=\"", Boundary/binary, "\"">>},
+ JsonBytes = ?JSON_ENCODE(couch_doc:to_json_obj(Doc,
+ [attachments, follows|Options])),
+ {ContentType, Len} = couch_doc:len_doc_to_multi_part_stream(
+ Boundary,JsonBytes, Atts,false),
+ CType = {<<"Content-Type">>, ContentType},
{ok, Resp} = start_response_length(Req, 200, [CType|Headers], Len),
couch_doc:doc_to_multi_part_stream(Boundary,JsonBytes,Atts,
- AttsSinceRevPos,
- fun(Data) -> couch_httpd:send(Resp, Data) end)
+ fun(Data) -> couch_httpd:send(Resp, Data) end, false)
end;
false ->
- send_json(Req, 200, [], couch_doc:to_json_obj(Doc, Options))
+ send_json(Req, 200, Headers, couch_doc:to_json_obj(Doc, Options))
end.
-
+send_docs_multipart(Req, Results, Options) ->
+ OuterBoundary = couch_uuids:random(),
+ InnerBoundary = couch_uuids:random(),
+ CType = {"Content-Type",
+ "multipart/mixed; boundary=\"" ++ ?b2l(OuterBoundary) ++ "\""},
+ {ok, Resp} = start_chunked_response(Req, 200, [CType]),
+ couch_httpd:send_chunk(Resp, <<"--", OuterBoundary/binary>>),
+ lists:foreach(
+ fun({ok, #doc{atts=Atts}=Doc}) ->
+ JsonBytes = ?JSON_ENCODE(couch_doc:to_json_obj(Doc,
+ [attachments,follows|Options])),
+ {ContentType, _Len} = couch_doc:len_doc_to_multi_part_stream(
+ InnerBoundary, JsonBytes, Atts, false),
+ couch_httpd:send_chunk(Resp, <<"\r\nContent-Type: ",
+ ContentType/binary, "\r\n\r\n">>),
+ couch_doc:doc_to_multi_part_stream(InnerBoundary, JsonBytes, Atts,
+ fun(Data) -> couch_httpd:send_chunk(Resp, Data)
+ end, false),
+ couch_httpd:send_chunk(Resp, <<"\r\n--", OuterBoundary/binary>>);
+ ({{not_found, missing}, RevId}) ->
+ RevStr = couch_doc:rev_to_str(RevId),
+ Json = ?JSON_ENCODE({[{"missing", RevStr}]}),
+ couch_httpd:send_chunk(Resp,
+ [<<"\r\nContent-Type: application/json; error=\"true\"\r\n\r\n">>,
+ Json,
+ <<"\r\n--", OuterBoundary/binary>>])
+ end, Results),
+ couch_httpd:send_chunk(Resp, <<"--">>),
+ couch_httpd:last_chunk(Resp).
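+% Illustrative shape of the multipart/mixed body produced above:
+%   --OuterBoundary
+%   Content-Type: multipart/related; boundary="InnerBoundary"
+%
+%   <doc JSON + attachment parts>
+%   --OuterBoundary
+%   Content-Type: application/json; error="true"
+%
+%   {"missing":"2-abc123"}
+%   --OuterBoundary--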
+
+send_ranges_multipart(Req, ContentType, Len, Att, Ranges) ->
+ Boundary = couch_uuids:random(),
+ CType = {"Content-Type",
+ "multipart/byteranges; boundary=\"" ++ ?b2l(Boundary) ++ "\""},
+ {ok, Resp} = start_chunked_response(Req, 206, [CType]),
+ couch_httpd:send_chunk(Resp, <<"--", Boundary/binary>>),
+ lists:foreach(fun({From, To}) ->
+ ContentRange = make_content_range(From, To, Len),
+ couch_httpd:send_chunk(Resp,
+ <<"\r\nContent-Type: ", ContentType/binary, "\r\n",
+ "Content-Range: ", ContentRange/binary, "\r\n",
+ "\r\n">>),
+ couch_doc:range_att_foldl(Att, From, To + 1,
+ fun(Seg, _) -> send_chunk(Resp, Seg) end, {ok, Resp}),
+ couch_httpd:send_chunk(Resp, <<"\r\n--", Boundary/binary>>)
+ end, Ranges),
+ couch_httpd:send_chunk(Resp, <<"--">>),
+ couch_httpd:last_chunk(Resp),
+ {ok, Resp}.
receive_request_data(Req) ->
{couch_httpd:recv(Req, 0), fun() -> receive_request_data(Req) end}.
+make_content_range(From, To, Len) ->
+ ?l2b(io_lib:format("bytes ~B-~B/~B", [From, To, Len])).
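+% e.g. make_content_range(0, 499, 1000) -> <<"bytes 0-499/1000">>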
+
update_doc_result_to_json({{Id, Rev}, Error}) ->
{_Code, Err, Msg} = couch_httpd:error_info(Error),
{[{id, Id}, {rev, couch_doc:rev_to_str(Rev)},
@@ -943,6 +851,8 @@ couch_doc_from_req(Req, DocId, #doc{revs=Revs}=Doc) ->
case extract_header_rev(Req, ExplicitDocRev) of
missing_rev ->
Revs2 = {0, []};
+ ExplicitDocRev ->
+ Revs2 = Revs;
{Pos, Rev} ->
Revs2 = {Pos, [Rev]}
end,
@@ -977,7 +887,7 @@ couch_doc_open(Db, DocId, Rev, Options) ->
% Attachment request handlers
-db_attachment_req(#httpd{method='GET'}=Req, Db, DocId, FileNameParts) ->
+db_attachment_req(#httpd{method='GET',mochi_req=MochiReq}=Req, Db, DocId, FileNameParts) ->
FileName = list_to_binary(mochiweb_util:join(lists:map(fun binary_to_list/1, FileNameParts),"/")),
#doc_query_args{
rev=Rev,
@@ -989,24 +899,83 @@ db_attachment_req(#httpd{method='GET'}=Req, Db, DocId, FileNameParts) ->
case [A || A <- Atts, A#att.name == FileName] of
[] ->
throw({not_found, "Document is missing attachment"});
- [#att{type=Type, len=Len}=Att] ->
+ [#att{type=Type, encoding=Enc, disk_len=DiskLen, att_len=AttLen}=Att] ->
Etag = couch_httpd:doc_etag(Doc),
- couch_httpd:etag_respond(Req, Etag, fun() ->
- {ok, Resp} = start_response_length(Req, 200, [
- {"ETag", Etag},
- {"Cache-Control", "must-revalidate"},
- {"Content-Type", binary_to_list(Type)}
- ], integer_to_list(Len)),
- couch_doc:att_foldl(
- Att,
- fun(BinSegment, _) -> send(Resp, BinSegment) end,
- {ok, Resp} % Seed in case of 0 byte attachment.
- )
- end)
+ ReqAcceptsAttEnc = lists:member(
+ atom_to_list(Enc),
+ couch_httpd:accepted_encodings(Req)
+ ),
+ Len = case {Enc, ReqAcceptsAttEnc} of
+ {identity, _} ->
+ % stored and served in identity form
+ DiskLen;
+ {_, false} when DiskLen =/= AttLen ->
+ % Stored encoded, but client doesn't accept the encoding we used,
+ % so we need to decode on the fly. DiskLen is the identity length
+ % of the attachment.
+ DiskLen;
+ {_, true} ->
+ % Stored and served encoded. AttLen is the encoded length.
+ AttLen;
+ _ ->
+ % We received an encoded attachment and stored it as such, so we
+ % don't know the identity length. The client doesn't accept the
+ % encoding, and since we cannot serve a correct Content-Length
+ % header we'll fall back to a chunked response.
+ undefined
+ end,
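+ % To summarize the case above: serve the identity length when known or
+ % decodable on the fly, the encoded length when the client accepts the
+ % stored encoding, and fall back to chunked transfer when neither holds.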
+ Headers = [
+ {"ETag", Etag},
+ {"Cache-Control", "must-revalidate"},
+ {"Content-Type", binary_to_list(Type)}
+ ] ++ case ReqAcceptsAttEnc of
+ true ->
+ [{"Content-Encoding", atom_to_list(Enc)}];
+ _ ->
+ []
+ end ++ case Enc of
+ identity ->
+ [{"Accept-Ranges", "bytes"}];
+ _ ->
+ [{"Accept-Ranges", "none"}]
+ end,
+ AttFun = case ReqAcceptsAttEnc of
+ false ->
+ fun couch_doc:att_foldl_decode/3;
+ true ->
+ fun couch_doc:att_foldl/3
+ end,
+ couch_httpd:etag_respond(
+ Req,
+ Etag,
+ fun() ->
+ case Len of
+ undefined ->
+ {ok, Resp} = start_chunked_response(Req, 200, Headers),
+ AttFun(Att, fun(Seg, _) -> send_chunk(Resp, Seg) end, {ok, Resp}),
+ last_chunk(Resp);
+ _ ->
+ Ranges = parse_ranges(MochiReq:get(range), Len),
+ case {Enc, Ranges} of
+ {identity, [{From, To}]} ->
+ Headers1 = [{<<"Content-Range">>, make_content_range(From, To, Len)}]
+ ++ Headers,
+ {ok, Resp} = start_response_length(Req, 206, Headers1, To - From + 1),
+ couch_doc:range_att_foldl(Att, From, To + 1,
+ fun(Seg, _) -> send(Resp, Seg) end, {ok, Resp});
+ {identity, Ranges} when is_list(Ranges) ->
+ send_ranges_multipart(Req, Type, Len, Att, Ranges);
+ _ ->
+ {ok, Resp} = start_response_length(Req, 200, Headers, Len),
+ AttFun(Att, fun(Seg, _) -> send(Resp, Seg) end, {ok, Resp})
+ end
+ end
+ end
+ )
end;
-db_attachment_req(#httpd{method=Method}=Req, Db, DocId, FileNameParts)
+db_attachment_req(#httpd{method=Method,mochi_req=MochiReq}=Req, Db, DocId, FileNameParts)
when (Method == 'PUT') or (Method == 'DELETE') ->
FileName = validate_attachment_name(
mochiweb_util:join(
@@ -1040,17 +1009,42 @@ db_attachment_req(#httpd{method=Method}=Req, Db, DocId, FileNameParts)
0 ->
<<"">>;
Length when is_integer(Length) ->
- fun() -> couch_httpd:recv(Req, 0) end;
- Length ->
- exit({length_not_integer, Length})
+ Expect = case couch_httpd:header_value(Req, "expect") of
+ undefined ->
+ undefined;
+ Value when is_list(Value) ->
+ string:to_lower(Value)
+ end,
+ case Expect of
+ "100-continue" ->
+ MochiReq:start_raw_response({100, gb_trees:empty()});
+ _Else ->
+ ok
+ end,
+
+ fun() -> couch_httpd:recv(Req, 0) end
end,
- len = case couch_httpd:header_value(Req,"Content-Length") of
+ att_len = case couch_httpd:header_value(Req,"Content-Length") of
undefined ->
undefined;
Length ->
list_to_integer(Length)
end,
- md5 = get_md5_header(Req)
+ md5 = get_md5_header(Req),
+ encoding = case string:to_lower(string:strip(
+ couch_httpd:header_value(Req,"Content-Encoding","identity")
+ )) of
+ "identity" ->
+ identity;
+ "gzip" ->
+ gzip;
+ _ ->
+ throw({
+ bad_ctype,
+ "Only gzip and identity content-encodings are supported"
+ })
+ end
}]
end,
@@ -1076,7 +1070,8 @@ db_attachment_req(#httpd{method=Method}=Req, Db, DocId, FileNameParts)
'DELETE' ->
{200, []};
_ ->
- {201, [{"Location", absolute_uri(Req, "/" ++
+ {201, [{"Etag", "\"" ++ ?b2l(couch_doc:rev_to_str(UpdatedRev)) ++ "\""},
+ {"Location", absolute_uri(Req, "/" ++
binary_to_list(DbName) ++ "/" ++
binary_to_list(DocId) ++ "/" ++
binary_to_list(FileName)
@@ -1091,6 +1086,25 @@ db_attachment_req(#httpd{method=Method}=Req, Db, DocId, FileNameParts)
db_attachment_req(Req, _Db, _DocId, _FileNameParts) ->
send_method_not_allowed(Req, "DELETE,GET,HEAD,PUT").
+parse_ranges(undefined, _Len) ->
+ undefined;
+parse_ranges(fail, _Len) ->
+ undefined;
+parse_ranges(Ranges, Len) ->
+ parse_ranges(Ranges, Len, []).
+
+parse_ranges([], _Len, Acc) ->
+ lists:reverse(Acc);
+parse_ranges([{From, To}|_], _Len, _Acc) when is_integer(From) andalso is_integer(To) andalso To < From ->
+ throw(requested_range_not_satisfiable);
+parse_ranges([{From, To}|Rest], Len, Acc) when is_integer(To) andalso To >= Len ->
+ parse_ranges([{From, Len-1}] ++ Rest, Len, Acc);
+parse_ranges([{none, To}|Rest], Len, Acc) ->
+ parse_ranges([{Len - To, Len - 1}] ++ Rest, Len, Acc);
+parse_ranges([{From, none}|Rest], Len, Acc) ->
+ parse_ranges([{From, Len - 1}] ++ Rest, Len, Acc);
+parse_ranges([{From,To}|Rest], Len, Acc) ->
+ parse_ranges(Rest, Len, [{From, To}] ++ Acc).
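+% Normalization examples for a hypothetical Len of 1000:
+%   [{0, 499}]    -> [{0, 499}]    (first 500 bytes)
+%   [{500, none}] -> [{500, 999}]  (open-ended range "500-")
+%   [{none, 500}] -> [{500, 999}]  (suffix range "-500")
+%   [{0, 1500}]   -> [{0, 999}]    (To is clamped to Len - 1)
+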
get_md5_header(Req) ->
ContentMD5 = couch_httpd:header_value(Req, "Content-MD5"),
@@ -1112,17 +1126,6 @@ get_md5_header(Req) ->
<<>>
end.
-parse_doc_format(FormatStr) when is_binary(FormatStr) ->
- parse_doc_format(?b2l(FormatStr));
-parse_doc_format(FormatStr) when is_list(FormatStr) ->
- SplitFormat = lists:splitwith(fun($/) -> false; (_) -> true end, FormatStr),
- case SplitFormat of
- {DesignName, [$/ | ShowName]} -> {?l2b(DesignName), ?l2b(ShowName)};
- _Else -> throw({bad_request, <<"Invalid doc format">>})
- end;
-parse_doc_format(_BadFormatStr) ->
- throw({bad_request, <<"Invalid doc format">>}).
-
parse_doc_query(Req) ->
lists:foldl(fun({Key,Value}, Args) ->
case {Key, Value} of
@@ -1161,11 +1164,41 @@ parse_doc_query(Req) ->
Args#doc_query_args{update_type=replicated_changes};
{"new_edits", "true"} ->
Args#doc_query_args{update_type=interactive_edit};
+ {"att_encoding_info", "true"} ->
+ Options = [att_encoding_info | Args#doc_query_args.options],
+ Args#doc_query_args{options=Options};
_Else -> % unknown key value pair, ignore.
Args
end
end, #doc_query_args{}, couch_httpd:qs(Req)).
+parse_changes_query(Req) ->
+ lists:foldl(fun({Key, Value}, Args) ->
+ case {Key, Value} of
+ {"feed", _} ->
+ Args#changes_args{feed=Value};
+ {"descending", "true"} ->
+ Args#changes_args{dir=rev};
+ {"since", _} ->
+ Args#changes_args{since=list_to_integer(Value)};
+ {"limit", _} ->
+ Args#changes_args{limit=list_to_integer(Value)};
+ {"style", _} ->
+ Args#changes_args{style=list_to_existing_atom(Value)};
+ {"heartbeat", "true"} ->
+ Args#changes_args{heartbeat=true};
+ {"heartbeat", _} ->
+ Args#changes_args{heartbeat=list_to_integer(Value)};
+ {"timeout", _} ->
+ Args#changes_args{timeout=list_to_integer(Value)};
+ {"include_docs", "true"} ->
+ Args#changes_args{include_docs=true};
+ {"filter", _} ->
+ Args#changes_args{filter=Value};
+ _Else -> % unknown key value pair, ignore.
+ Args
+ end
+ end, #changes_args{}, couch_httpd:qs(Req)).
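+% e.g. GET /db/_changes?feed=longpoll&since=42&limit=10 parses to
+% #changes_args{feed="longpoll", since=42, limit=10}, leaving the other
+% fields at their record defaults.
+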
extract_header_rev(Req, ExplicitRev) when is_binary(ExplicitRev) or is_list(ExplicitRev)->
extract_header_rev(Req, couch_doc:parse_rev(ExplicitRev));
@@ -1185,15 +1218,19 @@ extract_header_rev(Req, ExplicitRev) ->
parse_copy_destination_header(Req) ->
- Destination = couch_httpd:header_value(Req, "Destination"),
- case re:run(Destination, "\\?", [{capture, none}]) of
- nomatch ->
- {list_to_binary(Destination), {0, []}};
- match ->
- [DocId, RevQs] = re:split(Destination, "\\?", [{return, list}]),
- [_RevQueryKey, Rev] = re:split(RevQs, "=", [{return, list}]),
- {Pos, RevId} = couch_doc:parse_rev(Rev),
- {list_to_binary(DocId), {Pos, [RevId]}}
+ case couch_httpd:header_value(Req, "Destination") of
+ undefined ->
+ throw({bad_request, "Destination header is mandatory for COPY."});
+ Destination ->
+ case re:run(Destination, "\\?", [{capture, none}]) of
+ nomatch ->
+ {list_to_binary(Destination), {0, []}};
+ match ->
+ [DocId, RevQs] = re:split(Destination, "\\?", [{return, list}]),
+ [_RevQueryKey, Rev] = re:split(RevQs, "=", [{return, list}]),
+ {Pos, RevId} = couch_doc:parse_rev(Rev),
+ {list_to_binary(DocId), {Pos, [RevId]}}
+ end
end.
validate_attachment_names(Doc) ->
diff --git a/src/couchdb/couch_httpd_external.erl b/src/couchdb/couch_httpd_external.erl
index efbe87b8..f8d6f171 100644
--- a/src/couchdb/couch_httpd_external.erl
+++ b/src/couchdb/couch_httpd_external.erl
@@ -56,6 +56,7 @@ process_external_req(HttpReq, Db, Name) ->
json_req_obj(Req, Db) -> json_req_obj(Req, Db, null).
json_req_obj(#httpd{mochi_req=Req,
method=Method,
+ requested_path_parts=RequestedPath,
path_parts=Path,
req_body=ReqBody
}, Db, DocId) ->
@@ -72,18 +73,22 @@ json_req_obj(#httpd{mochi_req=Req,
Headers = Req:get(headers),
Hlist = mochiweb_headers:to_list(Headers),
{ok, Info} = couch_db:get_db_info(Db),
- % add headers...
+
+% add headers...
{[{<<"info">>, {Info}},
{<<"id">>, DocId},
+ {<<"uuid">>, couch_uuids:new()},
{<<"method">>, Method},
+ {<<"requested_path">>, RequestedPath},
{<<"path">>, Path},
- {<<"query">>, to_json_terms(Req:parse_qs())},
+ {<<"query">>, json_query_keys(to_json_terms(Req:parse_qs()))},
{<<"headers">>, to_json_terms(Hlist)},
{<<"body">>, Body},
{<<"peer">>, ?l2b(Req:get(peer))},
{<<"form">>, to_json_terms(ParsedForm)},
{<<"cookie">>, to_json_terms(Req:parse_cookie())},
- {<<"userCtx">>, couch_util:json_user_ctx(Db)}]}.
+ {<<"userCtx">>, couch_util:json_user_ctx(Db)},
+ {<<"secObj">>, couch_db:get_security(Db)}]}.
to_json_terms(Data) ->
to_json_terms(Data, []).
@@ -95,6 +100,19 @@ to_json_terms([{Key, Value} | Rest], Acc) when is_atom(Key) ->
to_json_terms([{Key, Value} | Rest], Acc) ->
to_json_terms(Rest, [{list_to_binary(Key), list_to_binary(Value)} | Acc]).
+json_query_keys({Json}) ->
+ json_query_keys(Json, []).
+json_query_keys([], Acc) ->
+ {lists:reverse(Acc)};
+json_query_keys([{<<"startkey">>, Value} | Rest], Acc) ->
+ json_query_keys(Rest, [{<<"startkey">>, couch_util:json_decode(Value)}|Acc]);
+json_query_keys([{<<"endkey">>, Value} | Rest], Acc) ->
+ json_query_keys(Rest, [{<<"endkey">>, couch_util:json_decode(Value)}|Acc]);
+json_query_keys([{<<"key">>, Value} | Rest], Acc) ->
+ json_query_keys(Rest, [{<<"key">>, couch_util:json_decode(Value)}|Acc]);
+json_query_keys([Term | Rest], Acc) ->
+ json_query_keys(Rest, [Term|Acc]).
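+% startkey, endkey and key arrive JSON-encoded on the query string, so they
+% are decoded here; an external thus sees e.g. ["foo",1] rather than the
+% raw string "[\"foo\",1]".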
+
send_external_response(#httpd{mochi_req=MochiReq}=Req, Response) ->
#extern_resp_args{
code = Code,
@@ -124,7 +142,7 @@ parse_external_response({Response}) ->
Args#extern_resp_args{data=Value, ctype="text/html; charset=utf-8"};
{<<"base64">>, Value} ->
Args#extern_resp_args{
- data=couch_util:decodeBase64(Value),
+ data=base64:decode(Value),
ctype="application/binary"
};
{<<"headers">>, {Headers}} ->
diff --git a/src/couchdb/couch_httpd_misc_handlers.erl b/src/couchdb/couch_httpd_misc_handlers.erl
index 2d67b321..6cc48f51 100644
--- a/src/couchdb/couch_httpd_misc_handlers.erl
+++ b/src/couchdb/couch_httpd_misc_handlers.erl
@@ -15,7 +15,7 @@
-export([handle_welcome_req/2,handle_favicon_req/2,handle_utils_dir_req/2,
handle_all_dbs_req/1,handle_replicate_req/1,handle_restart_req/1,
handle_uuids_req/1,handle_config_req/1,handle_log_req/1,
- handle_task_status_req/1,handle_sleep_req/1]).
+ handle_task_status_req/1]).
-export([increment_update_seq_req/2]).
@@ -46,6 +46,7 @@ handle_favicon_req(#httpd{method='GET'}=Req, DocumentRoot) ->
{"Expires", httpd_util:rfc1123_date(OneYearFromNow)}
],
couch_httpd:serve_file(Req, "favicon.ico", DocumentRoot, CachingHeaders);
+
handle_favicon_req(Req, _) ->
send_method_not_allowed(Req, "GET,HEAD").
@@ -63,13 +64,6 @@ handle_utils_dir_req(#httpd{method='GET'}=Req, DocumentRoot) ->
handle_utils_dir_req(Req, _) ->
send_method_not_allowed(Req, "GET,HEAD").
-handle_sleep_req(#httpd{method='GET'}=Req) ->
- Time = list_to_integer(couch_httpd:qs_value(Req, "time")),
- receive snicklefart -> ok after Time -> ok end,
- send_json(Req, {[{ok, true}]});
-handle_sleep_req(Req) ->
- send_method_not_allowed(Req, "GET,HEAD").
-
handle_all_dbs_req(#httpd{method='GET'}=Req) ->
{ok, DbNames} = couch_server:all_databases(),
send_json(Req, DbNames);
@@ -85,25 +79,38 @@ handle_task_status_req(Req) ->
send_method_not_allowed(Req, "GET,HEAD").
handle_replicate_req(#httpd{method='POST'}=Req) ->
+ couch_httpd:validate_ctype(Req, "application/json"),
PostBody = couch_httpd:json_body_obj(Req),
try couch_rep:replicate(PostBody, Req#httpd.user_ctx) of
{ok, {continuous, RepId}} ->
send_json(Req, 202, {[{ok, true}, {<<"_local_id">>, RepId}]});
+ {ok, {cancelled, RepId}} ->
+ send_json(Req, 200, {[{ok, true}, {<<"_local_id">>, RepId}]});
{ok, {JsonResults}} ->
send_json(Req, {[{ok, true} | JsonResults]});
{error, {Type, Details}} ->
send_json(Req, 500, {[{error, Type}, {reason, Details}]});
+ {error, not_found} ->
+ send_json(Req, 404, {[{error, not_found}]});
{error, Reason} ->
- send_json(Req, 500, {[{error, Reason}]})
+ try
+ send_json(Req, 500, {[{error, Reason}]})
+ catch
+ exit:{json_encode, _} ->
+ send_json(Req, 500, {[{error, couch_util:to_binary(Reason)}]})
+ end
catch
throw:{db_not_found, Msg} ->
- send_json(Req, 404, {[{error, db_not_found}, {reason, Msg}]})
+ send_json(Req, 404, {[{error, db_not_found}, {reason, Msg}]});
+ throw:{unauthorized, Msg} ->
+ send_json(Req, 404, {[{error, unauthorized}, {reason, Msg}]})
end;
handle_replicate_req(Req) ->
send_method_not_allowed(Req, "POST").
handle_restart_req(#httpd{method='POST'}=Req) ->
+ couch_httpd:validate_ctype(Req, "application/json"),
ok = couch_httpd:verify_is_server_admin(Req),
couch_server_sup:restart_core_server(),
send_json(Req, 200, {[{ok, true}]});
@@ -155,15 +162,6 @@ handle_config_req(#httpd{method='GET', path_parts=[_,Section]}=Req) ->
KVs = [{list_to_binary(Key), list_to_binary(Value)}
|| {Key, Value} <- couch_config:get(Section)],
send_json(Req, 200, {KVs});
-% PUT /_config/Section/Key
-% "value"
-handle_config_req(#httpd{method='PUT', path_parts=[_, Section, Key]}=Req) ->
- ok = couch_httpd:verify_is_server_admin(Req),
- Value = couch_httpd:json_body(Req),
- Persist = couch_httpd:header_value(Req, "X-Couch-Persist") /= "false",
- OldValue = couch_config:get(Section, Key, ""),
- ok = couch_config:set(Section, Key, ?b2l(Value), Persist),
- send_json(Req, 200, list_to_binary(OldValue));
% GET /_config/Section/Key
handle_config_req(#httpd{method='GET', path_parts=[_, Section, Key]}=Req) ->
ok = couch_httpd:verify_is_server_admin(Req),
@@ -173,24 +171,88 @@ handle_config_req(#httpd{method='GET', path_parts=[_, Section, Key]}=Req) ->
Value ->
send_json(Req, 200, list_to_binary(Value))
end;
-% DELETE /_config/Section/Key
-handle_config_req(#httpd{method='DELETE',path_parts=[_,Section,Key]}=Req) ->
+% PUT or DELETE /_config/Section/Key
+handle_config_req(#httpd{method=Method, path_parts=[_, Section, Key]}=Req)
+ when (Method == 'PUT') or (Method == 'DELETE') ->
ok = couch_httpd:verify_is_server_admin(Req),
Persist = couch_httpd:header_value(Req, "X-Couch-Persist") /= "false",
+ case couch_config:get(<<"httpd">>, <<"config_whitelist">>, null) of
+ null ->
+ % No whitelist; allow all changes.
+ handle_approved_config_req(Req, Persist);
+ WhitelistValue ->
+ % Provide a failsafe to protect against inadvertently locking
+ % oneself out of the config by supplying a syntactically incorrect
+ % Erlang term. To intentionally lock down the whitelist, supply a
+ % well-formed list which does not include the whitelist config
+ % variable itself.
+ FallbackWhitelist = [{<<"httpd">>, <<"config_whitelist">>}],
+
+ Whitelist = case couch_util:parse_term(WhitelistValue) of
+ {ok, Value} when is_list(Value) ->
+ Value;
+ {ok, _NonListValue} ->
+ FallbackWhitelist;
+ {error, _} ->
+ [{WhitelistSection, WhitelistKey}] = FallbackWhitelist,
+ ?LOG_ERROR("Only whitelisting ~s/~s due to error parsing: ~p",
+ [WhitelistSection, WhitelistKey, WhitelistValue]),
+ FallbackWhitelist
+ end,
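+ % For illustration, a whitelist in the ini file might read:
+ %   config_whitelist = [{httpd,config_whitelist}, {log,level}]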
+
+ IsRequestedKeyVal = fun(Element) ->
+ case Element of
+ {A, B} ->
+ % For readability, tuples may be used instead of binaries
+ % in the whitelist.
+ case {couch_util:to_binary(A), couch_util:to_binary(B)} of
+ {Section, Key} ->
+ true;
+ {Section, <<"*">>} ->
+ true;
+ _Else ->
+ false
+ end;
+ _Else ->
+ false
+ end
+ end,
+
+ case lists:any(IsRequestedKeyVal, Whitelist) of
+ true ->
+ % Allow modifying this whitelisted variable.
+ handle_approved_config_req(Req, Persist);
+ _NotWhitelisted ->
+ % Disallow modifying this non-whitelisted variable.
+ send_error(Req, 400, <<"modification_not_allowed">>,
+ ?l2b("This config variable is read-only"))
+ end
+ end;
+handle_config_req(Req) ->
+ send_method_not_allowed(Req, "GET,PUT,DELETE").
+
+% PUT /_config/Section/Key
+% "value"
+handle_approved_config_req(#httpd{method='PUT', path_parts=[_, Section, Key]}=Req, Persist) ->
+ Value = couch_httpd:json_body(Req),
+ OldValue = couch_config:get(Section, Key, ""),
+ ok = couch_config:set(Section, Key, ?b2l(Value), Persist),
+ send_json(Req, 200, list_to_binary(OldValue));
+% DELETE /_config/Section/Key
+handle_approved_config_req(#httpd{method='DELETE',path_parts=[_,Section,Key]}=Req, Persist) ->
case couch_config:get(Section, Key, null) of
null ->
throw({not_found, unknown_config_value});
OldValue ->
couch_config:delete(Section, Key, Persist),
send_json(Req, 200, list_to_binary(OldValue))
- end;
-handle_config_req(Req) ->
- send_method_not_allowed(Req, "GET,PUT,DELETE").
+ end.
% httpd db handlers
increment_update_seq_req(#httpd{method='POST'}=Req, Db) ->
+ couch_httpd:validate_ctype(Req, "application/json"),
{ok, NewSeq} = couch_db:increment_update_seq(Db),
send_json(Req, {[{ok, true},
{update_seq, NewSeq}
@@ -201,6 +263,7 @@ increment_update_seq_req(Req, _Db) ->
% httpd log handlers
handle_log_req(#httpd{method='GET'}=Req) ->
+ ok = couch_httpd:verify_is_server_admin(Req),
Bytes = list_to_integer(couch_httpd:qs_value(Req, "bytes", "1000")),
Offset = list_to_integer(couch_httpd:qs_value(Req, "offset", "0")),
Chunk = couch_log:read(Bytes, Offset),
diff --git a/src/couchdb/couch_httpd_oauth.erl b/src/couchdb/couch_httpd_oauth.erl
index ddf84008..05ee10e2 100644
--- a/src/couchdb/couch_httpd_oauth.erl
+++ b/src/couchdb/couch_httpd_oauth.erl
@@ -18,9 +18,9 @@
% OAuth auth handler using per-node user db
oauth_authentication_handler(#httpd{mochi_req=MochiReq}=Req) ->
serve_oauth(Req, fun(URL, Params, Consumer, Signature) ->
- AccessToken = proplists:get_value("oauth_token", Params),
+ AccessToken = couch_util:get_value("oauth_token", Params),
case couch_config:get("oauth_token_secrets", AccessToken) of
- undefined ->
+ undefined ->
couch_httpd:send_error(Req, 400, <<"invalid_token">>,
<<"Invalid OAuth token.">>);
TokenSecret ->
@@ -41,17 +41,17 @@ set_user_ctx(Req, AccessToken) ->
undefined -> throw({bad_request, unknown_oauth_token});
Value -> ?l2b(Value)
end,
- case couch_httpd_auth:get_user(Name) of
+ case couch_auth_cache:get_user_creds(Name) of
nil -> Req;
User ->
- Roles = proplists:get_value(<<"roles">>, User, []),
+ Roles = couch_util:get_value(<<"roles">>, User, []),
Req#httpd{user_ctx=#user_ctx{name=Name, roles=Roles}}
end.
% OAuth request_token
handle_oauth_req(#httpd{path_parts=[_OAuth, <<"request_token">>], method=Method}=Req) ->
serve_oauth(Req, fun(URL, Params, Consumer, Signature) ->
- AccessToken = proplists:get_value("oauth_token", Params),
+ AccessToken = couch_util:get_value("oauth_token", Params),
TokenSecret = couch_config:get("oauth_token_secrets", AccessToken),
case oauth:verify(Signature, atom_to_list(Method), URL, Params, Consumer, TokenSecret) of
true ->
@@ -88,7 +88,7 @@ serve_oauth_authorize(#httpd{method=Method}=Req) ->
'GET' ->
% Confirm with the User that they want to authenticate the Consumer
serve_oauth(Req, fun(URL, Params, Consumer, Signature) ->
- AccessToken = proplists:get_value("oauth_token", Params),
+ AccessToken = couch_util:get_value("oauth_token", Params),
TokenSecret = couch_config:get("oauth_token_secrets", AccessToken),
case oauth:verify(Signature, "GET", URL, Params, Consumer, TokenSecret) of
true ->
@@ -100,7 +100,7 @@ serve_oauth_authorize(#httpd{method=Method}=Req) ->
'POST' ->
% If the User has confirmed, we direct the User back to the Consumer with a verification code
serve_oauth(Req, fun(URL, Params, Consumer, Signature) ->
- AccessToken = proplists:get_value("oauth_token", Params),
+ AccessToken = couch_util:get_value("oauth_token", Params),
TokenSecret = couch_config:get("oauth_token_secrets", AccessToken),
case oauth:verify(Signature, "POST", URL, Params, Consumer, TokenSecret) of
true ->
@@ -129,24 +129,24 @@ serve_oauth(#httpd{mochi_req=MochiReq}=Req, Fun, FailSilently) ->
end
end,
HeaderParams = oauth_uri:params_from_header_string(AuthHeader),
- %Realm = proplists:get_value("realm", HeaderParams),
+ %Realm = couch_util:get_value("realm", HeaderParams),
Params = proplists:delete("realm", HeaderParams) ++ MochiReq:parse_qs(),
?LOG_DEBUG("OAuth Params: ~p", [Params]),
- case proplists:get_value("oauth_version", Params, "1.0") of
+ case couch_util:get_value("oauth_version", Params, "1.0") of
"1.0" ->
- case proplists:get_value("oauth_consumer_key", Params, undefined) of
+ case couch_util:get_value("oauth_consumer_key", Params, undefined) of
undefined ->
case FailSilently of
true -> Req;
false -> couch_httpd:send_error(Req, 400, <<"invalid_consumer">>, <<"Invalid consumer.">>)
end;
ConsumerKey ->
- SigMethod = proplists:get_value("oauth_signature_method", Params),
+ SigMethod = couch_util:get_value("oauth_signature_method", Params),
case consumer_lookup(ConsumerKey, SigMethod) of
none ->
couch_httpd:send_error(Req, 400, <<"invalid_consumer">>, <<"Invalid consumer (key or signature method).">>);
Consumer ->
- Signature = proplists:get_value("oauth_signature", Params),
+ Signature = couch_util:get_value("oauth_signature", Params),
URL = couch_httpd:absolute_uri(Req, MochiReq:get(raw_path)),
Fun(URL, proplists:delete("oauth_signature", Params),
Consumer, Signature)
diff --git a/src/couchdb/couch_httpd_proxy.erl b/src/couchdb/couch_httpd_proxy.erl
new file mode 100644
index 00000000..e4b94f22
--- /dev/null
+++ b/src/couchdb/couch_httpd_proxy.erl
@@ -0,0 +1,425 @@
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+% http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+-module(couch_httpd_proxy).
+
+-export([handle_proxy_req/2]).
+
+-include("couch_db.hrl").
+-include("../ibrowse/ibrowse.hrl").
+
+-define(TIMEOUT, infinity).
+-define(PKT_SIZE, 4096).
+
+
+handle_proxy_req(Req, ProxyDest) ->
+
+ %% Bug in Mochiweb?
+ %% Reported here: http://github.com/mochi/mochiweb/issues/issue/16
+ erase(mochiweb_request_body_length),
+
+ Method = get_method(Req),
+ Url = get_url(Req, ProxyDest),
+ Version = get_version(Req),
+ Headers = get_headers(Req),
+ Body = get_body(Req),
+ Options = [
+ {http_vsn, Version},
+ {headers_as_is, true},
+ {response_format, binary},
+ {stream_to, {self(), once}}
+ ],
+ case ibrowse:send_req(Url, Headers, Method, Body, Options, ?TIMEOUT) of
+ {ibrowse_req_id, ReqId} ->
+ stream_response(Req, ProxyDest, ReqId);
+ {error, Reason} ->
+ throw({error, Reason})
+ end.
+
+
+get_method(#httpd{mochi_req=MochiReq}) ->
+ case MochiReq:get(method) of
+ Method when is_atom(Method) ->
+ list_to_atom(string:to_lower(atom_to_list(Method)));
+ Method when is_list(Method) ->
+ list_to_atom(string:to_lower(Method));
+ Method when is_binary(Method) ->
+ list_to_atom(string:to_lower(?b2l(Method)))
+ end.
+
+
+get_url(Req, ProxyDest) when is_binary(ProxyDest) ->
+ get_url(Req, ?b2l(ProxyDest));
+get_url(#httpd{mochi_req=MochiReq}=Req, ProxyDest) ->
+ BaseUrl = case mochiweb_util:partition(ProxyDest, "/") of
+ {[], "/", _} -> couch_httpd:absolute_uri(Req, ProxyDest);
+ _ -> ProxyDest
+ end,
+ ProxyPrefix = "/" ++ ?b2l(hd(Req#httpd.path_parts)),
+ RequestedPath = MochiReq:get(raw_path),
+ case mochiweb_util:partition(RequestedPath, ProxyPrefix) of
+ {[], ProxyPrefix, []} ->
+ BaseUrl;
+ {[], ProxyPrefix, [$/ | DestPath]} ->
+ remove_trailing_slash(BaseUrl) ++ "/" ++ DestPath;
+ {[], ProxyPrefix, DestPath} ->
+ remove_trailing_slash(BaseUrl) ++ "/" ++ DestPath;
+ _Else ->
+ throw({invalid_url_path, {ProxyPrefix, RequestedPath}})
+ end.
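+% Hypothetical example: for a proxy handler mounted at /_fti with
+% ProxyDest "http://127.0.0.1:5985", a request for /_fti/db/_design/x
+% yields "http://127.0.0.1:5985/db/_design/x".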
+
+get_version(#httpd{mochi_req=MochiReq}) ->
+ MochiReq:get(version).
+
+
+get_headers(#httpd{mochi_req=MochiReq}) ->
+ to_ibrowse_headers(mochiweb_headers:to_list(MochiReq:get(headers)), []).
+
+to_ibrowse_headers([], Acc) ->
+ lists:reverse(Acc);
+to_ibrowse_headers([{K, V} | Rest], Acc) when is_atom(K) ->
+ to_ibrowse_headers([{atom_to_list(K), V} | Rest], Acc);
+to_ibrowse_headers([{K, V} | Rest], Acc) when is_list(K) ->
+ case string:to_lower(K) of
+ "content-length" ->
+ to_ibrowse_headers(Rest, [{content_length, V} | Acc]);
+ % This appears to make ibrowse too smart.
+ %"transfer-encoding" ->
+ % to_ibrowse_headers(Rest, [{transfer_encoding, V} | Acc]);
+ _ ->
+ to_ibrowse_headers(Rest, [{K, V} | Acc])
+ end.
+
+get_body(#httpd{method='GET'}) ->
+ fun() -> eof end;
+get_body(#httpd{method='HEAD'}) ->
+ fun() -> eof end;
+get_body(#httpd{method='DELETE'}) ->
+ fun() -> eof end;
+get_body(#httpd{mochi_req=MochiReq}) ->
+ case MochiReq:get(body_length) of
+ undefined ->
+ <<>>;
+ {unknown_transfer_encoding, Unknown} ->
+ exit({unknown_transfer_encoding, Unknown});
+ chunked ->
+ {fun stream_chunked_body/1, {init, MochiReq, 0}};
+ 0 ->
+ <<>>;
+ Length when is_integer(Length) andalso Length > 0 ->
+ {fun stream_length_body/1, {init, MochiReq, Length}};
+ Length ->
+ exit({invalid_body_length, Length})
+ end.
+
+
+remove_trailing_slash(Url) ->
+ rem_slash(lists:reverse(Url)).
+
+rem_slash([]) ->
+ [];
+rem_slash([$\s | RevUrl]) ->
+ rem_slash(RevUrl);
+rem_slash([$\t | RevUrl]) ->
+ rem_slash(RevUrl);
+rem_slash([$\r | RevUrl]) ->
+ rem_slash(RevUrl);
+rem_slash([$\n | RevUrl]) ->
+ rem_slash(RevUrl);
+rem_slash([$/ | RevUrl]) ->
+ rem_slash(RevUrl);
+rem_slash(RevUrl) ->
+ lists:reverse(RevUrl).
+
+
+stream_chunked_body({init, MReq, 0}) ->
+ % First chunk, do expect-continue dance.
+ init_body_stream(MReq),
+ stream_chunked_body({stream, MReq, 0, [], ?PKT_SIZE});
+stream_chunked_body({stream, MReq, 0, Buf, BRem}) ->
+ % Finished a chunk, get next length. If next length
+ % is 0, it's time to try to read the trailers.
+ {CRem, Data} = read_chunk_length(MReq),
+ case CRem of
+ 0 ->
+ BodyData = iolist_to_binary(lists:reverse(Buf, Data)),
+ {ok, BodyData, {trailers, MReq, [], ?PKT_SIZE}};
+ _ ->
+ stream_chunked_body(
+ {stream, MReq, CRem, [Data | Buf], BRem-size(Data)}
+ )
+ end;
+stream_chunked_body({stream, MReq, CRem, Buf, BRem}) when BRem =< 0 ->
+ % Time to empty our buffers to the upstream socket.
+ BodyData = iolist_to_binary(lists:reverse(Buf)),
+ {ok, BodyData, {stream, MReq, CRem, [], ?PKT_SIZE}};
+stream_chunked_body({stream, MReq, CRem, Buf, BRem}) ->
+ % Buffer some more data from the client.
+ Length = lists:min([CRem, BRem]),
+ Socket = MReq:get(socket),
+ NewState = case mochiweb_socket:recv(Socket, Length, ?TIMEOUT) of
+ {ok, Data} when size(Data) == CRem ->
+ case mochiweb_socket:recv(Socket, 2, ?TIMEOUT) of
+ {ok, <<"\r\n">>} ->
+ {stream, MReq, 0, [<<"\r\n">>, Data | Buf], BRem-Length-2};
+ _ ->
+ exit(normal)
+ end;
+ {ok, Data} ->
+ {stream, MReq, CRem-Length, [Data | Buf], BRem-Length};
+ _ ->
+ exit(normal)
+ end,
+ stream_chunked_body(NewState);
+stream_chunked_body({trailers, MReq, Buf, BRem}) when BRem =< 0 ->
+ % Empty our buffers and send data upstream.
+ BodyData = iolist_to_binary(lists:reverse(Buf)),
+ {ok, BodyData, {trailers, MReq, [], ?PKT_SIZE}};
+stream_chunked_body({trailers, MReq, Buf, BRem}) ->
+ % Read another trailer into the buffer or stop on an
+ % empty line.
+ Socket = MReq:get(socket),
+ mochiweb_socket:setopts(Socket, [{packet, line}]),
+ case mochiweb_socket:recv(Socket, 0, ?TIMEOUT) of
+ {ok, <<"\r\n">>} ->
+ mochiweb_socket:setopts(Socket, [{packet, raw}]),
+ BodyData = iolist_to_binary(lists:reverse(Buf, <<"\r\n">>)),
+ {ok, BodyData, eof};
+ {ok, Footer} ->
+ mochiweb_socket:setopts(Socket, [{packet, raw}]),
+ NewState = {trailers, MReq, [Footer | Buf], BRem-size(Footer)},
+ stream_chunked_body(NewState);
+ _ ->
+ exit(normal)
+ end;
+stream_chunked_body(eof) ->
+ % Tell ibrowse we're done sending data.
+ eof.
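+% The streamer is a small state machine driven by ibrowse: {init,...} does
+% the expect-continue dance, {stream,...} re-reads the client's chunks and
+% flushes whenever the ?PKT_SIZE budget (BRem) is spent, {trailers,...}
+% forwards trailing headers up to the terminating blank line, and eof ends
+% the upload.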
+
+
+stream_length_body({init, MochiReq, Length}) ->
+ % Do the expect-continue dance
+ init_body_stream(MochiReq),
+ stream_length_body({stream, MochiReq, Length});
+stream_length_body({stream, _MochiReq, 0}) ->
+ % Finished streaming.
+ eof;
+stream_length_body({stream, MochiReq, Length}) ->
+ BufLen = lists:min([Length, ?PKT_SIZE]),
+ case MochiReq:recv(BufLen) of
+ <<>> -> eof;
+ Bin -> {ok, Bin, {stream, MochiReq, Length-BufLen}}
+ end.
+
+
+init_body_stream(MochiReq) ->
+ Expect = case MochiReq:get_header_value("expect") of
+ undefined ->
+ undefined;
+ Value when is_list(Value) ->
+ string:to_lower(Value)
+ end,
+ case Expect of
+ "100-continue" ->
+ MochiReq:start_raw_response({100, gb_trees:empty()});
+ _Else ->
+ ok
+ end.
+
+
+read_chunk_length(MochiReq) ->
+ Socket = MochiReq:get(socket),
+ mochiweb_socket:setopts(Socket, [{packet, line}]),
+ case mochiweb_socket:recv(Socket, 0, ?TIMEOUT) of
+ {ok, Header} ->
+ mochiweb_socket:setopts(Socket, [{packet, raw}]),
+ Splitter = fun(C) ->
+ C =/= $\r andalso C =/= $\n andalso C =/= $\s
+ end,
+ {Hex, _Rest} = lists:splitwith(Splitter, ?b2l(Header)),
+ {mochihex:to_int(Hex), Header};
+ _ ->
+ exit(normal)
+ end.
+
+
+stream_response(Req, ProxyDest, ReqId) ->
+ receive
+ {ibrowse_async_headers, ReqId, "100", _} ->
+ % ibrowse doesn't handle 100 Continue responses, which
+ % means we have to discard them so the proxy client
+ % doesn't get confused.
+ ibrowse:stream_next(ReqId),
+ stream_response(Req, ProxyDest, ReqId);
+ {ibrowse_async_headers, ReqId, Status, Headers} ->
+ {Source, Dest} = get_urls(Req, ProxyDest),
+ FixedHeaders = fix_headers(Source, Dest, Headers, []),
+ case body_length(FixedHeaders) of
+ chunked ->
+ {ok, Resp} = couch_httpd:start_chunked_response(
+ Req, list_to_integer(Status), FixedHeaders
+ ),
+ ibrowse:stream_next(ReqId),
+ stream_chunked_response(Req, ReqId, Resp),
+ {ok, Resp};
+ Length when is_integer(Length) ->
+ {ok, Resp} = couch_httpd:start_response_length(
+ Req, list_to_integer(Status), FixedHeaders, Length
+ ),
+ ibrowse:stream_next(ReqId),
+ stream_length_response(Req, ReqId, Resp),
+ {ok, Resp};
+ _ ->
+ {ok, Resp} = couch_httpd:start_response(
+ Req, list_to_integer(Status), FixedHeaders
+ ),
+ ibrowse:stream_next(ReqId),
+ stream_length_response(Req, ReqId, Resp),
+ % XXX: MochiWeb apparently doesn't look at the
+ % response to see if it must force close the
+ % connection. So we help it out here.
+ erlang:put(mochiweb_request_force_close, true),
+ {ok, Resp}
+ end
+ end.
+
+
+stream_chunked_response(Req, ReqId, Resp) ->
+ receive
+ {ibrowse_async_response, ReqId, {error, Reason}} ->
+ throw({error, Reason});
+ {ibrowse_async_response, ReqId, Chunk} ->
+ couch_httpd:send_chunk(Resp, Chunk),
+ ibrowse:stream_next(ReqId),
+ stream_chunked_response(Req, ReqId, Resp);
+ {ibrowse_async_response_end, ReqId} ->
+ couch_httpd:last_chunk(Resp)
+ end.
+
+
+stream_length_response(Req, ReqId, Resp) ->
+ receive
+ {ibrowse_async_response, ReqId, {error, Reason}} ->
+ throw({error, Reason});
+ {ibrowse_async_response, ReqId, Chunk} ->
+ couch_httpd:send(Resp, Chunk),
+ ibrowse:stream_next(ReqId),
+ stream_length_response(Req, ReqId, Resp);
+ {ibrowse_async_response_end, ReqId} ->
+ ok
+ end.
+
+
+get_urls(Req, ProxyDest) ->
+ SourceUrl = couch_httpd:absolute_uri(Req, "/" ++ hd(Req#httpd.path_parts)),
+ Source = parse_url(?b2l(iolist_to_binary(SourceUrl))),
+ case (catch parse_url(ProxyDest)) of
+ Dest when is_record(Dest, url) ->
+ {Source, Dest};
+ _ ->
+ DestUrl = couch_httpd:absolute_uri(Req, ProxyDest),
+ {Source, parse_url(DestUrl)}
+ end.
+
+
+fix_headers(_, _, [], Acc) ->
+ lists:reverse(Acc);
+fix_headers(Source, Dest, [{K, V} | Rest], Acc) ->
+ Fixed = case string:to_lower(K) of
+ "location" -> rewrite_location(Source, Dest, V);
+ "content-location" -> rewrite_location(Source, Dest, V);
+ "uri" -> rewrite_location(Source, Dest, V);
+ "destination" -> rewrite_location(Source, Dest, V);
+ "set-cookie" -> rewrite_cookie(Source, Dest, V);
+ _ -> V
+ end,
+ fix_headers(Source, Dest, Rest, [{K, Fixed} | Acc]).
+
+
+rewrite_location(Source, #url{host=Host, port=Port, protocol=Proto}, Url) ->
+ case (catch parse_url(Url)) of
+ #url{host=Host, port=Port, protocol=Proto} = Location ->
+ DestLoc = #url{
+ protocol=Source#url.protocol,
+ host=Source#url.host,
+ port=Source#url.port,
+ path=join_url_path(Source#url.path, Location#url.path)
+ },
+ url_to_url(DestLoc);
+ #url{} ->
+ Url;
+ _ ->
+ url_to_url(Source#url{path=join_url_path(Source#url.path, Url)})
+ end.
+
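+% Worked example for rewrite_location/3 (illustrative values): with Source
+% parsed from "http://node.local:5984/_fti" and Dest parsed from
+% "http://127.0.0.1:9090/", a proxied header value
+% "http://127.0.0.1:9090/index.html" matches Dest and is rewritten to
+% "http://node.local:5984/_fti/index.html"; a header pointing at any other
+% absolute url is passed through untouched.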
+
+rewrite_cookie(_Source, _Dest, Cookie) ->
+ Cookie.
+
+
+parse_url(Url) when is_binary(Url) ->
+ ibrowse_lib:parse_url(?b2l(Url));
+parse_url(Url) when is_list(Url) ->
+ ibrowse_lib:parse_url(?b2l(iolist_to_binary(Url))).
+
+
+join_url_path(Src, Dst) ->
+ Src2 = case lists:reverse(Src) of
+ "/" ++ RestSrc -> lists:reverse(RestSrc);
+ _ -> Src
+ end,
+ Dst2 = case Dst of
+ "/" ++ RestDst -> RestDst;
+ _ -> Dst
+ end,
+ Src2 ++ "/" ++ Dst2.
+
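+% Worked example (illustrative): join_url_path("/db/", "/_design/doc")
+% trims the trailing slash of the first part and the leading slash of the
+% second, returning "/db/_design/doc".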
+
+url_to_url(#url{host=Host, port=Port, path=Path, protocol=Proto}) ->
+ LPort = case {Proto, Port} of
+ {http, 80} -> "";
+ {https, 443} -> "";
+ _ -> ":" ++ integer_to_list(Port)
+ end,
+ LPath = case Path of
+ "/" ++ _RestPath -> Path;
+ _ -> "/" ++ Path
+ end,
+ atom_to_list(Proto) ++ "://" ++ Host ++ LPort ++ LPath.
+
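+% Worked examples for url_to_url/1 (illustrative): default ports are
+% elided and a missing leading slash is added, so
+% url_to_url(#url{protocol=http, host="example.com", port=80, path="db"})
+% yields "http://example.com/db", while the same record with port 5984
+% yields "http://example.com:5984/db".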
+
+body_length(Headers) ->
+ case is_chunked(Headers) of
+ true -> chunked;
+ _ -> content_length(Headers)
+ end.
+
+
+is_chunked([]) ->
+ false;
+is_chunked([{K, V} | Rest]) ->
+ case string:to_lower(K) of
+ "transfer-encoding" ->
+ string:to_lower(V) == "chunked";
+ _ ->
+ is_chunked(Rest)
+ end.
+
+content_length([]) ->
+ undefined;
+content_length([{K, V} | Rest]) ->
+ case string:to_lower(K) of
+ "content-length" ->
+ list_to_integer(V);
+ _ ->
+ content_length(Rest)
+ end.
+
diff --git a/src/couchdb/couch_httpd_rewrite.erl b/src/couchdb/couch_httpd_rewrite.erl
new file mode 100644
index 00000000..893f99ed
--- /dev/null
+++ b/src/couchdb/couch_httpd_rewrite.erl
@@ -0,0 +1,461 @@
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+% http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+%
+% bind_path is based on bind method from Webmachine
+
+
+%% @doc Module for URL rewriting by pattern matching.
+
+-module(couch_httpd_rewrite).
+-export([handle_rewrite_req/3]).
+-include("couch_db.hrl").
+
+-define(SEPARATOR, $\/).
+-define(MATCH_ALL, {bind, <<"*">>}).
+
+
+%% @doc The http rewrite handler. All rewriting is done from
+%% /dbname/_design/ddocname/_rewrite by default.
+%%
+%% Each rule should be in the rewrites member of the design doc.
+%% Example of a complete rule:
+%%
+%% {
+%% ....
+%% "rewrites": [
+%% {
+%% "from": "",
+%% "to": "index.html",
+%% "method": "GET",
+%% "query": {}
+%% }
+%% ]
+%% }
+%%
+%% from: the path rule used to bind the current uri to the rule. It
+%% uses pattern matching for that.
+%%
+%% to: rule used to rewrite the url. It can contain variables depending on
+%% binding variables discovered during pattern matching and query args (url
+%% args and the query member).
+%%
+%% method: method used to bind the request method to the rule. "*" by
+%% default.
+%% query: query args you want to define. They can contain dynamic variables
+%% by binding the key to the bindings.
+%%
+%%
+%% to and from are paths with patterns. A pattern can be a string starting
+%% with ":" or "*", ex:
+%% /somepath/:var/*
+%%
+%% Such a path is converted to an erlang list by splitting on "/". Each var
+%% is converted to a binding, and "*" to the match-all binding. The pattern
+%% matching is done by splitting the request url on "/" into a list of
+%% tokens. A string pattern will match the equal token. The match-all
+%% pattern ("*") will match any number of tokens, but may only be present
+%% as the last pathterm in a pathspec. If all tokens are matched and all
+%% pathterms are used, then the pathspec matches. It works like webmachine.
+%% Each identified token is reused in the to rule and in the query.
+%%
+%% The pattern matching is done by first matching the request method to a
+%% rule. By default all methods match a rule (method is equal to "*" by
+%% default). Then it tries to match the path to one rule. If no rule
+%% matches, a 404 error is returned.
+%%
+%% Once a rule is found we rewrite the request url using the "to" and
+%% "query" members. The identified tokens are matched to the rule and
+%% replace each var. If '*' is found in the rule it will contain the
+%% remaining part, if it exists.
+%%
+%% Examples:
+%%
+%% Dispatch rule URL TO Tokens
+%%
+%% {"from": "/a/b", /a/b?k=v /some/b?k=v var =:= b
+%% "to": "/some/"} k = v
+%%
+%% {"from": "/a/b", /a/b /some/b?var=b var =:= b
+%% "to": "/some/:var"}
+%%
+%% {"from": "/a", /a /some
+%% "to": "/some/*"}
+%%
+%% {"from": "/a/*", /a/b/c /some/b/c
+%% "to": "/some/*"}
+%%
+%% {"from": "/a", /a /some
+%% "to": "/some/*"}
+%%
+%% {"from": "/a/:foo/*", /a/b/c /some/b/c?foo=b foo =:= b
+%% "to": "/some/:foo/*"}
+%%
+%% {"from": "/a/:foo", /a/b /some/?k=b&foo=b foo =:= b
+%% "to": "/some",
+%% "query": {
+%% "k": ":foo"
+%% }}
+%%
+%% {"from": "/a", /a?foo=b /some/b foo =:= b
+%% "to": "/some/:foo",
+%% }}
+%%
+%% {"from": "/a/<foo>" /a/b /some/b foo =:= b
+%% "to": "/a/b",
+%% }}
+%%
+%% {"from": "/a/<foo>.blah" /a/b /some/b foo =:= b
+%% "to": "/a/b",
+%% }}
+
+
+
+
+handle_rewrite_req(#httpd{
+ path_parts=[DbName, <<"_design">>, DesignName, _Rewrite|PathParts],
+ method=Method,
+ mochi_req=MochiReq}=Req, _Db, DDoc) ->
+
+ % we are in a design handler
+ DesignId = <<"_design/", DesignName/binary>>,
+ Prefix = <<"/", DbName/binary, "/", DesignId/binary>>,
+ QueryList = couch_httpd:qs(Req),
+ QueryList1 = [{to_binding(K), V} || {K, V} <- QueryList],
+
+ #doc{body={Props}} = DDoc,
+
+ % get rules from ddoc
+ case couch_util:get_value(<<"rewrites">>, Props) of
+ undefined ->
+ couch_httpd:send_error(Req, 404, <<"rewrite_error">>,
+ <<"Invalid path.">>);
+ Rules ->
+ % create dispatch list from rules
+ DispatchList = [make_rule(Rule) || {Rule} <- Rules],
+
+ %% get raw path by matching url to a rule.
+ RawPath = case try_bind_path(DispatchList, couch_util:to_binary(Method), PathParts,
+ QueryList1) of
+ no_dispatch_path ->
+ throw(not_found);
+ {NewPathParts, Bindings} ->
+ Parts = [quote_plus(X) || X <- NewPathParts],
+
+ % build the new path and reencode the query args, converting
+ % them to json where needed
+ Path = lists:append(
+ string:join(Parts, [?SEPARATOR]),
+ case Bindings of
+ [] -> [];
+ _ -> [$?, encode_query(Bindings)]
+ end),
+
+ % if path is relative detect it and rewrite path
+ case mochiweb_util:safe_relative_path(Path) of
+ undefined ->
+ ?b2l(Prefix) ++ "/" ++ Path;
+ P1 ->
+ ?b2l(Prefix) ++ "/" ++ P1
+ end
+
+ end,
+
+ % normalize final path (fix levels "." and "..")
+ RawPath1 = ?b2l(iolist_to_binary(normalize_path(RawPath))),
+
+ ?LOG_DEBUG("rewrite to ~p ~n", [RawPath1]),
+
+ % build a new mochiweb request
+ MochiReq1 = mochiweb_request:new(MochiReq:get(socket),
+ MochiReq:get(method),
+ RawPath1,
+ MochiReq:get(version),
+ MochiReq:get(headers)),
+
+ % cleanup: this forces mochiweb to reparse the raw uri.
+ MochiReq1:cleanup(),
+
+ #httpd{
+ db_url_handlers = DbUrlHandlers,
+ design_url_handlers = DesignUrlHandlers,
+ default_fun = DefaultFun,
+ url_handlers = UrlHandlers
+ } = Req,
+ couch_httpd:handle_request_int(MochiReq1, DefaultFun,
+ UrlHandlers, DbUrlHandlers, DesignUrlHandlers)
+ end.
+
+quote_plus({bind, X}) ->
+ mochiweb_util:quote_plus(X);
+quote_plus(X) ->
+ mochiweb_util:quote_plus(X).
+
+%% @doc Try to find a rule matching the current url. If none is found,
+%% a 404 not_found error is thrown.
+try_bind_path([], _Method, _PathParts, _QueryList) ->
+ no_dispatch_path;
+try_bind_path([Dispatch|Rest], Method, PathParts, QueryList) ->
+ [{PathParts1, Method1}, RedirectPath, QueryArgs] = Dispatch,
+ case bind_method(Method1, Method) of
+ true ->
+ case bind_path(PathParts1, PathParts, []) of
+ {ok, Remaining, Bindings} ->
+ Bindings1 = Bindings ++ QueryList,
+ % parse query args from the rule and fill
+ % them with binding vars where needed
+ QueryArgs1 = make_query_list(QueryArgs, Bindings1, []),
+ % remove params in QueryLists1 that are already in
+ % QueryArgs1
+ Bindings2 = lists:foldl(fun({K, V}, Acc) ->
+ K1 = to_binding(K),
+ KV = case couch_util:get_value(K1, QueryArgs1) of
+ undefined -> [{K1, V}];
+ _V1 -> []
+ end,
+ Acc ++ KV
+ end, [], Bindings1),
+
+ FinalBindings = Bindings2 ++ QueryArgs1,
+ NewPathParts = make_new_path(RedirectPath, FinalBindings,
+ Remaining, []),
+ {NewPathParts, FinalBindings};
+ fail ->
+ try_bind_path(Rest, Method, PathParts, QueryList)
+ end;
+ false ->
+ try_bind_path(Rest, Method, PathParts, QueryList)
+ end.
+
+%% Dynamically rewrite the query list given as the query member in
+%% rewrites. Each value is replaced by a binding or by an argument
+%% passed in the url.
+make_query_list([], _Bindings, Acc) ->
+ Acc;
+make_query_list([{Key, {Value}}|Rest], Bindings, Acc) ->
+ Value1 = to_json({Value}),
+ make_query_list(Rest, Bindings, [{to_binding(Key), Value1}|Acc]);
+make_query_list([{Key, Value}|Rest], Bindings, Acc) when is_binary(Value) ->
+ Value1 = replace_var(Key, Value, Bindings),
+ make_query_list(Rest, Bindings, [{to_binding(Key), Value1}|Acc]);
+make_query_list([{Key, Value}|Rest], Bindings, Acc) when is_list(Value) ->
+ Value1 = replace_var(Key, Value, Bindings),
+ make_query_list(Rest, Bindings, [{to_binding(Key), Value1}|Acc]);
+make_query_list([{Key, Value}|Rest], Bindings, Acc) ->
+ make_query_list(Rest, Bindings, [{to_binding(Key), Value}|Acc]).
+
+replace_var(Key, Value, Bindings) ->
+ case Value of
+ <<":", Var/binary>> ->
+ get_var(Var, Bindings, Value);
+ <<"*">> ->
+ get_var(Value, Bindings, Value);
+ _ when is_list(Value) ->
+ Value1 = lists:foldr(fun(V, Acc) ->
+ V1 = case V of
+ <<":", VName/binary>> ->
+ case get_var(VName, Bindings, V) of
+ V2 when is_list(V2) ->
+ iolist_to_binary(V2);
+ V2 -> V2
+ end;
+ <<"*">> ->
+ get_var(V, Bindings, V);
+ _ ->
+ V
+ end,
+ [V1|Acc]
+ end, [], Value),
+ to_json(Value1);
+ _ when is_binary(Value) ->
+ Value;
+ _ ->
+ case Key of
+ <<"key">> -> to_json(Value);
+ <<"startkey">> -> to_json(Value);
+ <<"start_key">> -> to_json(Value);
+ <<"endkey">> -> to_json(Value);
+ <<"end_key">> -> to_json(Value);
+ _ ->
+ lists:flatten(?JSON_ENCODE(Value))
+ end
+ end.
+
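+% Worked example for replace_var/3 (illustrative): with
+% Bindings = [{{bind, <<"foo">>}, <<"b">>}],
+% replace_var(<<"k">>, <<":foo">>, Bindings) resolves the ":foo" reference
+% and returns <<"b">>; an unbound variable is returned unchanged.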
+
+get_var(VarName, Props, Default) ->
+ VarName1 = to_binding(VarName),
+ couch_util:get_value(VarName1, Props, Default).
+
+%% @doc Build the new path from bindings. Bindings are query args
+%% (+ the dynamic query, rewritten if needed) and bindings found in
+%% the bind_path step.
+make_new_path([], _Bindings, _Remaining, Acc) ->
+ lists:reverse(Acc);
+make_new_path([?MATCH_ALL], _Bindings, Remaining, Acc) ->
+ Acc1 = lists:reverse(Acc) ++ Remaining,
+ Acc1;
+make_new_path([?MATCH_ALL|_Rest], _Bindings, Remaining, Acc) ->
+ Acc1 = lists:reverse(Acc) ++ Remaining,
+ Acc1;
+make_new_path([{bind, P}|Rest], Bindings, Remaining, Acc) ->
+ P2 = case couch_util:get_value({bind, P}, Bindings) of
+ undefined -> << "undefined">>;
+ P1 -> P1
+ end,
+ make_new_path(Rest, Bindings, Remaining, [P2|Acc]);
+make_new_path([P|Rest], Bindings, Remaining, Acc) ->
+ make_new_path(Rest, Bindings, Remaining, [P|Acc]).
+
+
+%% @doc Check if the method of the request fits the rule method. If the
+%% method rule is '*', which is the default, all
+%% request methods will bind. It allows us to make rules
+%% depending on the HTTP method.
+bind_method(?MATCH_ALL, _Method ) ->
+ true;
+bind_method({bind, Method}, Method) ->
+ true;
+bind_method(_, _) ->
+ false.
+
+
+%% @doc Bind a path. Using the from rule we try to bind variables from
+%% the current url by pattern matching.
+bind_path([], [], Bindings) ->
+ {ok, [], Bindings};
+bind_path([?MATCH_ALL], [Match|_RestMatch]=Rest, Bindings) ->
+ {ok, Rest, [{?MATCH_ALL, Match}|Bindings]};
+bind_path(_, [], _) ->
+ fail;
+bind_path([{bind, {Token, MatchRe}}|RestToken],
+ [Match|RestMatch],Bindings) ->
+ case re:run(Match, MatchRe, [{capture, all, binary}]) of
+ {match, [_, Match1]} ->
+ bind_path(RestToken, RestMatch, [{{bind, Token}, Match1}|Bindings]);
+ _ ->
+ fail
+ end;
+bind_path([{bind, Token}|RestToken],[Match|RestMatch],Bindings) ->
+ bind_path(RestToken, RestMatch, [{{bind, Token}, Match}|Bindings]);
+bind_path([Token|RestToken], [Token|RestMatch], Bindings) ->
+ bind_path(RestToken, RestMatch, Bindings);
+bind_path(_, _, _) ->
+ fail.
+
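+% Worked example for bind_path/3 (illustrative): matching the rule path
+% /a/:foo/* against the request path /a/b/c gives
+% bind_path([<<"a">>, {bind, <<"foo">>}, {bind, <<"*">>}],
+% [<<"a">>, <<"b">>, <<"c">>], []) ->
+% {ok, [<<"c">>], [{{bind, <<"*">>}, <<"c">>}, {{bind, <<"foo">>}, <<"b">>}]}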
+
+%% normalize path.
+normalize_path(Path) ->
+ "/" ++ string:join(normalize_path1(string:tokens(Path,
+ "/"), []), [?SEPARATOR]).
+
+
+normalize_path1([], Acc) ->
+ lists:reverse(Acc);
+normalize_path1([".."|Rest], Acc) ->
+ Acc1 = case Acc of
+ [] -> [".."|Acc];
+ [T|_] when T =:= ".." -> [".."|Acc];
+ [_|R] -> R
+ end,
+ normalize_path1(Rest, Acc1);
+normalize_path1(["."|Rest], Acc) ->
+ normalize_path1(Rest, Acc);
+normalize_path1([Path|Rest], Acc) ->
+ normalize_path1(Rest, [Path|Acc]).
+
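+% Worked example (illustrative): normalize_path("/db/./_design/../docid")
+% drops the "." segment, cancels "_design" against the following "..",
+% and returns "/db/docid".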
+
+%% @doc transform a json rule into erlang terms for pattern matching
+make_rule(Rule) ->
+ Method = case couch_util:get_value(<<"method">>, Rule) of
+ undefined -> ?MATCH_ALL;
+ M -> to_binding(M)
+ end,
+ QueryArgs = case couch_util:get_value(<<"query">>, Rule) of
+ undefined -> [];
+ {Args} -> Args
+ end,
+ FromParts = case couch_util:get_value(<<"from">>, Rule) of
+ undefined -> [?MATCH_ALL];
+ From ->
+ parse_path(From)
+ end,
+ ToParts = case couch_util:get_value(<<"to">>, Rule) of
+ undefined ->
+ throw({error, invalid_rewrite_target});
+ To ->
+ parse_path(To)
+ end,
+ [{FromParts, Method}, ToParts, QueryArgs].
+
+parse_path(Path) ->
+ {ok, SlashRE} = re:compile(<<"\\/">>),
+ path_to_list(re:split(Path, SlashRE), [], 0).
+
+%% @doc convert a path rule (from or to) to an erlang list.
+%% "*" and path variables starting with ":" are converted
+%% to bindings.
+path_to_list([], Acc, _DotDotCount) ->
+ lists:reverse(Acc);
+path_to_list([<<>>|R], Acc, DotDotCount) ->
+ path_to_list(R, Acc, DotDotCount);
+path_to_list([<<"*">>|R], Acc, DotDotCount) ->
+ path_to_list(R, [?MATCH_ALL|Acc], DotDotCount);
+path_to_list([<<"..">>|R], Acc, DotDotCount) when DotDotCount == 2 ->
+ case couch_config:get("httpd", "secure_rewrites", "true") of
+ "false" ->
+ path_to_list(R, [<<"..">>|Acc], DotDotCount+1);
+ _Else ->
+ ?LOG_INFO("insecure_rewrite_rule ~p blocked", [lists:reverse(Acc) ++ [<<"..">>] ++ R]),
+ throw({insecure_rewrite_rule, "too many ../.. segments"})
+ end;
+path_to_list([<<"..">>|R], Acc, DotDotCount) ->
+ path_to_list(R, [<<"..">>|Acc], DotDotCount+1);
+path_to_list([P|R], Acc, DotDotCount) ->
+ P1 = case P of
+ <<"<", _Rest/binary>> ->
+ {ok, VarRe} = re:compile(<<"<([^>].*)>(.*)">>),
+ case re:run(P, VarRe, [{capture, all, binary}]) of
+ {match, [_, Var, Match]} ->
+ {ok, MatchRe} = re:compile(<<"(.*)", Match/binary>>),
+ {bind, {Var, MatchRe}};
+ _ -> P
+ end;
+ <<":", Var/binary>> ->
+ to_binding(Var);
+ _ -> P
+ end,
+ path_to_list(R, [P1|Acc], DotDotCount).
+
+encode_query(Props) ->
+ Props1 = lists:foldl(fun ({{bind, K}, V}, Acc) ->
+ case K of
+ <<"*">> -> Acc;
+ _ ->
+ V1 = case is_list(V) orelse is_binary(V) of
+ true -> V;
+ false ->
+ % probably it's a number
+ quote_plus(V)
+ end,
+ [{K, V1} | Acc]
+ end
+ end, [], Props),
+ lists:flatten(mochiweb_util:urlencode(Props1)).
+
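+% Worked example for encode_query/1 (illustrative):
+% encode_query([{{bind, <<"foo">>}, <<"b">>}, {{bind, <<"*">>}, <<"x">>}])
+% drops the "*" binding and returns "foo=b".
+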
+to_binding({bind, V}) ->
+ {bind, V};
+to_binding(V) when is_list(V) ->
+ to_binding(?l2b(V));
+to_binding(V) ->
+ {bind, V}.
+
+to_json(V) ->
+ iolist_to_binary(?JSON_ENCODE(V)).
diff --git a/src/couchdb/couch_httpd_show.erl b/src/couchdb/couch_httpd_show.erl
index 467c0a42..70dd82e1 100644
--- a/src/couchdb/couch_httpd_show.erl
+++ b/src/couchdb/couch_httpd_show.erl
@@ -13,7 +13,7 @@
-module(couch_httpd_show).
-export([handle_doc_show_req/3, handle_doc_update_req/3, handle_view_list_req/3,
- handle_doc_show/5, handle_view_list/6, get_fun_key/3]).
+ handle_view_list/6, get_fun_key/3]).
-include("couch_db.hrl").
@@ -22,18 +22,42 @@
start_json_response/2,send_chunk/2,last_chunk/1,send_chunked_error/2,
start_chunked_response/3, send_error/4]).
-% /db/_design/foo/show/bar/docid
-% show converts a json doc to a response of any content-type.
+
+% /db/_design/foo/_show/bar/docid
+% show converts a json doc to a response of any content-type.
% it looks up the doc and then passes it to the query server.
% then it sends the response from the query server to the http client.
+
+maybe_open_doc(Db, DocId) ->
+ case catch couch_httpd_db:couch_doc_open(Db, DocId, nil, [conflicts]) of
+ {not_found, missing} -> nil;
+ {not_found,deleted} -> nil;
+ Doc -> Doc
+ end.
handle_doc_show_req(#httpd{
path_parts=[_, _, _, _, ShowName, DocId]
}=Req, Db, DDoc) ->
+
% open the doc
- Doc = couch_httpd_db:couch_doc_open(Db, DocId, nil, [conflicts]),
+ Doc = maybe_open_doc(Db, DocId),
+
% we don't handle revs here b/c they are an internal api
% returns 404 if there is no doc with DocId
- handle_doc_show(Req, Db, DDoc, ShowName, Doc);
+ handle_doc_show(Req, Db, DDoc, ShowName, Doc, DocId);
+
+handle_doc_show_req(#httpd{
+ path_parts=[_, _, _, _, ShowName, DocId|Rest]
+ }=Req, Db, DDoc) ->
+
+ DocParts = [DocId|Rest],
+ DocId1 = ?l2b(string:join([?b2l(P)|| P <- DocParts], "/")),
+
+ % open the doc
+ Doc = maybe_open_doc(Db, DocId1),
+
+ % we don't handle revs here b/c they are an internal api
+ % pass 404 docs to the show function
+ handle_doc_show(Req, Db, DDoc, ShowName, Doc, DocId1);
handle_doc_show_req(#httpd{
path_parts=[_, _, _, _, ShowName]
@@ -45,12 +69,15 @@ handle_doc_show_req(Req, _Db, _DDoc) ->
send_error(Req, 404, <<"show_error">>, <<"Invalid path.">>).
handle_doc_show(Req, Db, DDoc, ShowName, Doc) ->
+ handle_doc_show(Req, Db, DDoc, ShowName, Doc, null).
+
+handle_doc_show(Req, Db, DDoc, ShowName, Doc, DocId) ->
% get responder for ddoc/showname
CurrentEtag = show_etag(Req, Doc, DDoc, []),
couch_httpd:etag_respond(Req, CurrentEtag, fun() ->
- JsonReq = couch_httpd_external:json_req_obj(Req, Db),
+ JsonReq = couch_httpd_external:json_req_obj(Req, Db, DocId),
JsonDoc = couch_query_servers:json_doc(Doc),
- [<<"resp">>, ExternalResp] =
+ [<<"resp">>, ExternalResp] =
couch_query_servers:ddoc_prompt(DDoc, [<<"shows">>, ShowName], [JsonDoc, JsonReq]),
JsonResp = apply_etag(ExternalResp, CurrentEtag),
couch_httpd_external:send_external_response(Req, JsonResp)
@@ -68,7 +95,7 @@ show_etag(#httpd{user_ctx=UserCtx}=Req, Doc, DDoc, More) ->
get_fun_key(DDoc, Type, Name) ->
#doc{body={Props}} = DDoc,
- Lang = proplists:get_value(<<"language">>, Props, <<"javascript">>),
+ Lang = couch_util:get_value(<<"language">>, Props, <<"javascript">>),
Src = couch_util:get_nested_json_value({Props}, [Type, Name]),
{Lang, Src}.
@@ -98,22 +125,27 @@ handle_doc_update_req(Req, _Db, _DDoc) ->
send_doc_update_response(Req, Db, DDoc, UpdateName, Doc, DocId) ->
JsonReq = couch_httpd_external:json_req_obj(Req, Db, DocId),
JsonDoc = couch_query_servers:json_doc(Doc),
- case couch_query_servers:ddoc_prompt(DDoc, [<<"updates">>, UpdateName], [JsonDoc, JsonReq]) of
- [<<"up">>, {NewJsonDoc}, JsonResp] ->
- Options = case couch_httpd:header_value(Req, "X-Couch-Full-Commit", "false") of
+ {Code, JsonResp1} = case couch_query_servers:ddoc_prompt(DDoc,
+ [<<"updates">>, UpdateName], [JsonDoc, JsonReq]) of
+ [<<"up">>, {NewJsonDoc}, {JsonResp}] ->
+ Options = case couch_httpd:header_value(Req, "X-Couch-Full-Commit",
+ "false") of
"true" ->
[full_commit];
_ ->
[]
end,
NewDoc = couch_doc:from_json_obj({NewJsonDoc}),
- Code = 201,
- {ok, _NewRev} = couch_db:update_doc(Db, NewDoc, Options);
+ {ok, NewRev} = couch_db:update_doc(Db, NewDoc, Options),
+ NewRevStr = couch_doc:rev_to_str(NewRev),
+ JsonRespWithRev = {[{<<"headers">>,
+ {[{<<"X-Couch-Update-NewRev">>, NewRevStr}]}} | JsonResp]},
+ {201, JsonRespWithRev};
[<<"up">>, _Other, JsonResp] ->
- Code = 200,
- ok
+ {200, JsonResp}
end,
- JsonResp2 = json_apply_field({<<"code">>, Code}, JsonResp),
+
+ JsonResp2 = couch_util:json_apply_field({<<"code">>, Code}, JsonResp1),
% todo set location field
couch_httpd_external:send_external_response(Req, JsonResp2).
@@ -121,12 +153,14 @@ send_doc_update_response(Req, Db, DDoc, UpdateName, Doc, DocId) ->
% view-list request with view and list from same design doc.
handle_view_list_req(#httpd{method='GET',
path_parts=[_, _, DesignName, _, ListName, ViewName]}=Req, Db, DDoc) ->
- handle_view_list(Req, Db, DDoc, ListName, {DesignName, ViewName}, nil);
+ Keys = couch_httpd:qs_json_value(Req, "keys", nil),
+ handle_view_list(Req, Db, DDoc, ListName, {DesignName, ViewName}, Keys);
% view-list request with view and list from different design docs.
handle_view_list_req(#httpd{method='GET',
path_parts=[_, _, _, _, ListName, ViewDesignName, ViewName]}=Req, Db, DDoc) ->
- handle_view_list(Req, Db, DDoc, ListName, {ViewDesignName, ViewName}, nil);
+ Keys = couch_httpd:qs_json_value(Req, "keys", nil),
+ handle_view_list(Req, Db, DDoc, ListName, {ViewDesignName, ViewName}, Keys);
handle_view_list_req(#httpd{method='GET'}=Req, _Db, _DDoc) ->
send_error(Req, 404, <<"list_error">>, <<"Invalid path.">>);
@@ -136,7 +170,7 @@ handle_view_list_req(#httpd{method='POST',
% {Props2} = couch_httpd:json_body(Req),
ReqBody = couch_httpd:body(Req),
{Props2} = ?JSON_DECODE(ReqBody),
- Keys = proplists:get_value(<<"keys">>, Props2, nil),
+ Keys = couch_util:get_value(<<"keys">>, Props2, nil),
handle_view_list(Req#httpd{req_body=ReqBody}, Db, DDoc, ListName, {DesignName, ViewName}, Keys);
handle_view_list_req(#httpd{method='POST',
@@ -144,7 +178,7 @@ handle_view_list_req(#httpd{method='POST',
% {Props2} = couch_httpd:json_body(Req),
ReqBody = couch_httpd:body(Req),
{Props2} = ?JSON_DECODE(ReqBody),
- Keys = proplists:get_value(<<"keys">>, Props2, nil),
+ Keys = couch_util:get_value(<<"keys">>, Props2, nil),
handle_view_list(Req#httpd{req_body=ReqBody}, Db, DDoc, ListName, {ViewDesignName, ViewName}, Keys);
handle_view_list_req(#httpd{method='POST'}=Req, _Db, _DDoc) ->
@@ -156,23 +190,23 @@ handle_view_list_req(Req, _Db, _DDoc) ->
handle_view_list(Req, Db, DDoc, LName, {ViewDesignName, ViewName}, Keys) ->
ViewDesignId = <<"_design/", ViewDesignName/binary>>,
{ViewType, View, Group, QueryArgs} = couch_httpd_view:load_view(Req, Db, {ViewDesignId, ViewName}, Keys),
- Etag = list_etag(Req, Db, Group, {couch_httpd:doc_etag(DDoc), Keys}),
+ Etag = list_etag(Req, Db, Group, View, {couch_httpd:doc_etag(DDoc), Keys}),
couch_httpd:etag_respond(Req, Etag, fun() ->
- output_list(ViewType, Req, Db, DDoc, LName, View, QueryArgs, Etag, Keys)
- end).
+ output_list(ViewType, Req, Db, DDoc, LName, View, QueryArgs, Etag, Keys, Group)
+ end).
-list_etag(#httpd{user_ctx=UserCtx}=Req, Db, Group, More) ->
+list_etag(#httpd{user_ctx=UserCtx}=Req, Db, Group, View, More) ->
Accept = couch_httpd:header_value(Req, "Accept"),
- couch_httpd_view:view_group_etag(Group, Db, {More, Accept, UserCtx#user_ctx.roles}).
+ couch_httpd_view:view_etag(Db, Group, View, {More, Accept, UserCtx#user_ctx.roles}).
-output_list(map, Req, Db, DDoc, LName, View, QueryArgs, Etag, Keys) ->
- output_map_list(Req, Db, DDoc, LName, View, QueryArgs, Etag, Keys);
-output_list(reduce, Req, Db, DDoc, LName, View, QueryArgs, Etag, Keys) ->
- output_reduce_list(Req, Db, DDoc, LName, View, QueryArgs, Etag, Keys).
+output_list(map, Req, Db, DDoc, LName, View, QueryArgs, Etag, Keys, Group) ->
+ output_map_list(Req, Db, DDoc, LName, View, QueryArgs, Etag, Keys, Group);
+output_list(reduce, Req, Db, DDoc, LName, View, QueryArgs, Etag, Keys, Group) ->
+ output_reduce_list(Req, Db, DDoc, LName, View, QueryArgs, Etag, Keys, Group).
% next step:
% use with_ddoc_proc/2 to make this simpler
-output_map_list(Req, Db, DDoc, LName, View, QueryArgs, Etag, Keys) ->
+output_map_list(Req, Db, DDoc, LName, View, QueryArgs, Etag, Keys, Group) ->
#view_query_args{
limit = Limit,
skip = SkipCount
@@ -188,12 +222,13 @@ output_map_list(Req, Db, DDoc, LName, View, QueryArgs, Etag, Keys) ->
reduce_count = fun couch_view:reduce_to_count/1,
start_response = StartListRespFun = make_map_start_resp_fun(QServer, Db, LName),
send_row = make_map_send_row_fun(QServer)
- },
+ },
+ CurrentSeq = Group#group.current_seq,
{ok, _, FoldResult} = case Keys of
nil ->
- FoldlFun = couch_httpd_view:make_view_fold_fun(Req, QueryArgs, Etag, Db, RowCount, ListFoldHelpers),
- couch_view:fold(View, FoldlFun, FoldAccInit,
+ FoldlFun = couch_httpd_view:make_view_fold_fun(Req, QueryArgs, Etag, Db, CurrentSeq, RowCount, ListFoldHelpers),
+ couch_view:fold(View, FoldlFun, FoldAccInit,
couch_httpd_view:make_key_options(QueryArgs));
Keys ->
lists:foldl(
@@ -202,27 +237,29 @@ output_map_list(Req, Db, DDoc, LName, View, QueryArgs, Etag, Keys) ->
start_key = Key,
end_key = Key
},
- FoldlFun = couch_httpd_view:make_view_fold_fun(Req, QueryArgs2, Etag, Db, RowCount, ListFoldHelpers),
+ FoldlFun = couch_httpd_view:make_view_fold_fun(Req, QueryArgs2, Etag, Db, CurrentSeq, RowCount, ListFoldHelpers),
couch_view:fold(View, FoldlFun, FoldAcc,
couch_httpd_view:make_key_options(QueryArgs2))
end, {ok, nil, FoldAccInit}, Keys)
end,
- finish_list(Req, QServer, Etag, FoldResult, StartListRespFun, RowCount)
+ finish_list(Req, QServer, Etag, FoldResult, StartListRespFun, CurrentSeq, RowCount)
end).
-output_reduce_list(Req, Db, DDoc, LName, View, QueryArgs, Etag, Keys) ->
+output_reduce_list(Req, Db, DDoc, LName, View, QueryArgs, Etag, Keys, Group) ->
#view_query_args{
limit = Limit,
skip = SkipCount,
group_level = GroupLevel
} = QueryArgs,
+ CurrentSeq = Group#group.current_seq,
+
couch_query_servers:with_ddoc_proc(DDoc, fun(QServer) ->
StartListRespFun = make_reduce_start_resp_fun(QServer, Db, LName),
SendListRowFun = make_reduce_send_row_fun(QServer, Db),
{ok, GroupRowsFun, RespFun} = couch_httpd_view:make_reduce_fold_funs(Req,
- GroupLevel, QueryArgs, Etag,
+ GroupLevel, QueryArgs, Etag, CurrentSeq,
#reduce_fold_helper_funs{
start_response = StartListRespFun,
send_row = SendListRowFun
@@ -230,36 +267,36 @@ output_reduce_list(Req, Db, DDoc, LName, View, QueryArgs, Etag, Keys) ->
FoldAccInit = {Limit, SkipCount, undefined, []},
{ok, FoldResult} = case Keys of
nil ->
- couch_view:fold_reduce(View, RespFun, FoldAccInit, [{key_group_fun, GroupRowsFun} |
+ couch_view:fold_reduce(View, RespFun, FoldAccInit, [{key_group_fun, GroupRowsFun} |
couch_httpd_view:make_key_options(QueryArgs)]);
Keys ->
lists:foldl(
fun(Key, {ok, FoldAcc}) ->
couch_view:fold_reduce(View, RespFun, FoldAcc,
- [{key_group_fun, GroupRowsFun} |
+ [{key_group_fun, GroupRowsFun} |
couch_httpd_view:make_key_options(
QueryArgs#view_query_args{start_key=Key, end_key=Key})]
- )
+ )
end, {ok, FoldAccInit}, Keys)
end,
- finish_list(Req, QServer, Etag, FoldResult, StartListRespFun, null)
+ finish_list(Req, QServer, Etag, FoldResult, StartListRespFun, CurrentSeq, null)
end).
make_map_start_resp_fun(QueryServer, Db, LName) ->
- fun(Req, Etag, TotalRows, Offset, _Acc) ->
- Head = {[{<<"total_rows">>, TotalRows}, {<<"offset">>, Offset}]},
+ fun(Req, Etag, TotalRows, Offset, _Acc, UpdateSeq) ->
+ Head = {[{<<"total_rows">>, TotalRows}, {<<"offset">>, Offset}, {<<"update_seq">>, UpdateSeq}]},
start_list_resp(QueryServer, LName, Req, Db, Head, Etag)
end.
make_reduce_start_resp_fun(QueryServer, Db, LName) ->
- fun(Req2, Etag, _Acc) ->
- start_list_resp(QueryServer, LName, Req2, Db, {[]}, Etag)
+ fun(Req2, Etag, _Acc, UpdateSeq) ->
+ start_list_resp(QueryServer, LName, Req2, Db, {[{<<"update_seq">>, UpdateSeq}]}, Etag)
end.
start_list_resp(QServer, LName, Req, Db, Head, Etag) ->
JsonReq = couch_httpd_external:json_req_obj(Req, Db),
- [<<"start">>,Chunks,JsonResp] = couch_query_servers:ddoc_proc_prompt(QServer,
+ [<<"start">>,Chunks,JsonResp] = couch_query_servers:ddoc_proc_prompt(QServer,
[<<"lists">>, LName], [Head, JsonReq]),
JsonResp2 = apply_etag(JsonResp, Etag),
#extern_resp_args{
@@ -313,7 +350,7 @@ send_non_empty_chunk(Resp, Chunk) ->
_ -> send_chunk(Resp, Chunk)
end.
-finish_list(Req, {Proc, _DDocId}, Etag, FoldResult, StartFun, TotalRows) ->
+finish_list(Req, {Proc, _DDocId}, Etag, FoldResult, StartFun, CurrentSeq, TotalRows) ->
FoldResult2 = case FoldResult of
{Limit, SkipCount, Response, RowAcc} ->
{Limit, SkipCount, Response, RowAcc, nil};
@@ -323,8 +360,8 @@ finish_list(Req, {Proc, _DDocId}, Etag, FoldResult, StartFun, TotalRows) ->
case FoldResult2 of
{_, _, undefined, _, _} ->
{ok, Resp, BeginBody} =
- render_head_for_empty_list(StartFun, Req, Etag, TotalRows),
- [<<"end">>, Chunks] = couch_query_servers:proc_prompt(Proc, [<<"list_end">>]),
+ render_head_for_empty_list(StartFun, Req, Etag, CurrentSeq, TotalRows),
+ [<<"end">>, Chunks] = couch_query_servers:proc_prompt(Proc, [<<"list_end">>]),
Chunk = BeginBody ++ ?b2l(?l2b(Chunks)),
send_non_empty_chunk(Resp, Chunk);
{_, _, Resp, stop, _} ->
@@ -336,32 +373,17 @@ finish_list(Req, {Proc, _DDocId}, Etag, FoldResult, StartFun, TotalRows) ->
last_chunk(Resp).
-render_head_for_empty_list(StartListRespFun, Req, Etag, null) ->
- StartListRespFun(Req, Etag, []); % for reduce
-render_head_for_empty_list(StartListRespFun, Req, Etag, TotalRows) ->
- StartListRespFun(Req, Etag, TotalRows, null, []).
-
-
-% Maybe this is in the proplists API
-% todo move to couch_util
-json_apply_field(H, {L}) ->
- json_apply_field(H, L, []).
-json_apply_field({Key, NewValue}, [{Key, _OldVal} | Headers], Acc) ->
- % drop matching keys
- json_apply_field({Key, NewValue}, Headers, Acc);
-json_apply_field({Key, NewValue}, [{OtherKey, OtherVal} | Headers], Acc) ->
- % something else is next, leave it alone.
- json_apply_field({Key, NewValue}, Headers, [{OtherKey, OtherVal} | Acc]);
-json_apply_field({Key, NewValue}, [], Acc) ->
- % end of list, add ours
- {[{Key, NewValue}|Acc]}.
+render_head_for_empty_list(StartListRespFun, Req, Etag, CurrentSeq, null) ->
+ StartListRespFun(Req, Etag, [], CurrentSeq); % for reduce
+render_head_for_empty_list(StartListRespFun, Req, Etag, CurrentSeq, TotalRows) ->
+ StartListRespFun(Req, Etag, TotalRows, null, [], CurrentSeq).
apply_etag({ExternalResponse}, CurrentEtag) ->
% Here we embark on the delicate task of replacing or creating the
% headers on the JsonResponse object. We need to control the Etag and
% Vary headers. If the external function controls the Etag, we'd have to
% run it to check for a match, which sort of defeats the purpose.
- case proplists:get_value(<<"headers">>, ExternalResponse, nil) of
+ case couch_util:get_value(<<"headers">>, ExternalResponse, nil) of
nil ->
% no JSON headers
% add our Etag and Vary headers to the response
@@ -369,8 +391,8 @@ apply_etag({ExternalResponse}, CurrentEtag) ->
JsonHeaders ->
{[case Field of
{<<"headers">>, JsonHeaders} -> % add our headers
- JsonHeadersEtagged = json_apply_field({<<"Etag">>, CurrentEtag}, JsonHeaders),
- JsonHeadersVaried = json_apply_field({<<"Vary">>, <<"Accept">>}, JsonHeadersEtagged),
+ JsonHeadersEtagged = couch_util:json_apply_field({<<"Etag">>, CurrentEtag}, JsonHeaders),
+ JsonHeadersVaried = couch_util:json_apply_field({<<"Vary">>, <<"Accept">>}, JsonHeadersEtagged),
{<<"headers">>, JsonHeadersVaried};
_ -> % skip non-header fields
Field
diff --git a/src/couchdb/couch_httpd_stats_handlers.erl b/src/couchdb/couch_httpd_stats_handlers.erl
index 40444bf8..41aeaed0 100644
--- a/src/couchdb/couch_httpd_stats_handlers.erl
+++ b/src/couchdb/couch_httpd_stats_handlers.erl
@@ -29,7 +29,8 @@ handle_stats_req(#httpd{method='GET', path_parts=[_, _Mod]}) ->
handle_stats_req(#httpd{method='GET', path_parts=[_, Mod, Key]}=Req) ->
flush(Req),
- Stats = couch_stats_aggregator:get_json({?b2a(Mod), ?b2a(Key)}, range(Req)),
+ Stats = couch_stats_aggregator:get_json({list_to_atom(binary_to_list(Mod)),
+ list_to_atom(binary_to_list(Key))}, range(Req)),
send_json(Req, {[{Mod, {[{Key, Stats}]}}]});
handle_stats_req(#httpd{method='GET', path_parts=[_, _Mod, _Key | _Extra]}) ->
@@ -39,7 +40,7 @@ handle_stats_req(Req) ->
send_method_not_allowed(Req, "GET").
range(Req) ->
- case proplists:get_value("range", couch_httpd:qs(Req)) of
+ case couch_util:get_value("range", couch_httpd:qs(Req)) of
undefined ->
0;
Value ->
@@ -47,7 +48,7 @@ range(Req) ->
end.
flush(Req) ->
- case proplists:get_value("flush", couch_httpd:qs(Req)) of
+ case couch_util:get_value("flush", couch_httpd:qs(Req)) of
"true" ->
couch_stats_aggregator:collect_sample();
_Else ->
diff --git a/src/couchdb/couch_httpd_vhost.erl b/src/couchdb/couch_httpd_vhost.erl
new file mode 100644
index 00000000..3dba2919
--- /dev/null
+++ b/src/couchdb/couch_httpd_vhost.erl
@@ -0,0 +1,402 @@
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+% http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+
+
+-module(couch_httpd_vhost).
+-behaviour(gen_server).
+
+-export([start_link/0, init/1, handle_call/3, handle_info/2, handle_cast/2]).
+-export([code_change/3, terminate/2]).
+-export([match_vhost/1, urlsplit_netloc/2]).
+-export([redirect_to_vhost/2]).
+
+-include("couch_db.hrl").
+
+-define(SEPARATOR, $\/).
+-define(MATCH_ALL, {bind, '*'}).
+
+-record(vhosts, {
+ vhost_globals,
+ vhosts = [],
+ vhost_fun
+}).
+
+
+%% @doc The vhost manager.
+%% This gen_server keeps the state of vhosts added to the ini and tries
+%% to match the Host header (or forwarded host) against rules built from
+%% the vhost list.
+%%
+%% Declaration of vhosts takes place in the configuration file:
+%%
+%% [vhosts]
+%% example.com = /example
+%% *.example.com = /example
+%%
+%% The first line will rewrite the request to display the content of the
+%% example database. This rule works only if the Host header is
+%% 'example.com' and won't work for CNAMEs. The second rule, on the other
+%% hand, matches all CNAMEs to the example db, so www.example.com or
+%% db.example.com will work.
+%%
+%% The wildcard ('*') should always be the last in the cnames:
+%%
+%% "*.db.example.com = /" will match all cnames on top of db.example.com
+%% to the root of the machine.
+%%
+%%
+%% Rewriting Hosts to path
+%% -----------------------
+%%
+%% Like in the _rewrite handler you can match some variables and use
+%% them to create the target path. Some examples:
+%%
+%% [vhosts]
+%% *.example.com = /*
+%% :dbname.example.com = /:dbname
+%% :ddocname.:dbname.example.com = /:dbname/_design/:ddocname/_rewrite
+%%
+%% The first rule passes the wildcard as dbname, the second does the same
+%% but uses a variable name, and the third one allows you to use any app
+%% with @ddocname in any db with @dbname.
+%%
+%% You could also change the default function used to handle requests by
+%% changing the setting `redirect_vhost_handler` in the `httpd` section of
+%% the ini:
+%%
+%% [httpd]
+%% redirect_vhost_handler = {Module, Fun}
+%%
+%% The function takes 2 args: the mochiweb request object and the target
+%% path.
+
+start_link() ->
+ gen_server:start_link({local, ?MODULE}, ?MODULE, [], []).
+
+%% @doc Try to find a rule matching the current Host header. If a rule is
+%% found, the Mochiweb request is rewritten; else the current request is
+%% returned.
+match_vhost(MochiReq) ->
+ {ok, MochiReq1} = gen_server:call(couch_httpd_vhost, {match_vhost,
+ MochiReq}),
+
+ MochiReq1.
+
+
+%% --------------------
+%% gen_server functions
+%% --------------------
+
+init(_) ->
+ process_flag(trap_exit, true),
+
+ % init state
+ VHosts = make_vhosts(),
+ VHostGlobals = re:split(
+ couch_config:get("httpd", "vhost_global_handlers", ""),
+ ", ?",
+ [{return, list}]
+ ),
+
+ % Set vhost fun
+ DefaultVHostFun = "{couch_httpd_vhost, redirect_to_vhost}",
+ VHostFun = couch_httpd:make_arity_2_fun(
+ couch_config:get("httpd", "redirect_vhost_handler", DefaultVHostFun)
+ ),
+
+
+ Self = self(),
+ % register for changes in vhosts section
+ ok = couch_config:register(
+ fun("vhosts") ->
+ ok = gen_server:call(Self, vhosts_changed, infinity)
+ end
+ ),
+
+ % register for changes in vhost_global_handlers key
+ ok = couch_config:register(
+ fun("httpd", "vhost_global_handlers") ->
+ ok = gen_server:call(Self, vhosts_global_changed, infinity)
+ end
+ ),
+
+ ok = couch_config:register(
+ fun("httpd", "redirect_vhost_handler") ->
+ ok = gen_server:call(Self, fun_changed, infinity)
+ end
+ ),
+
+ {ok, #vhosts{
+ vhost_globals = VHostGlobals,
+ vhosts = VHosts,
+ vhost_fun = VHostFun}
+ }.
+
+
+handle_call({match_vhost, MochiReq}, _From, State) ->
+ #vhosts{
+ vhost_globals = VHostGlobals,
+ vhosts = VHosts,
+ vhost_fun = Fun
+ } = State,
+
+ {"/" ++ VPath, Query, Fragment} = mochiweb_util:urlsplit_path(MochiReq:get(raw_path)),
+ VPathParts = string:tokens(VPath, "/"),
+
+ XHost = couch_config:get("httpd", "x_forwarded_host", "X-Forwarded-Host"),
+ VHost = case MochiReq:get_header_value(XHost) of
+ undefined ->
+ case MochiReq:get_header_value("Host") of
+ undefined -> [];
+ Value1 -> Value1
+ end;
+ Value -> Value
+ end,
+ {VHostParts, VhostPort} = split_host_port(VHost),
+ FinalMochiReq = case try_bind_vhost(VHosts, lists:reverse(VHostParts),
+ VhostPort, VPathParts) of
+ no_vhost_matched -> MochiReq;
+ {VhostTarget, NewPath} ->
+ case vhost_global(VHostGlobals, MochiReq) of
+ true ->
+ MochiReq;
+ _Else ->
+ NewPath1 = mochiweb_util:urlunsplit_path({NewPath, Query,
+ Fragment}),
+ MochiReq1 = mochiweb_request:new(MochiReq:get(socket),
+ MochiReq:get(method),
+ NewPath1,
+ MochiReq:get(version),
+ MochiReq:get(headers)),
+ Fun(MochiReq1, VhostTarget)
+ end
+ end,
+ {reply, {ok, FinalMochiReq}, State};
+
+% update vhosts
+handle_call(vhosts_changed, _From, State) ->
+ {reply, ok, State#vhosts{vhosts= make_vhosts()}};
+
+
+% update vhosts_globals
+handle_call(vhosts_global_changed, _From, State) ->
+ VHostGlobals = re:split(
+ couch_config:get("httpd", "vhost_global_handlers", ""),
+ ", ?",
+ [{return, list}]
+ ),
+ {reply, ok, State#vhosts{vhost_globals=VHostGlobals}};
+% change fun
+handle_call(fun_changed, _From, State) ->
+ DefaultVHostFun = "{couch_httpd_vhost, redirect_to_vhost}",
+ VHostFun = couch_httpd:make_arity_2_fun(
+ couch_config:get("httpd", "redirect_vhost_handler", DefaultVHostFun)
+ ),
+ {reply, ok, State#vhosts{vhost_fun=VHostFun}}.
+
+handle_cast(_Msg, State) ->
+ {noreply, State}.
+
+handle_info(_Msg, State) ->
+ {noreply, State}.
+
+terminate(_Reason, _State) ->
+ ok.
+
+code_change(_OldVsn, State, _Extra) ->
+ {ok, State}.
+
+
+
+% default redirect vhost handler
+
+redirect_to_vhost(MochiReq, VhostTarget) ->
+ Path = MochiReq:get(raw_path),
+ Target = VhostTarget ++ Path,
+
+ ?LOG_DEBUG("Vhost Target: '~p'~n", [Target]),
+
+ Headers = mochiweb_headers:enter("x-couchdb-vhost-path", Path,
+ MochiReq:get(headers)),
+
+ % build a new mochiweb request
+ MochiReq1 = mochiweb_request:new(MochiReq:get(socket),
+ MochiReq:get(method),
+ Target,
+ MochiReq:get(version),
+ Headers),
+ % cleanup: this forces mochiweb to reparse the raw uri.
+ MochiReq1:cleanup(),
+
+ MochiReq1.
+
+%% Check whether the first path segment names a global (non-rewritten)
+%% handler; if so, it will not be rewritten, but will run as a normal
+%% couchdb request. Normally you'd use this for _uuids, _utils and a few
+%% of the others you want to keep available on vhosts. You can also use
+%% it to make databases 'global'.
+vhost_global( VhostGlobals, MochiReq) ->
+ RawUri = MochiReq:get(raw_path),
+ {"/" ++ Path, _, _} = mochiweb_util:urlsplit_path(RawUri),
+
+ Front = case couch_httpd:partition(Path) of
+ {"", "", ""} ->
+ "/"; % Special case the root url handler
+ {FirstPart, _, _} ->
+ FirstPart
+ end,
+ [true] == [true||V <- VhostGlobals, V == Front].
+
+%% bind host
+%% first it tries to bind the port, then the hostname.
+try_bind_vhost([], _HostParts, _Port, _PathParts) ->
+ no_vhost_matched;
+try_bind_vhost([VhostSpec|Rest], HostParts, Port, PathParts) ->
+ {{VHostParts, VPort, VPath}, Path} = VhostSpec,
+ case bind_port(VPort, Port) of
+ ok ->
+ case bind_vhost(lists:reverse(VHostParts), HostParts, []) of
+ {ok, Bindings, Remainings} ->
+ case bind_path(VPath, PathParts) of
+ {ok, PathParts1} ->
+ Path1 = make_target(Path, Bindings, Remainings, []),
+ {make_path(Path1), make_path(PathParts1)};
+ fail ->
+ try_bind_vhost(Rest, HostParts, Port,
+ PathParts)
+ end;
+ fail -> try_bind_vhost(Rest, HostParts, Port, PathParts)
+ end;
+ fail -> try_bind_vhost(Rest, HostParts, Port, PathParts)
+ end.
+
+%% @doc Build the new path from bindings. Bindings are query args
+%% (+ the dynamic query, rewritten if needed) and bindings found in
+%% the bind_path step.
+%% TODO: merge code with rewrite. But we need to make sure we are
+%% working with strings here.
+make_target([], _Bindings, _Remaining, Acc) ->
+ lists:reverse(Acc);
+make_target([?MATCH_ALL], _Bindings, Remaining, Acc) ->
+ Acc1 = lists:reverse(Acc) ++ Remaining,
+ Acc1;
+make_target([?MATCH_ALL|_Rest], _Bindings, Remaining, Acc) ->
+ Acc1 = lists:reverse(Acc) ++ Remaining,
+ Acc1;
+make_target([{bind, P}|Rest], Bindings, Remaining, Acc) ->
+ P2 = case couch_util:get_value({bind, P}, Bindings) of
+ undefined -> "undefined";
+ P1 -> P1
+ end,
+ make_target(Rest, Bindings, Remaining, [P2|Acc]);
+make_target([P|Rest], Bindings, Remaining, Acc) ->
+ make_target(Rest, Bindings, Remaining, [P|Acc]).
+
+%% bind port
+bind_port(Port, Port) -> ok;
+bind_port(_,_) -> fail.
+
+%% bind vhost
+bind_vhost([],[], Bindings) -> {ok, Bindings, []};
+bind_vhost([?MATCH_ALL], [], _Bindings) -> fail;
+bind_vhost([?MATCH_ALL], Rest, Bindings) -> {ok, Bindings, Rest};
+bind_vhost([], _HostParts, _Bindings) -> fail;
+bind_vhost([{bind, Token}|Rest], [Match|RestHost], Bindings) ->
+ bind_vhost(Rest, RestHost, [{{bind, Token}, Match}|Bindings]);
+bind_vhost([Cname|Rest], [Cname|RestHost], Bindings) ->
+ bind_vhost(Rest, RestHost, Bindings);
+bind_vhost(_, _, _) -> fail.
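+
+% Worked example for bind_vhost/3 (illustrative): both the spec and the
+% host parts arrive reversed, so matching the vhost spec "*.example.com"
+% against the host "www.example.com" is
+% bind_vhost(["com", "example", {bind, '*'}],
+% ["com", "example", "www"], []) -> {ok, [], ["www"]}
+% where the wildcard remainder ["www"] is later consumed by make_target/4.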
+
+%% bind path
+bind_path([], PathParts) ->
+ {ok, PathParts};
+bind_path(_VPathParts, []) ->
+ fail;
+bind_path([Path|VRest],[Path|Rest]) ->
+ bind_path(VRest, Rest);
+bind_path(_, _) ->
+ fail.
+
+% utilities
+
+
+%% create vhost list from ini
+make_vhosts() ->
+ lists:foldl(fun({Vhost, Path}, Acc) ->
+ [{parse_vhost(Vhost), split_path(Path)}|Acc]
+ end, [], couch_config:get("vhosts")).
+
+
+parse_vhost(Vhost) ->
+ case urlsplit_netloc(Vhost, []) of
+ {[], Path} ->
+ {make_spec("*", []), 80, Path};
+ {HostPort, []} ->
+ {H, P} = split_host_port(HostPort),
+ H1 = make_spec(H, []),
+ {H1, P, []};
+ {HostPort, Path} ->
+ {H, P} = split_host_port(HostPort),
+ H1 = make_spec(H, []),
+ {H1, P, string:tokens(Path, "/")}
+ end.
+
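+% Worked example for parse_vhost/1 (illustrative): parse_vhost("*.example.com")
+% returns {[{bind, '*'}, "example", "com"], 80, []} -- a host spec with the
+% match-all binding, the default port 80 and an empty path spec.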
+
+split_host_port(HostAsString) ->
+ case string:rchr(HostAsString, $:) of
+ 0 ->
+ {split_host(HostAsString), 80};
+ N ->
+ HostPart = string:substr(HostAsString, 1, N-1),
+ case (catch erlang:list_to_integer(string:substr(HostAsString,
+ N+1, length(HostAsString)))) of
+ {'EXIT', _} ->
+ {split_host(HostAsString), 80};
+ Port ->
+ {split_host(HostPart), Port}
+ end
+ end.
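+
+% Worked example for split_host_port/1 (illustrative):
+% split_host_port("db.example.com:5984") returns
+% {["db", "example", "com"], 5984}; without an explicit port,
+% split_host_port("example.com") returns {["example", "com"], 80}.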
+
+split_host(HostAsString) ->
+ string:tokens(HostAsString, ".").
+
+split_path(Path) ->
+ make_spec(string:tokens(Path, "/"), []).
+
+
+make_spec([], Acc) ->
+ lists:reverse(Acc);
+make_spec([""|R], Acc) ->
+ make_spec(R, Acc);
+make_spec(["*"|R], Acc) ->
+ make_spec(R, [?MATCH_ALL|Acc]);
+make_spec([P|R], Acc) ->
+ P1 = parse_var(P),
+ make_spec(R, [P1|Acc]).
+
+
+parse_var(P) ->
+ case P of
+ ":" ++ Var ->
+ {bind, Var};
+ _ -> P
+ end.
+
+
+% mochiweb doesn't export it.
+urlsplit_netloc("", Acc) ->
+ {lists:reverse(Acc), ""};
+urlsplit_netloc(Rest=[C | _], Acc) when C =:= $/; C =:= $?; C =:= $# ->
+ {lists:reverse(Acc), Rest};
+urlsplit_netloc([C | Rest], Acc) ->
+ urlsplit_netloc(Rest, [C | Acc]).
+
+make_path(Parts) ->
+ "/" ++ string:join(Parts,[?SEPARATOR]).
diff --git a/src/couchdb/couch_httpd_view.erl b/src/couchdb/couch_httpd_view.erl
index 6419ca55..8656c43e 100644
--- a/src/couchdb/couch_httpd_view.erl
+++ b/src/couchdb/couch_httpd_view.erl
@@ -15,9 +15,9 @@
-export([handle_view_req/3,handle_temp_view_req/2]).
--export([get_stale_type/1, get_reduce_type/1, parse_view_params/3]).
--export([make_view_fold_fun/6, finish_view_fold/4, view_row_obj/3]).
--export([view_group_etag/2, view_group_etag/3, make_reduce_fold_funs/5]).
+-export([parse_view_params/3]).
+-export([make_view_fold_fun/7, finish_view_fold/4, finish_view_fold/5, view_row_obj/3]).
+-export([view_etag/3, view_etag/4, make_reduce_fold_funs/6]).
-export([design_doc_view/5, parse_bool_param/1, doc_member/2]).
-export([make_key_options/1, load_view/4]).
@@ -26,6 +26,8 @@
start_json_response/2, start_json_response/3, end_json_response/1,
send_chunked_error/2]).
+-import(couch_db,[get_update_seq/1]).
+
design_doc_view(Req, Db, DName, ViewName, Keys) ->
DesignId = <<"_design/", DName/binary>>,
Stale = get_stale_type(Req),
@@ -55,12 +57,14 @@ design_doc_view(Req, Db, DName, ViewName, Keys) ->
handle_view_req(#httpd{method='GET',
path_parts=[_, _, DName, _, ViewName]}=Req, Db, _DDoc) ->
- design_doc_view(Req, Db, DName, ViewName, nil);
+ Keys = couch_httpd:qs_json_value(Req, "keys", nil),
+ design_doc_view(Req, Db, DName, ViewName, Keys);
handle_view_req(#httpd{method='POST',
path_parts=[_, _, DName, _, ViewName]}=Req, Db, _DDoc) ->
+ couch_httpd:validate_ctype(Req, "application/json"),
{Fields} = couch_httpd:json_body_obj(Req),
- case proplists:get_value(<<"keys">>, Fields, nil) of
+ case couch_util:get_value(<<"keys">>, Fields, nil) of
nil ->
Fmt = "POST to view ~p/~p in database ~p with no keys member.",
?LOG_DEBUG(Fmt, [DName, ViewName, Db]),
@@ -75,14 +79,16 @@ handle_view_req(Req, _Db, _DDoc) ->
send_method_not_allowed(Req, "GET,POST,HEAD").
handle_temp_view_req(#httpd{method='POST'}=Req, Db) ->
+ couch_httpd:validate_ctype(Req, "application/json"),
+ ok = couch_db:check_is_admin(Db),
couch_stats_collector:increment({httpd, temporary_view_reads}),
{Props} = couch_httpd:json_body_obj(Req),
- Language = proplists:get_value(<<"language">>, Props, <<"javascript">>),
- {DesignOptions} = proplists:get_value(<<"options">>, Props, {[]}),
- MapSrc = proplists:get_value(<<"map">>, Props),
- Keys = proplists:get_value(<<"keys">>, Props, nil),
+ Language = couch_util:get_value(<<"language">>, Props, <<"javascript">>),
+ {DesignOptions} = couch_util:get_value(<<"options">>, Props, {[]}),
+ MapSrc = couch_util:get_value(<<"map">>, Props),
+ Keys = couch_util:get_value(<<"keys">>, Props, nil),
Reduce = get_reduce_type(Req),
- case proplists:get_value(<<"reduce">>, Props, null) of
+ case couch_util:get_value(<<"reduce">>, Props, null) of
null ->
QueryArgs = parse_view_params(Req, Keys, map),
{ok, View, Group} = couch_view:get_temp_map_view(Db, Language,
@@ -108,14 +114,14 @@ output_map_view(Req, View, Group, Db, QueryArgs, nil) ->
limit = Limit,
skip = SkipCount
} = QueryArgs,
- CurrentEtag = view_group_etag(Group, Db),
+ CurrentEtag = view_etag(Db, Group, View),
couch_httpd:etag_respond(Req, CurrentEtag, fun() ->
{ok, RowCount} = couch_view:get_row_count(View),
- FoldlFun = make_view_fold_fun(Req, QueryArgs, CurrentEtag, Db, RowCount, #view_fold_helper_funs{reduce_count=fun couch_view:reduce_to_count/1}),
+ FoldlFun = make_view_fold_fun(Req, QueryArgs, CurrentEtag, Db, Group#group.current_seq, RowCount, #view_fold_helper_funs{reduce_count=fun couch_view:reduce_to_count/1}),
FoldAccInit = {Limit, SkipCount, undefined, []},
- {ok, LastReduce, FoldResult} = couch_view:fold(View,
+ {ok, LastReduce, FoldResult} = couch_view:fold(View,
FoldlFun, FoldAccInit, make_key_options(QueryArgs)),
- finish_view_fold(Req, RowCount,
+ finish_view_fold(Req, RowCount,
couch_view:reduce_to_count(LastReduce), FoldResult)
end);
@@ -124,23 +130,23 @@ output_map_view(Req, View, Group, Db, QueryArgs, Keys) ->
limit = Limit,
skip = SkipCount
} = QueryArgs,
- CurrentEtag = view_group_etag(Group, Db, Keys),
+ CurrentEtag = view_etag(Db, Group, View, Keys),
couch_httpd:etag_respond(Req, CurrentEtag, fun() ->
{ok, RowCount} = couch_view:get_row_count(View),
FoldAccInit = {Limit, SkipCount, undefined, []},
- {LastReduce, FoldResult} = lists:foldl(
- fun(Key, {_, FoldAcc}) ->
- FoldlFun = make_view_fold_fun(Req,
- QueryArgs#view_query_args{
- }, CurrentEtag, Db, RowCount,
+ {LastReduce, FoldResult} = lists:foldl(fun(Key, {_, FoldAcc}) ->
+ FoldlFun = make_view_fold_fun(Req, QueryArgs#view_query_args{},
+ CurrentEtag, Db, Group#group.current_seq, RowCount,
#view_fold_helper_funs{
reduce_count = fun couch_view:reduce_to_count/1
}),
- {ok, LastReduce, FoldResult} = couch_view:fold(View, FoldlFun, FoldAcc,
- make_key_options(QueryArgs#view_query_args{start_key=Key, end_key=Key})),
- {LastReduce, FoldResult}
- end, {{[],[]}, FoldAccInit}, Keys),
- finish_view_fold(Req, RowCount, couch_view:reduce_to_count(LastReduce), FoldResult)
+ {ok, LastReduce, FoldResult} = couch_view:fold(View, FoldlFun,
+ FoldAcc, make_key_options(
+ QueryArgs#view_query_args{start_key=Key, end_key=Key})),
+ {LastReduce, FoldResult}
+ end, {{[],[]}, FoldAccInit}, Keys),
+ finish_view_fold(Req, RowCount, couch_view:reduce_to_count(LastReduce),
+ FoldResult, [{update_seq,Group#group.current_seq}])
end).
output_reduce_view(Req, Db, View, Group, QueryArgs, nil) ->
@@ -149,9 +155,11 @@ output_reduce_view(Req, Db, View, Group, QueryArgs, nil) ->
skip = Skip,
group_level = GroupLevel
} = QueryArgs,
- CurrentEtag = view_group_etag(Group, Db),
+ CurrentEtag = view_etag(Db, Group, View),
couch_httpd:etag_respond(Req, CurrentEtag, fun() ->
- {ok, GroupRowsFun, RespFun} = make_reduce_fold_funs(Req, GroupLevel, QueryArgs, CurrentEtag, #reduce_fold_helper_funs{}),
+ {ok, GroupRowsFun, RespFun} = make_reduce_fold_funs(Req, GroupLevel,
+ QueryArgs, CurrentEtag, Group#group.current_seq,
+ #reduce_fold_helper_funs{}),
FoldAccInit = {Limit, Skip, undefined, []},
{ok, {_, _, Resp, _}} = couch_view:fold_reduce(View,
RespFun, FoldAccInit, [{key_group_fun, GroupRowsFun} |
@@ -165,12 +173,15 @@ output_reduce_view(Req, Db, View, Group, QueryArgs, Keys) ->
skip = Skip,
group_level = GroupLevel
} = QueryArgs,
- CurrentEtag = view_group_etag(Group, Db, Keys),
+ CurrentEtag = view_etag(Db, Group, View, Keys),
couch_httpd:etag_respond(Req, CurrentEtag, fun() ->
- {ok, GroupRowsFun, RespFun} = make_reduce_fold_funs(Req, GroupLevel, QueryArgs, CurrentEtag, #reduce_fold_helper_funs{}),
+ {ok, GroupRowsFun, RespFun} = make_reduce_fold_funs(Req, GroupLevel,
+ QueryArgs, CurrentEtag, Group#group.current_seq,
+ #reduce_fold_helper_funs{}),
{Resp, _RedAcc3} = lists:foldl(
fun(Key, {Resp, RedAcc}) ->
- % run the reduce once for each key in keys, with limit etc reapplied for each key
+ % run the reduce once for each key in keys, with limit etc
+ % reapplied for each key
FoldAccInit = {Limit, Skip, Resp, RedAcc},
{_, {_, _, Resp2, RedAcc2}} = couch_view:fold_reduce(View,
RespFun, FoldAccInit, [{key_group_fun, GroupRowsFun} |
@@ -180,7 +191,7 @@ output_reduce_view(Req, Db, View, Group, QueryArgs, Keys) ->
{Resp2, RedAcc2}
end,
{undefined, []}, Keys), % Start with no comma
- finish_reduce_fold(Req, Resp)
+ finish_reduce_fold(Req, Resp, [{update_seq,Group#group.current_seq}])
end).
reverse_key_default(?MIN_STR) -> ?MAX_STR;
@@ -188,28 +199,28 @@ reverse_key_default(?MAX_STR) -> ?MIN_STR;
reverse_key_default(Key) -> Key.
get_stale_type(Req) ->
- list_to_atom(couch_httpd:qs_value(Req, "stale", "nil")).
+ list_to_existing_atom(couch_httpd:qs_value(Req, "stale", "nil")).
get_reduce_type(Req) ->
- list_to_atom(couch_httpd:qs_value(Req, "reduce", "true")).
+ list_to_existing_atom(couch_httpd:qs_value(Req, "reduce", "true")).
load_view(Req, Db, {ViewDesignId, ViewName}, Keys) ->
- Stale = couch_httpd_view:get_stale_type(Req),
- Reduce = couch_httpd_view:get_reduce_type(Req),
+ Stale = get_stale_type(Req),
+ Reduce = get_reduce_type(Req),
case couch_view:get_map_view(Db, ViewDesignId, ViewName, Stale) of
{ok, View, Group} ->
- QueryArgs = couch_httpd_view:parse_view_params(Req, Keys, map),
+ QueryArgs = parse_view_params(Req, Keys, map),
{map, View, Group, QueryArgs};
{not_found, _Reason} ->
case couch_view:get_reduce_view(Db, ViewDesignId, ViewName, Stale) of
{ok, ReduceView, Group} ->
case Reduce of
false ->
- QueryArgs = couch_httpd_view:parse_view_params(Req, Keys, map_red),
+ QueryArgs = parse_view_params(Req, Keys, map_red),
MapView = couch_view:extract_map_view(ReduceView),
{map, MapView, Group, QueryArgs};
_ ->
- QueryArgs = couch_httpd_view:parse_view_params(Req, Keys, reduce),
+ QueryArgs = parse_view_params(Req, Keys, reduce),
{reduce, ReduceView, Group, QueryArgs}
end;
{not_found, Reason} ->
@@ -226,8 +237,8 @@ parse_view_params(Req, Keys, ViewType) ->
QueryList = couch_httpd:qs(Req),
QueryParams =
lists:foldl(fun({K, V}, Acc) ->
- parse_view_param(K, V) ++ Acc
- end, [], QueryList),
+ parse_view_param(K, V) ++ Acc
+ end, [], QueryList),
IsMultiGet = (Keys =/= nil),
Args = #view_query_args{
view_type=ViewType,
@@ -236,20 +247,20 @@ parse_view_params(Req, Keys, ViewType) ->
QueryArgs = lists:foldl(fun({K, V}, Args2) ->
validate_view_query(K, V, Args2)
end, Args, lists:reverse(QueryParams)), % Reverse to match QS order.
-
+ warn_on_empty_key_range(QueryArgs),
GroupLevel = QueryArgs#view_query_args.group_level,
case {ViewType, GroupLevel, IsMultiGet} of
- {reduce, exact, true} ->
- QueryArgs;
- {reduce, _, false} ->
- QueryArgs;
- {reduce, _, _} ->
- % we can simplify code if we just drop this error message.
- Msg = <<"Multi-key fetchs for reduce "
- "view must include `group=true`">>,
- throw({query_parse_error, Msg});
- _ ->
- QueryArgs
+ {reduce, exact, true} ->
+ QueryArgs;
+ {reduce, _, false} ->
+ QueryArgs;
+ {reduce, _, _} ->
+ % we can simplify code if we just drop this error message.
+ Msg = <<"Multi-key fetchs for reduce "
+ "view must include `group=true`">>,
+ throw({query_parse_error, Msg});
+ _ ->
+ QueryArgs
end,
QueryArgs.
@@ -258,22 +269,37 @@ parse_view_param("", _) ->
parse_view_param("key", Value) ->
JsonKey = ?JSON_DECODE(Value),
[{start_key, JsonKey}, {end_key, JsonKey}];
+% TODO: maybe deprecate startkey_docid
parse_view_param("startkey_docid", Value) ->
[{start_docid, ?l2b(Value)}];
+parse_view_param("start_key_doc_id", Value) ->
+ [{start_docid, ?l2b(Value)}];
+% TODO: maybe deprecate endkey_docid
parse_view_param("endkey_docid", Value) ->
[{end_docid, ?l2b(Value)}];
+parse_view_param("end_key_doc_id", Value) ->
+ [{end_docid, ?l2b(Value)}];
+% TODO: maybe deprecate startkey
parse_view_param("startkey", Value) ->
[{start_key, ?JSON_DECODE(Value)}];
+parse_view_param("start_key", Value) ->
+ [{start_key, ?JSON_DECODE(Value)}];
+% TODO: maybe deprecate endkey
parse_view_param("endkey", Value) ->
[{end_key, ?JSON_DECODE(Value)}];
+parse_view_param("end_key", Value) ->
+ [{end_key, ?JSON_DECODE(Value)}];
parse_view_param("limit", Value) ->
[{limit, parse_positive_int_param(Value)}];
parse_view_param("count", _Value) ->
throw({query_parse_error, <<"Query parameter 'count' is now 'limit'.">>});
parse_view_param("stale", "ok") ->
[{stale, ok}];
+parse_view_param("stale", "update_after") ->
+ [{stale, update_after}];
parse_view_param("stale", _Value) ->
- throw({query_parse_error, <<"stale only available as stale=ok">>});
+ throw({query_parse_error,
+ <<"stale only available as stale=ok or as stale=update_after">>});
parse_view_param("update", _Value) ->
throw({query_parse_error, <<"update=false is now stale=ok">>});
parse_view_param("descending", Value) ->
@@ -300,25 +326,45 @@ parse_view_param("callback", _) ->
parse_view_param(Key, Value) ->
[{extra, {Key, Value}}].
+warn_on_empty_key_range(#view_query_args{start_key=undefined}) ->
+ ok;
+warn_on_empty_key_range(#view_query_args{end_key=undefined}) ->
+ ok;
+warn_on_empty_key_range(#view_query_args{start_key=A, end_key=A}) ->
+ ok;
+warn_on_empty_key_range(#view_query_args{
+ start_key=StartKey, end_key=EndKey, direction=Dir}) ->
+ case {Dir, couch_view:less_json(StartKey, EndKey)} of
+ {fwd, false} ->
+ throw({query_parse_error,
+ <<"No rows can match your key range, reverse your ",
+ "start_key and end_key or set descending=true">>});
+ {rev, true} ->
+ throw({query_parse_error,
+ <<"No rows can match your key range, reverse your ",
+ "start_key and end_key or set descending=false">>});
+ _ -> ok
+ end.
+
validate_view_query(start_key, Value, Args) ->
case Args#view_query_args.multi_get of
- true ->
- Msg = <<"Query parameter `start_key` is "
- "not compatible with multi-get">>,
- throw({query_parse_error, Msg});
- _ ->
- Args#view_query_args{start_key=Value}
+ true ->
+ Msg = <<"Query parameter `start_key` is "
+ "not compatible with multi-get">>,
+ throw({query_parse_error, Msg});
+ _ ->
+ Args#view_query_args{start_key=Value}
end;
validate_view_query(start_docid, Value, Args) ->
Args#view_query_args{start_docid=Value};
validate_view_query(end_key, Value, Args) ->
case Args#view_query_args.multi_get of
- true->
- Msg = <<"Query parameter `end_key` is "
- "not compatible with multi-get">>,
- throw({query_parse_error, Msg});
- _ ->
- Args#view_query_args{end_key=Value}
+ true->
+ Msg = <<"Query parameter `end_key` is "
+ "not compatible with multi-get">>,
+ throw({query_parse_error, Msg});
+ _ ->
+ Args#view_query_args{end_key=Value}
end;
validate_view_query(end_docid, Value, Args) ->
Args#view_query_args{end_docid=Value};
@@ -326,19 +372,23 @@ validate_view_query(limit, Value, Args) ->
Args#view_query_args{limit=Value};
validate_view_query(list, Value, Args) ->
Args#view_query_args{list=Value};
+validate_view_query(stale, ok, Args) ->
+ Args#view_query_args{stale=ok};
+validate_view_query(stale, update_after, Args) ->
+ Args#view_query_args{stale=update_after};
validate_view_query(stale, _, Args) ->
Args;
validate_view_query(descending, true, Args) ->
case Args#view_query_args.direction of
- rev -> Args; % Already reversed
- fwd ->
- Args#view_query_args{
- direction = rev,
- start_docid =
- reverse_key_default(Args#view_query_args.start_docid),
- end_docid =
- reverse_key_default(Args#view_query_args.end_docid)
- }
+ rev -> Args; % Already reversed
+ fwd ->
+ Args#view_query_args{
+ direction = rev,
+ start_docid =
+ reverse_key_default(Args#view_query_args.start_docid),
+ end_docid =
+ reverse_key_default(Args#view_query_args.end_docid)
+ }
end;
validate_view_query(descending, false, Args) ->
Args; % Ignore default condition
@@ -346,38 +396,41 @@ validate_view_query(skip, Value, Args) ->
Args#view_query_args{skip=Value};
validate_view_query(group_level, Value, Args) ->
case Args#view_query_args.view_type of
- reduce ->
- Args#view_query_args{group_level=Value};
- _ ->
- Msg = <<"Invalid URL parameter 'group' or "
- " 'group_level' for non-reduce view.">>,
- throw({query_parse_error, Msg})
+ reduce ->
+ Args#view_query_args{group_level=Value};
+ _ ->
+ Msg = <<"Invalid URL parameter 'group' or "
+ " 'group_level' for non-reduce view.">>,
+ throw({query_parse_error, Msg})
end;
validate_view_query(inclusive_end, Value, Args) ->
Args#view_query_args{inclusive_end=Value};
+validate_view_query(reduce, false, Args) ->
+ Args;
validate_view_query(reduce, _, Args) ->
case Args#view_query_args.view_type of
- map ->
- Msg = <<"Invalid URL parameter `reduce` for map view.">>,
- throw({query_parse_error, Msg});
- _ ->
- Args
+ map ->
+ Msg = <<"Invalid URL parameter `reduce` for map view.">>,
+ throw({query_parse_error, Msg});
+ _ ->
+ Args
end;
validate_view_query(include_docs, true, Args) ->
case Args#view_query_args.view_type of
- reduce ->
- Msg = <<"Query parameter `include_docs` "
- "is invalid for reduce views.">>,
- throw({query_parse_error, Msg});
- _ ->
- Args#view_query_args{include_docs=true}
+ reduce ->
+ Msg = <<"Query parameter `include_docs` "
+ "is invalid for reduce views.">>,
+ throw({query_parse_error, Msg});
+ _ ->
+ Args#view_query_args{include_docs=true}
end;
+% Use the view_query_args record's default value
validate_view_query(include_docs, _Value, Args) ->
Args;
validate_view_query(extra, _Value, Args) ->
Args.
-make_view_fold_fun(Req, QueryArgs, Etag, Db, TotalViewCount, HelperFuns) ->
+make_view_fold_fun(Req, QueryArgs, Etag, Db, UpdateSeq, TotalViewCount, HelperFuns) ->
#view_fold_helper_funs{
start_response = StartRespFun,
send_row = SendRowFun,
@@ -388,7 +441,8 @@ make_view_fold_fun(Req, QueryArgs, Etag, Db, TotalViewCount, HelperFuns) ->
include_docs = IncludeDocs
} = QueryArgs,
- fun({{Key, DocId}, Value}, OffsetReds, {AccLimit, AccSkip, Resp, RowFunAcc}) ->
+ fun({{Key, DocId}, Value}, OffsetReds,
+ {AccLimit, AccSkip, Resp, RowFunAcc}) ->
case {AccLimit, AccSkip, Resp} of
{0, _, _} ->
% we've done "limit" rows, stop foldling
@@ -400,7 +454,7 @@ make_view_fold_fun(Req, QueryArgs, Etag, Db, TotalViewCount, HelperFuns) ->
% rendering the first row, first we start the response
Offset = ReduceCountFun(OffsetReds),
{ok, Resp2, RowFunAcc0} = StartRespFun(Req, Etag,
- TotalViewCount, Offset, RowFunAcc),
+ TotalViewCount, Offset, RowFunAcc, UpdateSeq),
{Go, RowFunAcc2} = SendRowFun(Resp2, Db, {{Key, DocId}, Value},
IncludeDocs, RowFunAcc0),
{Go, {AccLimit - 1, 0, Resp2, RowFunAcc2}};
@@ -412,7 +466,7 @@ make_view_fold_fun(Req, QueryArgs, Etag, Db, TotalViewCount, HelperFuns) ->
end
end.
-make_reduce_fold_funs(Req, GroupLevel, _QueryArgs, Etag, HelperFuns) ->
+make_reduce_fold_funs(Req, GroupLevel, _QueryArgs, Etag, UpdateSeq, HelperFuns) ->
#reduce_fold_helper_funs{
start_response = StartRespFun,
send_row = SendRowFun
@@ -438,7 +492,7 @@ make_reduce_fold_funs(Req, GroupLevel, _QueryArgs, Etag, HelperFuns) ->
(_Key, Red, {AccLimit, 0, undefined, RowAcc0}) when GroupLevel == 0 ->
% we haven't started responding yet and group=false
- {ok, Resp2, RowAcc} = StartRespFun(Req, Etag, RowAcc0),
+ {ok, Resp2, RowAcc} = StartRespFun(Req, Etag, RowAcc0, UpdateSeq),
{Go, RowAcc2} = SendRowFun(Resp2, {null, Red}, RowAcc),
{Go, {AccLimit - 1, 0, Resp2, RowAcc2}};
(_Key, Red, {AccLimit, 0, Resp, RowAcc}) when GroupLevel == 0 ->
@@ -449,18 +503,20 @@ make_reduce_fold_funs(Req, GroupLevel, _QueryArgs, Etag, HelperFuns) ->
(Key, Red, {AccLimit, 0, undefined, RowAcc0})
when is_integer(GroupLevel), is_list(Key) ->
% group_level and we haven't responded yet
- {ok, Resp2, RowAcc} = StartRespFun(Req, Etag, RowAcc0),
- {Go, RowAcc2} = SendRowFun(Resp2, {lists:sublist(Key, GroupLevel), Red}, RowAcc),
+ {ok, Resp2, RowAcc} = StartRespFun(Req, Etag, RowAcc0, UpdateSeq),
+ {Go, RowAcc2} = SendRowFun(Resp2,
+ {lists:sublist(Key, GroupLevel), Red}, RowAcc),
{Go, {AccLimit - 1, 0, Resp2, RowAcc2}};
(Key, Red, {AccLimit, 0, Resp, RowAcc})
when is_integer(GroupLevel), is_list(Key) ->
% group_level and we've already started the response
- {Go, RowAcc2} = SendRowFun(Resp, {lists:sublist(Key, GroupLevel), Red}, RowAcc),
+ {Go, RowAcc2} = SendRowFun(Resp,
+ {lists:sublist(Key, GroupLevel), Red}, RowAcc),
{Go, {AccLimit - 1, 0, Resp, RowAcc2}};
(Key, Red, {AccLimit, 0, undefined, RowAcc0}) ->
% group=true and we haven't responded yet
- {ok, Resp2, RowAcc} = StartRespFun(Req, Etag, RowAcc0),
+ {ok, Resp2, RowAcc} = StartRespFun(Req, Etag, RowAcc0, UpdateSeq),
{Go, RowAcc2} = SendRowFun(Resp2, {Key, Red}, RowAcc),
{Go, {AccLimit - 1, 0, Resp2, RowAcc2}};
(Key, Red, {AccLimit, 0, Resp, RowAcc}) ->
@@ -470,13 +526,13 @@ make_reduce_fold_funs(Req, GroupLevel, _QueryArgs, Etag, HelperFuns) ->
end,
{ok, GroupRowsFun, RespFun}.
-apply_default_helper_funs(#view_fold_helper_funs{
- start_response = StartResp,
- send_row = SendRow
-}=Helpers) ->
-
+apply_default_helper_funs(
+ #view_fold_helper_funs{
+ start_response = StartResp,
+ send_row = SendRow
+ }=Helpers) ->
StartResp2 = case StartResp of
- undefined -> fun json_view_start_resp/5;
+ undefined -> fun json_view_start_resp/6;
_ -> StartResp
end,
@@ -491,12 +547,13 @@ apply_default_helper_funs(#view_fold_helper_funs{
};
-apply_default_helper_funs(#reduce_fold_helper_funs{
- start_response = StartResp,
- send_row = SendRow
-}=Helpers) ->
+apply_default_helper_funs(
+ #reduce_fold_helper_funs{
+ start_response = StartResp,
+ send_row = SendRow
+ }=Helpers) ->
StartResp2 = case StartResp of
- undefined -> fun json_reduce_start_resp/3;
+ undefined -> fun json_reduce_start_resp/4;
_ -> StartResp
end,
@@ -511,7 +568,7 @@ apply_default_helper_funs(#reduce_fold_helper_funs{
}.
make_key_options(#view_query_args{direction = Dir}=QueryArgs) ->
- [{dir,Dir} | make_start_key_option(QueryArgs) ++
+ [{dir,Dir} | make_start_key_option(QueryArgs) ++
make_end_key_option(QueryArgs)].
make_start_key_option(
@@ -538,10 +595,19 @@ make_end_key_option(
inclusive_end = false}) ->
[{end_key_gt, {EndKey,reverse_key_default(EndDocId)}}].
-json_view_start_resp(Req, Etag, TotalViewCount, Offset, _Acc) ->
+json_view_start_resp(Req, Etag, TotalViewCount, Offset, _Acc, UpdateSeq) ->
{ok, Resp} = start_json_response(Req, 200, [{"Etag", Etag}]),
- BeginBody = io_lib:format("{\"total_rows\":~w,\"offset\":~w,\"rows\":[\r\n",
- [TotalViewCount, Offset]),
+ BeginBody = case couch_httpd:qs_value(Req, "update_seq") of
+ "true" ->
+ io_lib:format(
+ "{\"total_rows\":~w,\"update_seq\":~w,"
+ "\"offset\":~w,\"rows\":[\r\n",
+ [TotalViewCount, UpdateSeq, Offset]);
+ _Else ->
+ io_lib:format(
+ "{\"total_rows\":~w,\"offset\":~w,\"rows\":[\r\n",
+ [TotalViewCount, Offset])
+ end,
{ok, Resp, BeginBody}.
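
With UpdateSeq threaded through, the response prefix can now advertise the
index's update sequence on request. Illustrative prefixes (values assumed):

    % GET .../_view/foo?update_seq=true
    %   {"total_rows":100,"update_seq":42,"offset":0,"rows":[
    % default, without update_seq in the query string:
    %   {"total_rows":100,"offset":0,"rows":[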
send_json_view_row(Resp, Db, {{Key, DocId}, Value}, IncludeDocs, RowFront) ->
@@ -549,37 +615,41 @@ send_json_view_row(Resp, Db, {{Key, DocId}, Value}, IncludeDocs, RowFront) ->
send_chunk(Resp, RowFront ++ ?JSON_ENCODE(JsonObj)),
{ok, ",\r\n"}.
-json_reduce_start_resp(Req, Etag, _Acc0) ->
+json_reduce_start_resp(Req, Etag, _Acc0, UpdateSeq) ->
{ok, Resp} = start_json_response(Req, 200, [{"Etag", Etag}]),
- {ok, Resp, "{\"rows\":[\r\n"}.
+ case couch_httpd:qs_value(Req, "update_seq") of
+ "true" ->
+ {ok, Resp, io_lib:format("{\"update_seq\":~w,\"rows\":[\r\n",[UpdateSeq])};
+ _Else ->
+ {ok, Resp, "{\"rows\":[\r\n"}
+ end.
send_json_reduce_row(Resp, {Key, Value}, RowFront) ->
send_chunk(Resp, RowFront ++ ?JSON_ENCODE({[{key, Key}, {value, Value}]})),
{ok, ",\r\n"}.
-view_group_etag(Group, Db) ->
- view_group_etag(Group, Db, nil).
+view_etag(Db, Group, View) ->
+ view_etag(Db, Group, View, nil).
-view_group_etag(#group{sig=Sig,current_seq=CurrentSeq}, _Db, Extra) ->
- % ?LOG_ERROR("Group ~p",[Group]),
- % This is not as granular as it could be.
- % If there are updates to the db that do not effect the view index,
- % they will change the Etag. For more granular Etags we'd need to keep
- % track of the last Db seq that caused an index change.
- couch_httpd:make_etag({Sig, CurrentSeq, Extra}).
+view_etag(Db, Group, {reduce, _, _, View}, Extra) ->
+ view_etag(Db, Group, View, Extra);
+view_etag(Db, Group, {temp_reduce, View}, Extra) ->
+ view_etag(Db, Group, View, Extra);
+view_etag(_Db, #group{sig=Sig}, #view{update_seq=UpdateSeq, purge_seq=PurgeSeq}, Extra) ->
+ couch_httpd:make_etag({Sig, UpdateSeq, PurgeSeq, Extra}).
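
The ETag is now derived from the view's own update and purge sequences
rather than the group's current sequence, so database writes that never
reach this index no longer invalidate cached responses. As a sketch, with
assumed sequence values:

    %% Before: any db write bumped current_seq and changed the tag.
    OldEtag = couch_httpd:make_etag({Sig, CurrentSeq, nil}),
    %% After: only indexed changes and purges move the tag.
    NewEtag = couch_httpd:make_etag({Sig, UpdateSeq, PurgeSeq, nil}).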
% the view row has an error
view_row_obj(_Db, {{Key, error}, Value}, _IncludeDocs) ->
{[{key, Key}, {error, Value}]};
% include docs in the view output
view_row_obj(Db, {{Key, DocId}, {Props}}, true) ->
- Rev = case proplists:get_value(<<"_rev">>, Props) of
+ Rev = case couch_util:get_value(<<"_rev">>, Props) of
undefined ->
nil;
Rev0 ->
couch_doc:parse_rev(Rev0)
end,
- IncludeId = proplists:get_value(<<"_id">>, Props, DocId),
+ IncludeId = couch_util:get_value(<<"_id">>, Props, DocId),
view_row_with_doc(Db, {{Key, DocId}, {Props}}, {IncludeId, Rev});
view_row_obj(Db, {{Key, DocId}, Value}, true) ->
view_row_with_doc(Db, {{Key, DocId}, Value}, {DocId, nil});
@@ -593,15 +663,17 @@ view_row_with_doc(Db, {{Key, DocId}, Value}, IdRev) ->
doc_member(Db, {DocId, Rev}) ->
?LOG_DEBUG("Include Doc: ~p ~p", [DocId, Rev]),
case (catch couch_httpd_db:couch_doc_open(Db, DocId, Rev, [])) of
- #doc{} = Doc ->
- JsonDoc = couch_doc:to_json_obj(Doc, []),
- [{doc, JsonDoc}];
- _Else ->
- [{doc, null}]
+ #doc{} = Doc ->
+ JsonDoc = couch_doc:to_json_obj(Doc, []),
+ [{doc, JsonDoc}];
+ _Else ->
+ [{doc, null}]
end.
-
finish_view_fold(Req, TotalRows, Offset, FoldResult) ->
+ finish_view_fold(Req, TotalRows, Offset, FoldResult, []).
+
+finish_view_fold(Req, TotalRows, Offset, FoldResult, Fields) ->
case FoldResult of
{_, _, undefined, _} ->
% nothing found in the view or keys, nothing has been returned
@@ -610,7 +682,7 @@ finish_view_fold(Req, TotalRows, Offset, FoldResult) ->
{total_rows, TotalRows},
{offset, Offset},
{rows, []}
- ]});
+ ] ++ Fields});
{_, _, Resp, _} ->
% end the view
send_chunk(Resp, "\r\n]}"),
@@ -618,11 +690,14 @@ finish_view_fold(Req, TotalRows, Offset, FoldResult) ->
end.
finish_reduce_fold(Req, Resp) ->
+ finish_reduce_fold(Req, Resp, []).
+
+finish_reduce_fold(Req, Resp, Fields) ->
case Resp of
undefined ->
send_json(Req, 200, {[
{rows, []}
- ]});
+ ] ++ Fields});
Resp ->
send_chunk(Resp, "\r\n]}"),
end_json_response(Resp)
@@ -630,11 +705,11 @@ finish_reduce_fold(Req, Resp) ->
parse_bool_param(Val) ->
case string:to_lower(Val) of
- "true" -> true;
- "false" -> false;
- _ ->
- Msg = io_lib:format("Invalid boolean parameter: ~p", [Val]),
- throw({query_parse_error, ?l2b(Msg)})
+ "true" -> true;
+ "false" -> false;
+ _ ->
+ Msg = io_lib:format("Invalid boolean parameter: ~p", [Val]),
+ throw({query_parse_error, ?l2b(Msg)})
end.
parse_int_param(Val) ->
diff --git a/src/couchdb/couch_js_functions.hrl b/src/couchdb/couch_js_functions.hrl
new file mode 100644
index 00000000..67f06686
--- /dev/null
+++ b/src/couchdb/couch_js_functions.hrl
@@ -0,0 +1,148 @@
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+% http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+-define(AUTH_DB_DOC_VALIDATE_FUNCTION, <<"
+ function(newDoc, oldDoc, userCtx) {
+ if (newDoc._deleted === true) {
+ // allow deletes by admins and matching users
+ // without checking the other fields
+ if ((userCtx.roles.indexOf('_admin') !== -1) ||
+ (userCtx.name == oldDoc.name)) {
+ return;
+ } else {
+ throw({forbidden: 'Only admins may delete other user docs.'});
+ }
+ }
+
+ if ((oldDoc && oldDoc.type !== 'user') || newDoc.type !== 'user') {
+ throw({forbidden : 'doc.type must be user'});
+ } // we only allow user docs for now
+
+ if (!newDoc.name) {
+ throw({forbidden: 'doc.name is required'});
+ }
+
+ if (newDoc.roles && !isArray(newDoc.roles)) {
+ throw({forbidden: 'doc.roles must be an array'});
+ }
+
+ if (newDoc._id !== ('org.couchdb.user:' + newDoc.name)) {
+ throw({
+ forbidden: 'Doc ID must be of the form org.couchdb.user:name'
+ });
+ }
+
+ if (oldDoc) { // validate all updates
+ if (oldDoc.name !== newDoc.name) {
+ throw({forbidden: 'Usernames can not be changed.'});
+ }
+ }
+
+ if (newDoc.password_sha && !newDoc.salt) {
+ throw({
+            forbidden: 'Users with password_sha must have a salt. ' +
+ 'See /_utils/script/couch.js for example code.'
+ });
+ }
+
+ if (userCtx.roles.indexOf('_admin') === -1) {
+ if (oldDoc) { // validate non-admin updates
+ if (userCtx.name !== newDoc.name) {
+ throw({
+ forbidden: 'You may only update your own user document.'
+ });
+ }
+ // validate role updates
+ var oldRoles = oldDoc.roles.sort();
+ var newRoles = newDoc.roles.sort();
+
+ if (oldRoles.length !== newRoles.length) {
+ throw({forbidden: 'Only _admin may edit roles'});
+ }
+
+ for (var i = 0; i < oldRoles.length; i++) {
+ if (oldRoles[i] !== newRoles[i]) {
+ throw({forbidden: 'Only _admin may edit roles'});
+ }
+ }
+ } else if (newDoc.roles.length > 0) {
+ throw({forbidden: 'Only _admin may set roles'});
+ }
+ }
+
+ // no system roles in users db
+ for (var i = 0; i < newDoc.roles.length; i++) {
+ if (newDoc.roles[i][0] === '_') {
+ throw({
+ forbidden:
+ 'No system roles (starting with underscore) in users db.'
+ });
+ }
+ }
+
+ // no system names as names
+ if (newDoc.name[0] === '_') {
+ throw({forbidden: 'Username may not start with underscore.'});
+ }
+ }
+">>).
+
+
+-define(REP_DB_DOC_VALIDATE_FUN, <<"
+ function(newDoc, oldDoc, userCtx) {
+ function reportError(error_msg) {
+ log('Error writing document `' + newDoc._id +
+ '` to replicator DB: ' + error_msg);
+ throw({forbidden: error_msg});
+ }
+
+ var isReplicator = (userCtx.roles.indexOf('_replicator') >= 0);
+ if (oldDoc && !newDoc._deleted && !isReplicator) {
+ reportError('Only the replicator can edit replication documents.');
+ }
+
+ if (newDoc.user_ctx) {
+ var user_ctx = newDoc.user_ctx;
+
+ if (typeof user_ctx !== 'object') {
+ reportError('The user_ctx property must be an object.');
+ }
+
+ if (!(user_ctx.name === null ||
+ (typeof user_ctx.name === 'undefined') ||
+ ((typeof user_ctx.name === 'string') &&
+ user_ctx.name.length > 0))) {
+ reportError('The name property of the user_ctx must be a ' +
+ 'non-empty string.');
+ }
+
+ if (user_ctx.roles && !isArray(user_ctx.roles)) {
+ reportError('The roles property of the user_ctx must be ' +
+ 'an array of strings.');
+ }
+
+ if (user_ctx.roles) {
+ for (var i = 0; i < user_ctx.roles.length; i++) {
+ var role = user_ctx.roles[i];
+
+ if (typeof role !== 'string' || role.length === 0) {
+ reportError('Each role must be a non-empty string.');
+ }
+ if (role[0] === '_') {
+ reportError('System roles (starting with underscore) ' +
+ 'are not allowed.');
+ }
+ }
+ }
+ }
+ }
+">>).
diff --git a/src/couchdb/couch_key_tree.erl b/src/couchdb/couch_key_tree.erl
index d5944119..2e3b3e3c 100644
--- a/src/couchdb/couch_key_tree.erl
+++ b/src/couchdb/couch_key_tree.erl
@@ -16,100 +16,97 @@
-export([map/2, get_all_leafs/1, count_leafs/1, remove_leafs/2,
get_all_leafs_full/1,stem/2,map_leafs/2]).
-% a key tree looks like this:
-% Tree -> [] or [{Key, Value, ChildTree} | SiblingTree]
-% ChildTree -> Tree
-% SiblingTree -> [] or [{SiblingKey, Value, Tree} | Tree]
-% And each Key < SiblingKey
-
+% Tree::term() is really a tree(), but we don't want to require R13B04 yet
+-type branch() :: {Key::term(), Value::term(), Tree::term()}.
+-type path() :: {Start::pos_integer(), branch()}.
+-type tree() :: [branch()]. % sorted by key
% partial trees arranged by how much they are cut off.
-merge(A, B) ->
- {Merged, HasConflicts} =
- lists:foldl(
- fun(InsertTree, {AccTrees, AccConflicts}) ->
- {ok, Merged, Conflicts} = merge_one(AccTrees, InsertTree, [], false),
- {Merged, Conflicts or AccConflicts}
- end,
- {A, false}, B),
- if HasConflicts or
- ((length(Merged) =/= length(A)) and (length(Merged) =/= length(B))) ->
+-spec merge([path()], path()) -> {[path()], conflicts | no_conflicts}.
+merge(Paths, Path) ->
+ {ok, Merged, HasConflicts} = merge_one(Paths, Path, [], false),
+ if HasConflicts ->
+ Conflicts = conflicts;
+ (length(Merged) =/= length(Paths)) and (length(Merged) =/= 1) ->
Conflicts = conflicts;
true ->
Conflicts = no_conflicts
end,
{lists:sort(Merged), Conflicts}.
+-spec merge_one(Original::[path()], Inserted::path(), [path()], boolean()) ->
+ {ok, Merged::[path()], NewConflicts::boolean()}.
merge_one([], Insert, OutAcc, ConflictsAcc) ->
{ok, [Insert | OutAcc], ConflictsAcc};
-merge_one([{Start, Tree}|Rest], {StartInsert, TreeInsert}, OutAcc, ConflictsAcc) ->
- if Start =< StartInsert ->
- StartA = Start,
- StartB = StartInsert,
- TreeA = Tree,
- TreeB = TreeInsert;
- true ->
- StartB = Start,
- StartA = StartInsert,
- TreeB = Tree,
- TreeA = TreeInsert
- end,
- case merge_at([TreeA], StartB - StartA, TreeB) of
- {ok, [CombinedTrees], Conflicts} ->
- merge_one(Rest, {StartA, CombinedTrees}, OutAcc, Conflicts or ConflictsAcc);
+merge_one([{Start, Tree}|Rest], {StartInsert, TreeInsert}, Acc, HasConflicts) ->
+ case merge_at([Tree], StartInsert - Start, [TreeInsert]) of
+ {ok, [Merged], Conflicts} ->
+ MergedStart = lists:min([Start, StartInsert]),
+ merge_one(Rest, {MergedStart, Merged}, Acc, Conflicts or HasConflicts);
no ->
- merge_one(Rest, {StartB, TreeB}, [{StartA, TreeA} | OutAcc], ConflictsAcc)
+ AccOut = [{Start, Tree} | Acc],
+ merge_one(Rest, {StartInsert, TreeInsert}, AccOut, HasConflicts)
end.
+-spec merge_at(tree(), Place::integer(), tree()) ->
+ {ok, Merged::tree(), HasConflicts::boolean()} | no.
+merge_at(_Ours, _Place, []) ->
+ no;
merge_at([], _Place, _Insert) ->
no;
-merge_at([{Key, Value, SubTree}|Sibs], 0, {InsertKey, InsertValue, InsertSubTree}) ->
- if Key == InsertKey ->
- {Merge, Conflicts} = merge_simple(SubTree, InsertSubTree),
- {ok, [{Key, Value, Merge} | Sibs], Conflicts};
- true ->
- case merge_at(Sibs, 0, {InsertKey, InsertValue, InsertSubTree}) of
- {ok, Merged, Conflicts} ->
- {ok, [{Key, Value, SubTree} | Merged], Conflicts};
- no ->
- no
- end
- end;
-merge_at([{Key, Value, SubTree}|Sibs], Place, Insert) ->
- case merge_at(SubTree, Place - 1,Insert) of
+merge_at([{Key, Value, SubTree}|Sibs], Place, InsertTree) when Place > 0 ->
+ % inserted starts later than committed, need to drill into committed subtree
+ case merge_at(SubTree, Place - 1, InsertTree) of
{ok, Merged, Conflicts} ->
{ok, [{Key, Value, Merged} | Sibs], Conflicts};
no ->
- case merge_at(Sibs, Place, Insert) of
+ case merge_at(Sibs, Place, InsertTree) of
{ok, Merged, Conflicts} ->
{ok, [{Key, Value, SubTree} | Merged], Conflicts};
no ->
no
end
+ end;
+merge_at(OurTree, Place, [{Key, Value, SubTree}]) when Place < 0 ->
+ % inserted starts earlier than committed, need to drill into insert subtree
+ case merge_at(OurTree, Place + 1, SubTree) of
+ {ok, Merged, Conflicts} ->
+ {ok, [{Key, Value, Merged}], Conflicts};
+ no ->
+ no
+ end;
+merge_at([{Key, Value, SubTree}|Sibs], 0, [{Key, _Value, InsertSubTree}]) ->
+ {Merged, Conflicts} = merge_simple(SubTree, InsertSubTree),
+ {ok, [{Key, Value, Merged} | Sibs], Conflicts};
+merge_at([{OurKey, _, _} | _], 0, [{Key, _, _}]) when OurKey > Key ->
+ % siblings keys are ordered, no point in continuing
+ no;
+merge_at([Tree | Sibs], 0, InsertTree) ->
+ case merge_at(Sibs, 0, InsertTree) of
+ {ok, Merged, Conflicts} ->
+ {ok, [Tree | Merged], Conflicts};
+ no ->
+ no
end.
% key tree functions
+
+-spec merge_simple(tree(), tree()) -> {Merged::tree(), NewConflicts::boolean()}.
merge_simple([], B) ->
{B, false};
merge_simple(A, []) ->
{A, false};
-merge_simple([ATree | ANextTree], [BTree | BNextTree]) ->
- {AKey, AValue, ASubTree} = ATree,
- {BKey, _BValue, BSubTree} = BTree,
- if
- AKey == BKey ->
- %same key
- {MergedSubTree, Conflict1} = merge_simple(ASubTree, BSubTree),
- {MergedNextTree, Conflict2} = merge_simple(ANextTree, BNextTree),
- {[{AKey, AValue, MergedSubTree} | MergedNextTree], Conflict1 or Conflict2};
- AKey < BKey ->
- {MTree, _} = merge_simple(ANextTree, [BTree | BNextTree]),
- {[ATree | MTree], true};
- true ->
- {MTree, _} = merge_simple([ATree | ANextTree], BNextTree),
- {[BTree | MTree], true}
- end.
+merge_simple([{Key, Value, SubA} | NextA], [{Key, _, SubB} | NextB]) ->
+ {MergedSubTree, Conflict1} = merge_simple(SubA, SubB),
+ {MergedNextTree, Conflict2} = merge_simple(NextA, NextB),
+ {[{Key, Value, MergedSubTree} | MergedNextTree], Conflict1 or Conflict2};
+merge_simple([{A, _, _} = Tree | Next], [{B, _, _} | _] = Insert) when A < B ->
+ {Merged, _} = merge_simple(Next, Insert),
+ {[Tree | Merged], true};
+merge_simple(Ours, [Tree | Next]) ->
+ {Merged, _} = merge_simple(Ours, Next),
+ {[Tree | Merged], true}.
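
merge/2 now takes a single path to insert rather than a list of them, with
the shapes pinned down by the specs above. A small sketch of the API using
hypothetical one-revision trees:

    %% Paths are {StartDepth, {Key, Value, ChildTree}} tuples.
    One = {1, {"1", "foo", []}},
    Sib = {1, {"2", "bar", []}},
    %% Inserting into an empty forest is conflict-free:
    {[One], no_conflicts} = couch_key_tree:merge([], One),
    %% A second root at the same depth is reported as a conflict:
    {[One, Sib], conflicts} = couch_key_tree:merge([One], Sib).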
find_missing(_Tree, []) ->
[];
@@ -159,7 +156,7 @@ remove_leafs(Trees, Keys) ->
fun({PathPos, Path},TreeAcc) ->
[SingleTree] = lists:foldl(
fun({K,V},NewTreeAcc) -> [{K,V,NewTreeAcc}] end, [], Path),
- {NewTrees, _} = merge(TreeAcc, [{PathPos + 1 - length(Path), SingleTree}]),
+ {NewTrees, _} = merge(TreeAcc, {PathPos + 1 - length(Path), SingleTree}),
NewTrees
end, [], FilteredPaths),
{NewTree, RemovedKeys}.
@@ -290,7 +287,7 @@ map(Fun, [{Pos, Tree}|Rest]) ->
map_simple(_Fun, _Pos, []) ->
[];
map_simple(Fun, Pos, [{Key, Value, SubTree} | RestTree]) ->
- Value2 = Fun({Pos, Key}, Value,
+ Value2 = Fun({Pos, Key}, Value,
if SubTree == [] -> leaf; true -> branch end),
[{Key, Value2, map_simple(Fun, Pos + 1, SubTree)} | map_simple(Fun, Pos, RestTree)].
@@ -321,7 +318,7 @@ stem(Trees, Limit) ->
fun({PathPos, Path},TreeAcc) ->
[SingleTree] = lists:foldl(
fun({K,V},NewTreeAcc) -> [{K,V,NewTreeAcc}] end, [], Path),
- {NewTrees, _} = merge(TreeAcc, [{PathPos + 1 - length(Path), SingleTree}]),
+ {NewTrees, _} = merge(TreeAcc, {PathPos + 1 - length(Path), SingleTree}),
NewTrees
end, [], Paths2).
diff --git a/src/couchdb/couch_log.erl b/src/couchdb/couch_log.erl
index 5a45c207..dfd2d178 100644
--- a/src/couchdb/couch_log.erl
+++ b/src/couchdb/couch_log.erl
@@ -50,14 +50,23 @@ init([]) ->
fun("log", "file") ->
?MODULE:stop();
("log", "level") ->
+ ?MODULE:stop();
+ ("log", "include_sasl") ->
?MODULE:stop()
end),
Filename = couch_config:get("log", "file", "couchdb.log"),
- Level = couch_config:get("log", "level", "info"),
+ Level = level_integer(list_to_atom(couch_config:get("log", "level", "info"))),
+ Sasl = list_to_atom(couch_config:get("log", "include_sasl", "true")),
+
+ case ets:info(?MODULE) of
+ undefined -> ets:new(?MODULE, [named_table]);
+ _ -> ok
+ end,
+ ets:insert(?MODULE, {level, Level}),
{ok, Fd} = file:open(Filename, [append]),
- {ok, {Fd, level_integer(list_to_atom(Level))}}.
+ {ok, {Fd, Level, Sasl}}.
debug_on() ->
get_level_integer() =< ?LEVEL_DEBUG.
@@ -72,29 +81,35 @@ get_level() ->
level_atom(get_level_integer()).
get_level_integer() ->
- catch gen_event:call(error_logger, couch_log, get_level_integer).
+ try
+ ets:lookup_element(?MODULE, level, 2)
+ catch error:badarg ->
+ ?LEVEL_ERROR
+ end.
set_level_integer(Int) ->
gen_event:call(error_logger, couch_log, {set_level_integer, Int}).
-handle_event({error_report, _, {Pid, couch_error, {Format, Args}}}, {Fd, _LogLevel}=State) ->
- log(Fd, Pid, error, Format, Args),
- {ok, State};
-handle_event({error_report, _, {Pid, _, _}}=Event, {Fd, _LogLevel}=State) ->
- log(Fd, Pid, error, "~p", [Event]),
- {ok, State};
-handle_event({error, _, {Pid, Format, Args}}, {Fd, _LogLevel}=State) ->
+handle_event({Pid, couch_error, {Format, Args}}, {Fd, _LogLevel, _Sasl}=State) ->
log(Fd, Pid, error, Format, Args),
{ok, State};
-handle_event({info_report, _, {Pid, couch_info, {Format, Args}}}, {Fd, LogLevel}=State)
+handle_event({Pid, couch_info, {Format, Args}}, {Fd, LogLevel, _Sasl}=State)
when LogLevel =< ?LEVEL_INFO ->
log(Fd, Pid, info, Format, Args),
{ok, State};
-handle_event({info_report, _, {Pid, couch_debug, {Format, Args}}}, {Fd, LogLevel}=State)
+handle_event({Pid, couch_debug, {Format, Args}}, {Fd, LogLevel, _Sasl}=State)
when LogLevel =< ?LEVEL_DEBUG ->
log(Fd, Pid, debug, Format, Args),
{ok, State};
-handle_event({_, _, {Pid, _, _}}=Event, {Fd, LogLevel}=State)
+handle_event({error_report, _, {Pid, _, _}}=Event, {Fd, _LogLevel, Sasl}=State)
+when Sasl =/= false ->
+ log(Fd, Pid, error, "~p", [Event]),
+ {ok, State};
+handle_event({error, _, {Pid, Format, Args}}, {Fd, _LogLevel, Sasl}=State)
+when Sasl =/= false ->
+ log(Fd, Pid, error, Format, Args),
+ {ok, State};
+handle_event({_, _, {Pid, _, _}}=Event, {Fd, LogLevel, _Sasl}=State)
when LogLevel =< ?LEVEL_TMI ->
% log every remaining event if tmi!
log(Fd, Pid, tmi, "~p", [Event]),
@@ -102,10 +117,9 @@ when LogLevel =< ?LEVEL_TMI ->
handle_event(_Event, State) ->
{ok, State}.
-handle_call(get_level_integer, {_Fd, LogLevel}=State) ->
- {ok, LogLevel, State};
-handle_call({set_level_integer, NewLevel}, {Fd, _LogLevel}) ->
- {ok, ok, {Fd, NewLevel}}.
+handle_call({set_level_integer, NewLevel}, {Fd, _LogLevel, Sasl}) ->
+ ets:insert(?MODULE, {level, NewLevel}),
+ {ok, ok, {Fd, NewLevel, Sasl}}.
handle_info(_Info, State) ->
{ok, State}.
@@ -113,7 +127,7 @@ handle_info(_Info, State) ->
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
-terminate(_Arg, {Fd, _LoggingLevel}) ->
+terminate(_Arg, {Fd, _LoggingLevel, _Sasl}) ->
file:close(Fd).
log(Fd, Pid, Level, Format, Args) ->
@@ -121,11 +135,11 @@ log(Fd, Pid, Level, Format, Args) ->
ok = io:format("[~s] [~p] ~s~n", [Level, Pid, Msg]), % dump to console too
Msg2 = re:replace(lists:flatten(Msg),"\\r\\n|\\r|\\n", "\r\n",
[global, {return, list}]),
- ok = io:format(Fd, "[~s] [~s] [~p] ~s\r~n\r~n", [httpd_util:rfc1123_date(), Level, Pid, Msg2]).
+ ok = io:format(Fd, "[~s] [~s] [~p] ~s\r~n", [httpd_util:rfc1123_date(), Level, Pid, Msg2]).
read(Bytes, Offset) ->
LogFileName = couch_config:get("log", "file"),
- LogFileSize = couch_util:file_read_size(LogFileName),
+ LogFileSize = filelib:file_size(LogFileName),
{ok, Fd} = file:open(LogFileName, [read]),
Start = lists:max([LogFileSize - Bytes, 0]) + Offset,
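
The logger now caches its level in a named ets table, so debug_on/0 and
get_level_integer/0 avoid a gen_event round-trip, and SASL reports can be
silenced via the new include_sasl key. A hypothetical local.ini fragment
exercising the keys read in init/1:

    [log]
    file = /var/log/couchdb/couch.log
    level = info
    include_sasl = false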
diff --git a/src/couchdb/couch_native_process.erl b/src/couchdb/couch_native_process.erl
index 65e4e131..b512f712 100644
--- a/src/couchdb/couch_native_process.erl
+++ b/src/couchdb/couch_native_process.erl
@@ -1,16 +1,16 @@
-% Licensed under the Apache License, Version 2.0 (the "License");
-% you may not use this file except in compliance with the License.
+% Licensed under the Apache License, Version 2.0 (the "License");
+% you may not use this file except in compliance with the License.
%
% You may obtain a copy of the License at
% http://www.apache.org/licenses/LICENSE-2.0
%
-% Unless required by applicable law or agreed to in writing,
-% software distributed under the License is distributed on an
-% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
-% either express or implied.
+% Unless required by applicable law or agreed to in writing,
+% software distributed under the License is distributed on an
+% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+% either express or implied.
%
% See the License for the specific language governing permissions
-% and limitations under the License.
+% and limitations under the License.
%
% This file drew much inspiration from erlview, which was written by and
% copyright Michael McDaniel [http://autosys.us], and is also under APL 2.0
@@ -25,14 +25,14 @@
%
% fun({Doc}) ->
% % Below, we emit a single record - the _id as key, null as value
-% DocId = proplists:get_value(Doc, <<"_id">>, null),
+% DocId = couch_util:get_value(Doc, <<"_id">>, null),
% Emit(DocId, null)
% end.
%
% which should be roughly the same as the javascript:
% emit(doc._id, null);
%
-% This module exposes enough functions such that a native erlang server can
+% This module exposes enough functions such that a native erlang server can
% act as a fully-fledged view server, but no 'helper' functions specifically
% for simplifying your erlang view code. It is expected other third-party
% extensions will evolve which offer useful layers on top of this view server
@@ -40,7 +40,8 @@
-module(couch_native_process).
-behaviour(gen_server).
--export([start_link/0,init/1,terminate/2,handle_call/3,handle_cast/2]).
+-export([start_link/0,init/1,terminate/2,handle_call/3,handle_cast/2,code_change/3,
+ handle_info/2]).
-export([set_timeout/2, prompt/2]).
-define(STATE, native_proc_state).
@@ -76,23 +77,27 @@ handle_call({prompt, Data}, _From, State) ->
throw:{error, Why} ->
{State, [<<"error">>, Why, Why]}
end,
-
+
case Resp of
{error, Reason} ->
Msg = io_lib:format("couch native server error: ~p", [Reason]),
{reply, [<<"error">>, <<"native_query_server">>, list_to_binary(Msg)], NewState};
[<<"error">> | Rest] ->
- Msg = io_lib:format("couch native server error: ~p", [Rest]),
+ % Msg = io_lib:format("couch native server error: ~p", [Rest]),
+ % TODO: markh? (jan)
{reply, [<<"error">> | Rest], NewState};
[<<"fatal">> | Rest] ->
- Msg = io_lib:format("couch native server error: ~p", [Rest]),
+ % Msg = io_lib:format("couch native server error: ~p", [Rest]),
+ % TODO: markh? (jan)
{stop, fatal, [<<"error">> | Rest], NewState};
Resp ->
{reply, Resp, NewState}
end.
-handle_cast(_Msg, State) -> {noreply, State}.
-handle_info(_Msg, State) -> {noreply, State}.
+handle_cast(foo, State) -> {noreply, State}.
+handle_info({'EXIT',_,normal}, State) -> {noreply, State};
+handle_info({'EXIT',_,Reason}, State) ->
+ {stop, Reason, State}.
terminate(_Reason, _State) -> ok.
code_change(_OldVersion, State, _Extra) -> {ok, State}.
@@ -168,12 +173,12 @@ ddoc(State, {DDoc}, [FunPath, Args]) ->
% load fun from the FunPath
BFun = lists:foldl(fun
(Key, {Props}) when is_list(Props) ->
- proplists:get_value(Key, Props, nil);
- (Key, Fun) when is_binary(Fun) ->
+ couch_util:get_value(Key, Props, nil);
+ (_Key, Fun) when is_binary(Fun) ->
Fun;
- (Key, nil) ->
+ (_Key, nil) ->
throw({error, not_found});
- (Key, Fun) ->
+ (_Key, _Fun) ->
throw({error, malformed_ddoc})
end, {DDoc}, FunPath),
ddoc(State, makefun(State, BFun, {DDoc}), FunPath, Args).
@@ -238,7 +243,7 @@ load_ddoc(DDocs, DDocId) ->
try dict:fetch(DDocId, DDocs) of
{DDoc} -> {DDoc}
catch
- _:Else -> throw({error, ?l2b(io_lib:format("Native Query Server missing DDoc with Id: ~s",[DDocId]))})
+ _:_Else -> throw({error, ?l2b(io_lib:format("Native Query Server missing DDoc with Id: ~s",[DDocId]))})
end.
bindings(State, Sig) ->
@@ -300,7 +305,7 @@ bindings(State, Sig, DDoc) ->
{'FoldRows', FoldRows}
],
case DDoc of
- {Props} ->
+ {_Props} ->
Bindings ++ [{'DDoc', DDoc}];
_Else -> Bindings
end.
@@ -308,11 +313,11 @@ bindings(State, Sig, DDoc) ->
% thanks to erlview, via:
% http://erlang.org/pipermail/erlang-questions/2003-November/010544.html
makefun(State, Source) ->
- Sig = erlang:md5(Source),
+ Sig = couch_util:md5(Source),
BindFuns = bindings(State, Sig),
{Sig, makefun(State, Source, BindFuns)}.
makefun(State, Source, {DDoc}) ->
- Sig = erlang:md5(lists:flatten([Source, term_to_binary(DDoc)])),
+ Sig = couch_util:md5(lists:flatten([Source, term_to_binary(DDoc)])),
BindFuns = bindings(State, Sig, {DDoc}),
{Sig, makefun(State, Source, BindFuns)};
makefun(_State, Source, BindFuns) when is_list(BindFuns) ->
@@ -365,7 +370,7 @@ start_list_resp(Self, Sig) ->
undefined -> {[{<<"headers">>, {[]}}]};
CurrHdrs -> CurrHdrs
end,
- Chunks =
+ Chunks =
case erlang:get(Sig) of
undefined -> [];
CurrChunks -> CurrChunks
diff --git a/src/couchdb/couch_os_daemons.erl b/src/couchdb/couch_os_daemons.erl
new file mode 100644
index 00000000..bdd39997
--- /dev/null
+++ b/src/couchdb/couch_os_daemons.erl
@@ -0,0 +1,363 @@
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+% http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+-module(couch_os_daemons).
+-behaviour(gen_server).
+
+-export([start_link/0, info/0, info/1, config_change/2]).
+
+-export([init/1, terminate/2, code_change/3]).
+-export([handle_call/3, handle_cast/2, handle_info/2]).
+
+-include("couch_db.hrl").
+
+-record(daemon, {
+ port,
+ name,
+ cmd,
+ kill,
+ status=running,
+ cfg_patterns=[],
+ errors=[],
+ buf=[]
+}).
+
+-define(PORT_OPTIONS, [stream, {line, 1024}, binary, exit_status, hide]).
+-define(TIMEOUT, 5000).
+
+start_link() ->
+ gen_server:start_link({local, ?MODULE}, ?MODULE, [], []).
+
+info() ->
+ info([]).
+
+info(Options) ->
+ gen_server:call(?MODULE, {daemon_info, Options}).
+
+config_change(Section, Key) ->
+ gen_server:cast(?MODULE, {config_change, Section, Key}).
+
+init(_) ->
+ process_flag(trap_exit, true),
+ ok = couch_config:register(fun couch_os_daemons:config_change/2),
+ Table = ets:new(?MODULE, [protected, set, {keypos, #daemon.port}]),
+ reload_daemons(Table),
+ {ok, Table}.
+
+terminate(_Reason, Table) ->
+ [stop_port(D) || D <- ets:tab2list(Table)],
+ ok.
+
+handle_call({daemon_info, Options}, _From, Table) when is_list(Options) ->
+ case lists:member(table, Options) of
+ true ->
+ {reply, {ok, ets:tab2list(Table)}, Table};
+ _ ->
+ {reply, {ok, Table}, Table}
+ end;
+handle_call(Msg, From, Table) ->
+ ?LOG_ERROR("Unknown call message to ~p from ~p: ~p", [?MODULE, From, Msg]),
+ {stop, error, Table}.
+
+handle_cast({config_change, Sect, Key}, Table) ->
+ restart_daemons(Table, Sect, Key),
+ case Sect of
+ "os_daemons" -> reload_daemons(Table);
+ _ -> ok
+ end,
+ {noreply, Table};
+handle_cast(stop, Table) ->
+ {stop, normal, Table};
+handle_cast(Msg, Table) ->
+ ?LOG_ERROR("Unknown cast message to ~p: ~p", [?MODULE, Msg]),
+ {stop, error, Table}.
+
+handle_info({'EXIT', Port, Reason}, Table) ->
+ case ets:lookup(Table, Port) of
+ [] ->
+ ?LOG_INFO("Port ~p exited after stopping: ~p~n", [Port, Reason]);
+ [#daemon{status=stopping}] ->
+ true = ets:delete(Table, Port);
+ [#daemon{name=Name, status=restarting, errors=Errs}=D] ->
+ ?LOG_INFO("Daemon ~P restarting after config change.", [Name]),
+ true = ets:delete(Table, Port),
+ {ok, Port2} = start_port(D#daemon.cmd),
+ true = ets:insert(Table, D#daemon{
+ port=Port2, status=running, kill=undefined, errors=Errs, buf=[]
+ });
+ [#daemon{name=Name, status=halted}] ->
+ ?LOG_ERROR("Halted daemon process: ~p", [Name]);
+ [D] ->
+ ?LOG_ERROR("Invalid port state at exit: ~p", [D])
+ end,
+ {noreply, Table};
+handle_info({Port, closed}, Table) ->
+ handle_info({Port, {exit_status, closed}}, Table);
+handle_info({Port, {exit_status, Status}}, Table) ->
+ case ets:lookup(Table, Port) of
+ [] ->
+ ?LOG_ERROR("Unknown port ~p exiting ~p", [Port, Status]),
+ {stop, {error, unknown_port_died, Status}, Table};
+ [#daemon{name=Name, status=restarting, errors=Errors}=D] ->
+ ?LOG_INFO("Daemon ~P restarting after config change.", [Name]),
+ true = ets:delete(Table, Port),
+ {ok, Port2} = start_port(D#daemon.cmd),
+ true = ets:insert(Table, D#daemon{
+ port=Port2, kill=undefined, errors=Errors, buf=[]
+ }),
+ {noreply, Table};
+ [#daemon{status=stopping}=D] ->
+ % The configuration changed and this daemon is no
+ % longer needed.
+ ?LOG_DEBUG("Port ~p shut down.", [D#daemon.name]),
+ true = ets:delete(Table, Port),
+ {noreply, Table};
+ [D] ->
+ % Port died for unknown reason. Check to see if it's
+ % died too many times or if we should boot it back up.
+ case should_halt([now() | D#daemon.errors]) of
+ {true, _} ->
+ % Halting the process. We won't try and reboot
+ % until the configuration changes.
+ Fmt = "Daemon ~p halted with exit_status ~p",
+ ?LOG_ERROR(Fmt, [D#daemon.name, Status]),
+ D2 = D#daemon{status=halted, errors=nil, buf=nil},
+ true = ets:insert(Table, D2),
+ {noreply, Table};
+ {false, Errors} ->
+ % We're guessing it was a random error, this daemon
+ % has behaved so we'll give it another chance.
+ Fmt = "Daemon ~p is being rebooted after exit_status ~p",
+ ?LOG_INFO(Fmt, [D#daemon.name, Status]),
+ true = ets:delete(Table, Port),
+ {ok, Port2} = start_port(D#daemon.cmd),
+ true = ets:insert(Table, D#daemon{
+ port=Port2, kill=undefined, errors=Errors, buf=[]
+ }),
+ {noreply, Table}
+ end;
+ _Else ->
+ throw(error)
+ end;
+handle_info({Port, {data, {noeol, Data}}}, Table) ->
+ [#daemon{buf=Buf}=D] = ets:lookup(Table, Port),
+ true = ets:insert(Table, D#daemon{buf=[Data | Buf]}),
+ {noreply, Table};
+handle_info({Port, {data, {eol, Data}}}, Table) ->
+ [#daemon{buf=Buf}=D] = ets:lookup(Table, Port),
+ Line = lists:reverse(Buf, Data),
+ % The first line echoed back is the kill command
+ % for when we go to get rid of the port. Lines after
+ % that are considered part of the stdio API.
+ case D#daemon.kill of
+ undefined ->
+ true = ets:insert(Table, D#daemon{kill=?b2l(Line), buf=[]});
+ _Else ->
+ D2 = case (catch ?JSON_DECODE(Line)) of
+ {invalid_json, Rejected} ->
+ ?LOG_ERROR("Ignoring OS daemon request: ~p", [Rejected]),
+ D;
+ JSON ->
+ {ok, D3} = handle_port_message(D, JSON),
+ D3
+ end,
+ true = ets:insert(Table, D2#daemon{buf=[]})
+ end,
+ {noreply, Table};
+handle_info({Port, Error}, Table) ->
+ ?LOG_ERROR("Unexpectd message from port ~p: ~p", [Port, Error]),
+ stop_port(Port),
+ [D] = ets:lookup(Table, Port),
+ true = ets:insert(Table, D#daemon{status=restarting, buf=nil}),
+ {noreply, Table};
+handle_info(Msg, Table) ->
+ ?LOG_ERROR("Unexpected info message to ~p: ~p", [?MODULE, Msg]),
+ {stop, error, Table}.
+
+code_change(_OldVsn, State, _Extra) ->
+ {ok, State}.
+
+% Internal API
+
+%
+% Port management helpers
+%
+
+start_port(Command) ->
+ PrivDir = couch_util:priv_dir(),
+ Spawnkiller = filename:join(PrivDir, "couchspawnkillable"),
+ Port = open_port({spawn, Spawnkiller ++ " " ++ Command}, ?PORT_OPTIONS),
+ {ok, Port}.
+
+
+stop_port(#daemon{port=Port, kill=undefined}=D) ->
+ ?LOG_ERROR("Stopping daemon without a kill command: ~p", [D#daemon.name]),
+ catch port_close(Port);
+stop_port(#daemon{port=Port}=D) ->
+ ?LOG_DEBUG("Stopping daemon: ~p", [D#daemon.name]),
+ os:cmd(D#daemon.kill),
+ catch port_close(Port).
+
+
+handle_port_message(#daemon{port=Port}=Daemon, [<<"get">>, Section]) ->
+ KVs = couch_config:get(Section),
+ Data = lists:map(fun({K, V}) -> {?l2b(K), ?l2b(V)} end, KVs),
+ Json = iolist_to_binary(?JSON_ENCODE({Data})),
+ port_command(Port, <<Json/binary, "\n">>),
+ {ok, Daemon};
+handle_port_message(#daemon{port=Port}=Daemon, [<<"get">>, Section, Key]) ->
+ Value = case couch_config:get(Section, Key, null) of
+ null -> null;
+ String -> ?l2b(String)
+ end,
+ Json = iolist_to_binary(?JSON_ENCODE(Value)),
+ port_command(Port, <<Json/binary, "\n">>),
+ {ok, Daemon};
+handle_port_message(Daemon, [<<"register">>, Sec]) when is_binary(Sec) ->
+ Patterns = lists:usort(Daemon#daemon.cfg_patterns ++ [{?b2l(Sec)}]),
+ {ok, Daemon#daemon{cfg_patterns=Patterns}};
+handle_port_message(Daemon, [<<"register">>, Sec, Key])
+ when is_binary(Sec) andalso is_binary(Key) ->
+ Pattern = {?b2l(Sec), ?b2l(Key)},
+ Patterns = lists:usort(Daemon#daemon.cfg_patterns ++ [Pattern]),
+ {ok, Daemon#daemon{cfg_patterns=Patterns}};
+handle_port_message(#daemon{name=Name}=Daemon, [<<"log">>, Msg]) ->
+ handle_log_message(Name, Msg, <<"info">>),
+ {ok, Daemon};
+handle_port_message(#daemon{name=Name}=Daemon, [<<"log">>, Msg, {Opts}]) ->
+ Level = couch_util:get_value(<<"level">>, Opts, <<"info">>),
+ handle_log_message(Name, Msg, Level),
+ {ok, Daemon};
+handle_port_message(#daemon{name=Name}=Daemon, Else) ->
+ ?LOG_ERROR("Daemon ~p made invalid request: ~p", [Name, Else]),
+ {ok, Daemon}.
+
+
+handle_log_message(Name, Msg, _Level) when not is_binary(Msg) ->
+ ?LOG_ERROR("Invalid log message from daemon ~p: ~p", [Name, Msg]);
+handle_log_message(Name, Msg, <<"debug">>) ->
+ ?LOG_DEBUG("Daemon ~p :: ~s", [Name, ?b2l(Msg)]);
+handle_log_message(Name, Msg, <<"info">>) ->
+ ?LOG_INFO("Daemon ~p :: ~s", [Name, ?b2l(Msg)]);
+handle_log_message(Name, Msg, <<"error">>) ->
+ ?LOG_ERROR("Daemon: ~p :: ~s", [Name, ?b2l(Msg)]);
+handle_log_message(Name, Msg, Level) ->
+ ?LOG_ERROR("Invalid log level from daemon: ~p", [Level]),
+ ?LOG_INFO("Daemon: ~p :: ~s", [Name, ?b2l(Msg)]).
+
+%
+% Daemon management helpers
+%
+
+reload_daemons(Table) ->
+ % List of daemons we want to have running.
+ Configured = lists:sort(couch_config:get("os_daemons")),
+
+ % Remove records for daemons that were halted.
+ MSpecHalted = #daemon{name='$1', cmd='$2', status=halted, _='_'},
+ Halted = lists:sort([{N, C} || [N, C] <- ets:match(Table, MSpecHalted)]),
+ ok = stop_os_daemons(Table, find_to_stop(Configured, Halted, [])),
+
+ % Stop daemons that are running
+ % Start newly configured daemons
+ MSpecRunning = #daemon{name='$1', cmd='$2', status=running, _='_'},
+ Running = lists:sort([{N, C} || [N, C] <- ets:match(Table, MSpecRunning)]),
+ ok = stop_os_daemons(Table, find_to_stop(Configured, Running, [])),
+ ok = boot_os_daemons(Table, find_to_boot(Configured, Running, [])),
+ ok.
+
+
+restart_daemons(Table, Sect, Key) ->
+ restart_daemons(Table, Sect, Key, ets:first(Table)).
+
+restart_daemons(_, _, _, '$end_of_table') ->
+ ok;
+restart_daemons(Table, Sect, Key, Port) ->
+ [D] = ets:lookup(Table, Port),
+ HasSect = lists:member({Sect}, D#daemon.cfg_patterns),
+ HasKey = lists:member({Sect, Key}, D#daemon.cfg_patterns),
+ case HasSect or HasKey of
+ true ->
+ stop_port(D),
+ D2 = D#daemon{status=restarting, buf=nil},
+ true = ets:insert(Table, D2);
+ _ ->
+ ok
+ end,
+ restart_daemons(Table, Sect, Key, ets:next(Table, Port)).
+
+
+stop_os_daemons(_Table, []) ->
+ ok;
+stop_os_daemons(Table, [{Name, Cmd} | Rest]) ->
+ [[Port]] = ets:match(Table, #daemon{port='$1', name=Name, cmd=Cmd, _='_'}),
+ [D] = ets:lookup(Table, Port),
+ case D#daemon.status of
+ halted ->
+ ets:delete(Table, Port);
+ _ ->
+ stop_port(D),
+ D2 = D#daemon{status=stopping, errors=nil, buf=nil},
+ true = ets:insert(Table, D2)
+ end,
+ stop_os_daemons(Table, Rest).
+
+boot_os_daemons(_Table, []) ->
+ ok;
+boot_os_daemons(Table, [{Name, Cmd} | Rest]) ->
+ {ok, Port} = start_port(Cmd),
+ true = ets:insert(Table, #daemon{port=Port, name=Name, cmd=Cmd}),
+ boot_os_daemons(Table, Rest).
+
+% Elements unique to the configured set need to be booted.
+find_to_boot([], _Rest, Acc) ->
+ % Nothing else configured.
+ Acc;
+find_to_boot([D | R1], [D | R2], Acc) ->
+ % Elements are equal, daemon already running.
+ find_to_boot(R1, R2, Acc);
+find_to_boot([D1 | R1], [D2 | _]=A2, Acc) when D1 < D2 ->
+ find_to_boot(R1, A2, [D1 | Acc]);
+find_to_boot(A1, [_ | R2], Acc) ->
+ find_to_boot(A1, R2, Acc);
+find_to_boot(Rest, [], Acc) ->
+ % No more candidates for already running. Boot all.
+ Rest ++ Acc.
+
+% Elements unique to the running set need to be killed.
+find_to_stop([], Rest, Acc) ->
+ % The rest haven't been found, so they must all
+ % be ready to die.
+ Rest ++ Acc;
+find_to_stop([D | R1], [D | R2], Acc) ->
+ % Elements are equal, daemon already running.
+ find_to_stop(R1, R2, Acc);
+find_to_stop([D1 | R1], [D2 | _]=A2, Acc) when D1 < D2 ->
+ find_to_stop(R1, A2, Acc);
+find_to_stop(A1, [D2 | R2], Acc) ->
+ find_to_stop(A1, R2, [D2 | Acc]);
+find_to_stop(_, [], Acc) ->
+ % No more running daemons to worry about.
+ Acc.
+
+should_halt(Errors) ->
+ RetryTimeCfg = couch_config:get("os_daemon_settings", "retry_time", "5"),
+ RetryTime = list_to_integer(RetryTimeCfg),
+
+ Now = now(),
+ RecentErrors = lists:filter(fun(Time) ->
+ timer:now_diff(Now, Time) =< RetryTime * 1000000
+ end, Errors),
+
+ RetryCfg = couch_config:get("os_daemon_settings", "max_retries", "3"),
+ Retries = list_to_integer(RetryCfg),
+
+ {length(RecentErrors) >= Retries, RecentErrors}.
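
couch_os_daemons speaks a line-oriented protocol over the port's stdio:
the couchspawnkillable wrapper supplies the first line (the kill command),
and every later line from the daemon must be a JSON message (get, register,
log). A hypothetical minimal daemon as an escript, under those assumptions:

    #!/usr/bin/env escript
    %% Sketch only: a do-nothing OS daemon. The wrapper emits the kill
    %% line, so the daemon itself only writes JSON messages.
    main(_) ->
        % Ask to be notified when the [os_daemons] section changes.
        io:format("[\"register\", \"os_daemons\"]~n"),
        % Route a message through the CouchDB log.
        io:format("[\"log\", \"daemon started\"]~n"),
        wait_for_eof().

    wait_for_eof() ->
        case io:get_line("") of
            eof -> ok;
            _ -> wait_for_eof()
        end.

Such a daemon would be listed as name = /path/to/script under [os_daemons];
should_halt/1 above gives it max_retries exits within retry_time seconds
(both from [os_daemon_settings]) before marking it halted.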
diff --git a/src/couchdb/couch_os_process.erl b/src/couchdb/couch_os_process.erl
index 5ac13715..5776776b 100644
--- a/src/couchdb/couch_os_process.erl
+++ b/src/couchdb/couch_os_process.erl
@@ -93,10 +93,10 @@ readjson(OsProc) when is_record(OsProc, os_proc) ->
?LOG_INFO("OS Process ~p Log :: ~s", [OsProc#os_proc.port, Msg]),
readjson(OsProc);
[<<"error">>, Id, Reason] ->
- throw({list_to_atom(binary_to_list(Id)),Reason});
+ throw({couch_util:to_existing_atom(Id),Reason});
[<<"fatal">>, Id, Reason] ->
?LOG_INFO("OS Process ~p Fatal Error :: ~s ~p",[OsProc#os_proc.port, Id, Reason]),
- throw({list_to_atom(binary_to_list(Id)),Reason});
+ throw({couch_util:to_existing_atom(Id),Reason});
Result ->
Result
end.
@@ -104,6 +104,7 @@ readjson(OsProc) when is_record(OsProc, os_proc) ->
% gen_server API
init([Command, Options, PortOptions]) ->
+ process_flag(trap_exit, true),
PrivDir = couch_util:priv_dir(),
Spawnkiller = filename:join(PrivDir, "couchspawnkillable"),
BaseProc = #os_proc{
diff --git a/src/couchdb/couch_query_servers.erl b/src/couchdb/couch_query_servers.erl
index 30f4c4c7..b0e46937 100644
--- a/src/couchdb/couch_query_servers.erl
+++ b/src/couchdb/couch_query_servers.erl
@@ -15,9 +15,9 @@
-export([start_link/0]).
--export([init/1, terminate/2, handle_call/3, handle_cast/2, handle_info/2,code_change/3,stop/0]).
--export([start_doc_map/2, map_docs/2, stop_doc_map/1]).
--export([reduce/3, rereduce/3,validate_doc_update/4]).
+-export([init/1, terminate/2, handle_call/3, handle_cast/2, handle_info/2,code_change/3]).
+-export([start_doc_map/3, map_docs/2, stop_doc_map/1]).
+-export([reduce/3, rereduce/3,validate_doc_update/5]).
-export([filter_docs/5]).
-export([with_ddoc_proc/2, proc_prompt/2, ddoc_prompt/3, ddoc_proc_prompt/3, json_doc/1]).
@@ -35,14 +35,25 @@
stop_fun
}).
+-record(qserver, {
+ langs, % Keyed by language name, value is {Mod,Func,Arg}
+    pid_procs, % Keyed by PID, value is a #proc record.
+ lang_procs, % Keyed by language name, value is a #proc record
+ lang_limits, % Keyed by language name, value is {Lang, Limit, Current}
+ waitlist = [],
+ config
+}).
+
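
The loose state tuple becomes a #qserver record, and per-language process
pools are now capped: lang_limits tracks {Lang, Limit, Current}, and callers
that hit the limit are parked on the waitlist instead of spawning without
bound. The cap comes from configuration; a hypothetical ini fragment
matching the keys read in init/1 below:

    [query_server_config]
    reduce_limit = true
    os_process_limit = 10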
start_link() ->
gen_server:start_link({local, couch_query_servers}, couch_query_servers, [], []).
-stop() ->
- exit(whereis(couch_query_servers), normal).
-
-start_doc_map(Lang, Functions) ->
+start_doc_map(Lang, Functions, Lib) ->
Proc = get_os_process(Lang),
+ case Lib of
+ {[]} -> ok;
+ Lib ->
+ true = proc_prompt(Proc, [<<"add_lib">>, Lib])
+ end,
lists:foreach(fun(FunctionSource) ->
true = proc_prompt(Proc, [<<"add_fun">>, FunctionSource])
end, Functions),
@@ -93,7 +104,7 @@ group_reductions_results(List) ->
rereduce(_Lang, [], _ReducedValues) ->
{ok, []};
rereduce(Lang, RedSrcs, ReducedValues) ->
- Grouped = group_reductions_results(ReducedValues),
+ Grouped = group_reductions_results(ReducedValues),
Results = lists:zipwith(
fun
(<<"_", _/binary>> = FunSrc, Values) ->
@@ -133,8 +144,6 @@ os_reduce(Lang, OsRedSrcs, KVs) ->
end,
{ok, OsResults}.
-os_rereduce(_Lang, [], _KVs) ->
- {ok, []};
os_rereduce(Lang, OsRedSrcs, KVs) ->
Proc = get_os_process(Lang),
try proc_prompt(Proc, [<<"rereduce">>, OsRedSrcs, KVs]) of
@@ -146,30 +155,68 @@ os_rereduce(Lang, OsRedSrcs, KVs) ->
builtin_reduce(_Re, [], _KVs, Acc) ->
{ok, lists:reverse(Acc)};
-builtin_reduce(Re, [<<"_sum">>|BuiltinReds], KVs, Acc) ->
+builtin_reduce(Re, [<<"_sum",_/binary>>|BuiltinReds], KVs, Acc) ->
Sum = builtin_sum_rows(KVs),
builtin_reduce(Re, BuiltinReds, KVs, [Sum|Acc]);
-builtin_reduce(reduce, [<<"_count">>|BuiltinReds], KVs, Acc) ->
+builtin_reduce(reduce, [<<"_count",_/binary>>|BuiltinReds], KVs, Acc) ->
Count = length(KVs),
builtin_reduce(reduce, BuiltinReds, KVs, [Count|Acc]);
-builtin_reduce(rereduce, [<<"_count">>|BuiltinReds], KVs, Acc) ->
+builtin_reduce(rereduce, [<<"_count",_/binary>>|BuiltinReds], KVs, Acc) ->
Count = builtin_sum_rows(KVs),
- builtin_reduce(rereduce, BuiltinReds, KVs, [Count|Acc]).
+ builtin_reduce(rereduce, BuiltinReds, KVs, [Count|Acc]);
+builtin_reduce(Re, [<<"_stats",_/binary>>|BuiltinReds], KVs, Acc) ->
+ Stats = builtin_stats(Re, KVs),
+ builtin_reduce(Re, BuiltinReds, KVs, [Stats|Acc]).
builtin_sum_rows(KVs) ->
lists:foldl(fun
- ([_Key, Value], Acc) when is_number(Value) ->
+ ([_Key, Value], Acc) when is_number(Value), is_number(Acc) ->
Acc + Value;
+ ([_Key, Value], Acc) when is_list(Value), is_list(Acc) ->
+ sum_terms(Acc, Value);
+ ([_Key, Value], Acc) when is_number(Value), is_list(Acc) ->
+ sum_terms(Acc, [Value]);
+ ([_Key, Value], Acc) when is_list(Value), is_number(Acc) ->
+ sum_terms([Acc], Value);
(_Else, _Acc) ->
- throw({invalid_value, <<"builtin _sum function requires map values to be numbers">>})
+ throw({invalid_value, <<"builtin _sum function requires map values to be numbers or lists of numbers">>})
end, 0, KVs).
+sum_terms([], []) ->
+ [];
+sum_terms([_|_]=Xs, []) ->
+ Xs;
+sum_terms([], [_|_]=Ys) ->
+ Ys;
+sum_terms([X|Xs], [Y|Ys]) when is_number(X), is_number(Y) ->
+ [X+Y | sum_terms(Xs,Ys)];
+sum_terms(_, _) ->
+ throw({invalid_value, <<"builtin _sum function requires map values to be numbers or lists of numbers">>}).
+
+builtin_stats(reduce, [[_,First]|Rest]) when is_number(First) ->
+ Stats = lists:foldl(fun([_K,V], {S,C,Mi,Ma,Sq}) when is_number(V) ->
+ {S+V, C+1, lists:min([Mi, V]), lists:max([Ma, V]), Sq+(V*V)};
+ (_, _) ->
+ throw({invalid_value,
+ <<"builtin _stats function requires map values to be numbers">>})
+ end, {First,1,First,First,First*First}, Rest),
+ {Sum, Cnt, Min, Max, Sqr} = Stats,
+ {[{sum,Sum}, {count,Cnt}, {min,Min}, {max,Max}, {sumsqr,Sqr}]};
+
+builtin_stats(rereduce, [[_,First]|Rest]) ->
+ {[{sum,Sum0}, {count,Cnt0}, {min,Min0}, {max,Max0}, {sumsqr,Sqr0}]} = First,
+ Stats = lists:foldl(fun([_K,Red], {S,C,Mi,Ma,Sq}) ->
+ {[{sum,Sum}, {count,Cnt}, {min,Min}, {max,Max}, {sumsqr,Sqr}]} = Red,
+ {Sum+S, Cnt+C, lists:min([Min, Mi]), lists:max([Max, Ma]), Sqr+Sq}
+ end, {Sum0,Cnt0,Min0,Max0,Sqr0}, Rest),
+ {Sum, Cnt, Min, Max, Sqr} = Stats,
+ {[{sum,Sum}, {count,Cnt}, {min,Min}, {max,Max}, {sumsqr,Sqr}]}.
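
_sum now also accepts lists of numbers, summed element-wise with the longer
list's tail passed through, and _stats folds numeric values into a
five-field object. A worked sketch against these module-internal helpers,
inputs assumed:

    [4, 6, 5] = sum_terms([1, 2], [3, 4, 5]),
    {[{sum,6}, {count,2}, {min,2}, {max,4}, {sumsqr,20}]} =
        builtin_stats(reduce, [[<<"k1">>, 2], [<<"k2">>, 4]]).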
% use the function stored in ddoc.validate_doc_update to test an update.
-validate_doc_update(DDoc, EditDoc, DiskDoc, Ctx) ->
+validate_doc_update(DDoc, EditDoc, DiskDoc, Ctx, SecObj) ->
JsonEditDoc = couch_doc:to_json_obj(EditDoc, [revs]),
- JsonDiskDoc = json_doc(DiskDoc),
- case ddoc_prompt(DDoc, [<<"validate_doc_update">>], [JsonEditDoc, JsonDiskDoc, Ctx]) of
+ JsonDiskDoc = json_doc(DiskDoc),
+ case ddoc_prompt(DDoc, [<<"validate_doc_update">>], [JsonEditDoc, JsonDiskDoc, Ctx, SecObj]) of
1 ->
ok;
{[{<<"forbidden">>, Message}]} ->
@@ -183,13 +230,17 @@ json_doc(Doc) ->
couch_doc:to_json_obj(Doc, [revs]).
filter_docs(Req, Db, DDoc, FName, Docs) ->
- JsonReq = couch_httpd_external:json_req_obj(Req, Db),
+ JsonReq = case Req of
+ {json_req, JsonObj} ->
+ JsonObj;
+ #httpd{} = HttpReq ->
+ couch_httpd_external:json_req_obj(HttpReq, Db)
+ end,
JsonDocs = [couch_doc:to_json_obj(Doc, [revs]) || Doc <- Docs],
- JsonCtx = couch_util:json_user_ctx(Db),
- [true, Passes] = ddoc_prompt(DDoc, [<<"filters">>, FName], [JsonDocs, JsonReq, JsonCtx]),
+ [true, Passes] = ddoc_prompt(DDoc, [<<"filters">>, FName], [JsonDocs, JsonReq]),
{ok, Passes}.
-ddoc_proc_prompt({Proc, DDocId}, FunPath, Args) ->
+ddoc_proc_prompt({Proc, DDocId}, FunPath, Args) ->
proc_prompt(Proc, [<<"ddoc">>, DDocId, FunPath, Args]).
ddoc_prompt(DDoc, FunPath, Args) ->
@@ -214,100 +265,105 @@ init([]) ->
ok = couch_config:register(
fun("query_servers" ++ _, _) ->
- ?MODULE:stop()
+ supervisor:terminate_child(couch_secondary_services, query_servers),
+ supervisor:restart_child(couch_secondary_services, query_servers)
end),
ok = couch_config:register(
fun("native_query_servers" ++ _, _) ->
- ?MODULE:stop()
+ supervisor:terminate_child(couch_secondary_services, query_servers),
+ [supervisor:restart_child(couch_secondary_services, query_servers)]
+ end),
+ ok = couch_config:register(
+ fun("query_server_config" ++ _, _) ->
+ supervisor:terminate_child(couch_secondary_services, query_servers),
+ supervisor:restart_child(couch_secondary_services, query_servers)
end),
Langs = ets:new(couch_query_server_langs, [set, private]),
+ LangLimits = ets:new(couch_query_server_lang_limits, [set, private]),
PidProcs = ets:new(couch_query_server_pid_langs, [set, private]),
LangProcs = ets:new(couch_query_server_procs, [set, private]),
- InUse = ets:new(couch_query_server_used, [set, private]),
+
+ ProcTimeout = list_to_integer(couch_config:get(
+ "couchdb", "os_process_timeout", "5000")),
+ ReduceLimit = list_to_atom(
+ couch_config:get("query_server_config","reduce_limit","true")),
+ OsProcLimit = list_to_integer(
+ couch_config:get("query_server_config","os_process_limit","10")),
+
% 'query_servers' specifies an OS command-line to execute.
lists:foreach(fun({Lang, Command}) ->
+ true = ets:insert(LangLimits, {?l2b(Lang), OsProcLimit, 0}),
true = ets:insert(Langs, {?l2b(Lang),
couch_os_process, start_link, [Command]})
end, couch_config:get("query_servers")),
% 'native_query_servers' specifies a {Module, Func, Arg} tuple.
lists:foreach(fun({Lang, SpecStr}) ->
{ok, {Mod, Fun, SpecArg}} = couch_util:parse_term(SpecStr),
+ true = ets:insert(LangLimits, {?l2b(Lang), 0, 0}), % 0 means no limit
true = ets:insert(Langs, {?l2b(Lang),
Mod, Fun, SpecArg})
end, couch_config:get("native_query_servers")),
- process_flag(trap_exit, true),
- {ok, {Langs, % Keyed by language name, value is {Mod,Func,Arg}
- PidProcs, % Keyed by PID, valus is a #proc record.
- LangProcs, % Keyed by language name, value is a #proc record
- InUse % Keyed by PID, value is #proc record.
- }}.
-terminate(_Reason, _Server) ->
+
+ process_flag(trap_exit, true),
+ {ok, #qserver{
+ langs = Langs, % Keyed by language name, value is {Mod,Func,Arg}
+        pid_procs = PidProcs, % Keyed by PID, value is a #proc record.
+ lang_procs = LangProcs, % Keyed by language name, value is a #proc record
+ lang_limits = LangLimits, % Keyed by language name, value is {Lang, Limit, Current}
+ config = {[{<<"reduce_limit">>, ReduceLimit},{<<"timeout">>, ProcTimeout}]}
+ }}.
+
+terminate(_Reason, #qserver{pid_procs=PidProcs}) ->
+ [couch_util:shutdown_sync(P) || {P,_} <- ets:tab2list(PidProcs)],
ok.
-handle_call({get_proc, #doc{body={Props}}=DDoc, DDocKey}, _From, {Langs, PidProcs, LangProcs, InUse}=Server) ->
- % Note to future self. Add max process limit.
- Lang = proplists:get_value(<<"language">>, Props, <<"javascript">>),
- case ets:lookup(LangProcs, Lang) of
- [{Lang, [P|Rest]}] ->
- % find a proc in the set that has the DDoc
- case proc_with_ddoc(DDoc, DDocKey, [P|Rest]) of
- {ok, Proc} ->
- % looks like the proc isn't getting dropped from the list.
- % we need to change this to take a fun for equality checking
- % so we can do a comparison on portnum
- rem_from_list(LangProcs, Lang, Proc),
- add_to_list(InUse, Lang, Proc),
- {reply, {ok, Proc, get_query_server_config()}, Server};
- Error ->
- {reply, Error, Server}
- end;
- _ ->
- case (catch new_process(Langs, Lang)) of
- {ok, Proc} ->
- add_value(PidProcs, Proc#proc.pid, Proc),
- case proc_with_ddoc(DDoc, DDocKey, [Proc]) of
- {ok, Proc2} ->
- rem_from_list(LangProcs, Lang, Proc),
- add_to_list(InUse, Lang, Proc2),
- {reply, {ok, Proc2, get_query_server_config()}, Server};
- Error ->
- {reply, Error, Server}
- end;
- Error ->
- {reply, Error, Server}
- end
+handle_call({get_proc, #doc{body={Props}}=DDoc, DDocKey}, From, Server) ->
+ Lang = couch_util:get_value(<<"language">>, Props, <<"javascript">>),
+ case lang_proc(Lang, Server, fun(Procs) ->
+ % find a proc in the set that has the DDoc
+ proc_with_ddoc(DDoc, DDocKey, Procs)
+ end) of
+ {ok, Proc} ->
+ {reply, {ok, Proc, Server#qserver.config}, Server};
+ wait ->
+ {noreply, add_to_waitlist({DDoc, DDocKey}, From, Server)};
+ Error ->
+ {reply, Error, Server}
end;
-handle_call({get_proc, Lang}, _From, {Langs, PidProcs, LangProcs, InUse}=Server) ->
- % Note to future self. Add max process limit.
- case ets:lookup(LangProcs, Lang) of
- [{Lang, [Proc|_]}] ->
- add_value(PidProcs, Proc#proc.pid, Proc),
- rem_from_list(LangProcs, Lang, Proc),
- add_to_list(InUse, Lang, Proc),
- {reply, {ok, Proc, get_query_server_config()}, Server};
- _ ->
- case (catch new_process(Langs, Lang)) of
- {ok, Proc} ->
- add_value(PidProcs, Proc#proc.pid, Proc),
- add_to_list(InUse, Lang, Proc),
- {reply, {ok, Proc, get_query_server_config()}, Server};
- Error ->
- {reply, Error, Server}
- end
+handle_call({get_proc, Lang}, From, Server) ->
+ case lang_proc(Lang, Server, fun([P|_Procs]) ->
+ {ok, P}
+ end) of
+ {ok, Proc} ->
+ {reply, {ok, Proc, Server#qserver.config}, Server};
+ wait ->
+ {noreply, add_to_waitlist({Lang}, From, Server)};
+ Error ->
+ {reply, Error, Server}
end;
-handle_call({ret_proc, Proc}, _From, {_, _, LangProcs, InUse}=Server) ->
+handle_call({unlink_proc, Pid}, _From, #qserver{pid_procs=PidProcs}=Server) ->
+ rem_value(PidProcs, Pid),
+ unlink(Pid),
+ {reply, ok, Server};
+handle_call({ret_proc, Proc}, _From, #qserver{
+ pid_procs=PidProcs,
+ lang_procs=LangProcs}=Server) ->
% Along with max process limit, here we should check
% if we're over the limit and discard when we are.
+ add_value(PidProcs, Proc#proc.pid, Proc),
add_to_list(LangProcs, Proc#proc.lang, Proc),
- rem_from_list(InUse, Proc#proc.lang, Proc),
- {reply, true, Server}.
+ link(Proc#proc.pid),
+ {reply, true, service_waitlist(Server)}.
handle_cast(_Whatever, Server) ->
{noreply, Server}.
-handle_info({'EXIT', Pid, Status}, {_, PidProcs, LangProcs, InUse}=Server) ->
+handle_info({'EXIT', Pid, Status}, #qserver{
+ pid_procs=PidProcs,
+ lang_procs=LangProcs,
+ lang_limits=LangLimits}=Server) ->
case ets:lookup(PidProcs, Pid) of
[{Pid, Proc}] ->
case Status of
@@ -316,11 +372,16 @@ handle_info({'EXIT', Pid, Status}, {_, PidProcs, LangProcs, InUse}=Server) ->
end,
rem_value(PidProcs, Pid),
catch rem_from_list(LangProcs, Proc#proc.lang, Proc),
- catch rem_from_list(InUse, Proc#proc.lang, Proc),
- {noreply, Server};
+ [{Lang, Lim, Current}] = ets:lookup(LangLimits, Proc#proc.lang),
+ true = ets:insert(LangLimits, {Lang, Lim, Current-1}),
+ {noreply, service_waitlist(Server)};
[] ->
- ?LOG_DEBUG("Unknown linked process died: ~p (reason: ~p)", [Pid, Status]),
- {stop, Status, Server}
+ case Status of
+ normal ->
+ {noreply, Server};
+ _ ->
+ {stop, Status, Server}
+ end
end.
code_change(_OldVsn, State, _Extra) ->
@@ -328,27 +389,94 @@ code_change(_OldVsn, State, _Extra) ->
% Private API
-get_query_server_config() ->
- ReduceLimit = list_to_atom(
- couch_config:get("query_server_config","reduce_limit","true")),
- {[{<<"reduce_limit">>, ReduceLimit}]}.
-
-new_process(Langs, Lang) ->
- case ets:lookup(Langs, Lang) of
- [{Lang, Mod, Func, Arg}] ->
- {ok, Pid} = apply(Mod, Func, Arg),
- {ok, #proc{lang=Lang,
- pid=Pid,
- % Called via proc_prompt, proc_set_timeout, and proc_stop
- prompt_fun={Mod, prompt},
- set_timeout_fun={Mod, set_timeout},
- stop_fun={Mod, stop}}};
+add_to_waitlist(Info, From, #qserver{waitlist=Waitlist}=Server) ->
+ Server#qserver{waitlist=[{Info, From}|Waitlist]}.
+
+service_waitlist(#qserver{waitlist=[]}=Server) ->
+ Server;
+service_waitlist(#qserver{waitlist=Waitlist}=Server) ->
+ [Oldest|RevWList] = lists:reverse(Waitlist),
+ case service_waiting(Oldest, Server) of
+ ok ->
+ Server#qserver{waitlist=lists:reverse(RevWList)};
+ wait ->
+ Server#qserver{waitlist=Waitlist}
+ end.
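The waitlist is a plain list used as a queue: add_to_waitlist/3 prepends new waiters, so the oldest sits at the tail and service_waitlist/1 reverses the list to reach it. The same idea in isolation (an illustrative helper, not in the patch):

    %% Pops the oldest waiter from a prepend-only list, or 'empty'.
    pop_oldest([]) ->
        empty;
    pop_oldest(Waitlist) ->
        [Oldest | RevRest] = lists:reverse(Waitlist),
        {Oldest, lists:reverse(RevRest)}.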
+
+% TODO: get rid of the duplication with the get_proc clauses in handle_call/3
+service_waiting({{#doc{body={Props}}=DDoc, DDocKey}, From}, Server) ->
+ Lang = couch_util:get_value(<<"language">>, Props, <<"javascript">>),
+ case lang_proc(Lang, Server, fun(Procs) ->
+ % find a proc in the set that has the DDoc
+ proc_with_ddoc(DDoc, DDocKey, Procs)
+ end) of
+ {ok, Proc} ->
+ gen_server:reply(From, {ok, Proc, Server#qserver.config}),
+ ok;
+ wait -> % this should never happen
+ wait;
+ Error ->
+ gen_server:reply(From, Error),
+ ok
+ end;
+service_waiting({{Lang}, From}, Server) ->
+ case lang_proc(Lang, Server, fun([P|_Procs]) ->
+ {ok, P}
+ end) of
+ {ok, Proc} ->
+ gen_server:reply(From, {ok, Proc, Server#qserver.config}),
+ ok;
+ wait -> % this should never happen
+ wait;
+ Error ->
+ gen_server:reply(From, Error),
+ ok
+ end.
+
+lang_proc(Lang, #qserver{
+ langs=Langs,
+ pid_procs=PidProcs,
+ lang_procs=LangProcs,
+ lang_limits=LangLimits}, PickFun) ->
+ % Reuse an idle proc for Lang, or spawn one if under the language's limit.
+ case ets:lookup(LangProcs, Lang) of
+ [{Lang, [P|Procs]}] ->
+ {ok, Proc} = PickFun([P|Procs]),
+ rem_from_list(LangProcs, Lang, Proc),
+ {ok, Proc};
_ ->
- {unknown_query_language, Lang}
+ case (catch new_process(Langs, LangLimits, Lang)) of
+ {ok, Proc} ->
+ add_value(PidProcs, Proc#proc.pid, Proc),
+ PickFun([Proc]);
+ ErrorOrWait ->
+ ErrorOrWait
+ end
+ end.
+
+new_process(Langs, LangLimits, Lang) ->
+ [{Lang, Lim, Current}] = ets:lookup(LangLimits, Lang),
+ if (Lim == 0) or (Current < Lim) -> % Lim == 0 means no limit
+ % we are below the limit for our language, make a new one
+ case ets:lookup(Langs, Lang) of
+ [{Lang, Mod, Func, Arg}] ->
+ {ok, Pid} = apply(Mod, Func, Arg),
+ true = ets:insert(LangLimits, {Lang, Lim, Current+1}),
+ {ok, #proc{lang=Lang,
+ pid=Pid,
+ % Called via proc_prompt, proc_set_timeout, and proc_stop
+ prompt_fun={Mod, prompt},
+ set_timeout_fun={Mod, set_timeout},
+ stop_fun={Mod, stop}}};
+ _ ->
+ {unknown_query_language, Lang}
+ end;
+ true ->
+ wait
end.
proc_with_ddoc(DDoc, DDocKey, LangProcs) ->
- DDocProcs = lists:filter(fun(#proc{ddoc_keys=Keys}) ->
+ DDocProcs = lists:filter(fun(#proc{ddoc_keys=Keys}) ->
lists:any(fun(Key) ->
Key == DDocKey
end, Keys)
@@ -390,13 +518,13 @@ teach_ddoc(DDoc, {DDocId, _Rev}=DDocKey, #proc{ddoc_keys=Keys}=Proc) ->
get_ddoc_process(#doc{} = DDoc, DDocKey) ->
% remove this case statement
case gen_server:call(couch_query_servers, {get_proc, DDoc, DDocKey}) of
- {ok, Proc, QueryConfig} ->
+ {ok, Proc, {QueryConfig}} ->
% process knows the ddoc
- case (catch proc_prompt(Proc, [<<"reset">>, QueryConfig])) of
+ case (catch proc_prompt(Proc, [<<"reset">>, {QueryConfig}])) of
true ->
- proc_set_timeout(Proc, list_to_integer(couch_config:get(
- "couchdb", "os_process_timeout", "5000"))),
+ proc_set_timeout(Proc, couch_util:get_value(<<"timeout">>, QueryConfig)),
link(Proc#proc.pid),
+ gen_server:call(couch_query_servers, {unlink_proc, Proc#proc.pid}),
Proc;
_ ->
catch proc_stop(Proc),
@@ -406,19 +534,14 @@ get_ddoc_process(#doc{} = DDoc, DDocKey) ->
throw(Error)
end.
-ret_ddoc_process(Proc) ->
- true = gen_server:call(couch_query_servers, {ret_proc, Proc}),
- catch unlink(Proc#proc.pid),
- ok.
-
get_os_process(Lang) ->
case gen_server:call(couch_query_servers, {get_proc, Lang}) of
- {ok, Proc, QueryConfig} ->
- case (catch proc_prompt(Proc, [<<"reset">>, QueryConfig])) of
+ {ok, Proc, {QueryConfig}} ->
+ case (catch proc_prompt(Proc, [<<"reset">>, {QueryConfig}])) of
true ->
- proc_set_timeout(Proc, list_to_integer(couch_config:get(
- "couchdb", "os_process_timeout", "5000"))),
+ proc_set_timeout(Proc, couch_util:get_value(<<"timeout">>, QueryConfig)),
link(Proc#proc.pid),
+ gen_server:call(couch_query_servers, {unlink_proc, Proc#proc.pid}),
Proc;
_ ->
catch proc_stop(Proc),
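Net effect of the get/ret changes for callers: the client links to the proc while using it and the server drops its own link via the new unlink_proc call, so a proc is linked to exactly one side at a time. A rough usage sketch, assuming the module's get_os_process/1 and a matching ret_os_process/1 (assumed here) that issues the ret_proc call:

    %% Illustrative wrapper; error handling elided.
    with_os_process(Lang, Fun) ->
        Proc = couch_query_servers:get_os_process(Lang),
        try
            Fun(Proc)
        after
            %% hands the link back to couch_query_servers via ret_proc
            couch_query_servers:ret_os_process(Proc)
        end.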
diff --git a/src/couchdb/couch_ref_counter.erl b/src/couchdb/couch_ref_counter.erl
index 5c8e7437..5a111ab6 100644
--- a/src/couchdb/couch_ref_counter.erl
+++ b/src/couchdb/couch_ref_counter.erl
@@ -24,7 +24,7 @@ drop(RefCounterPid) ->
drop(RefCounterPid, self()).
drop(RefCounterPid, Pid) ->
- gen_server:cast(RefCounterPid, {drop, Pid}).
+ gen_server:call(RefCounterPid, {drop, Pid}).
add(RefCounterPid) ->
@@ -40,17 +40,18 @@ count(RefCounterPid) ->
-record(srv,
{
- referrers=dict:new() % a dict of each ref counting proc.
+ referrers=dict:new(), % a dict of each ref counting proc.
+ child_procs=[]
}).
init({Pid, ChildProcs}) ->
[link(ChildProc) || ChildProc <- ChildProcs],
Referrers = dict:from_list([{Pid, {erlang:monitor(process, Pid), 1}}]),
- {ok, #srv{referrers=Referrers}}.
+ {ok, #srv{referrers=Referrers, child_procs=ChildProcs}}.
-terminate(Reason, _Srv) ->
- couch_util:terminate_linked(Reason),
+terminate(_Reason, #srv{child_procs=ChildProcs}) ->
+ [couch_util:shutdown_sync(Pid) || Pid <- ChildProcs],
ok.
@@ -65,10 +66,8 @@ handle_call({add, Pid},_From, #srv{referrers=Referrers}=Srv) ->
{reply, ok, Srv#srv{referrers=Referrers2}};
handle_call(count, _From, Srv) ->
{monitors, Monitors} = process_info(self(), monitors),
- {reply, length(Monitors), Srv}.
-
-
-handle_cast({drop, Pid}, #srv{referrers=Referrers}=Srv) ->
+ {reply, length(Monitors), Srv};
+handle_call({drop, Pid}, _From, #srv{referrers=Referrers}=Srv) ->
Referrers2 =
case dict:find(Pid, Referrers) of
{ok, {MonRef, 1}} ->
@@ -79,7 +78,16 @@ handle_cast({drop, Pid}, #srv{referrers=Referrers}=Srv) ->
error ->
Referrers
end,
- maybe_close_async(Srv#srv{referrers=Referrers2}).
+ Srv2 = Srv#srv{referrers=Referrers2},
+ case should_close() of
+ true ->
+ {stop,normal,ok,Srv2};
+ false ->
+ {reply, ok, Srv2}
+ end.
+
+handle_cast(Msg, _Srv) ->
+ exit({unknown_msg, Msg}).
code_change(_OldVsn, State, _Extra) ->
@@ -87,21 +95,17 @@ code_change(_OldVsn, State, _Extra) ->
handle_info({'DOWN', MonRef, _, Pid, _}, #srv{referrers=Referrers}=Srv) ->
{ok, {MonRef, _RefCount}} = dict:find(Pid, Referrers),
- maybe_close_async(Srv#srv{referrers=dict:erase(Pid, Referrers)}).
+ Srv2 = Srv#srv{referrers=dict:erase(Pid, Referrers)},
+ case should_close() of
+ true ->
+ {stop,normal,Srv2};
+ false ->
+ {noreply,Srv2}
+ end.
should_close() ->
case process_info(self(), monitors) of
- {monitors, []} ->
- true;
- _ ->
- false
- end.
-
-maybe_close_async(Srv) ->
- case should_close() of
- true ->
- {stop,normal,Srv};
- false ->
- {noreply,Srv}
+ {monitors, []} -> true;
+ _ -> false
end.
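Turning drop/2 into a call closes a race: with the old cast, a caller could drop its reference and then observe a stale count (or a still-alive server) before the message was processed. The two shapes side by side (a generic gen_server sketch, not CouchDB-specific):

    %% Fire-and-forget: caller resumes before the server handles the drop.
    drop_async(Server, Ref) ->
        gen_server:cast(Server, {drop, Ref}).

    %% Synchronous: the reply (or the {stop, normal, ok, State} shutdown
    %% path) is produced before the caller continues.
    drop_sync(Server, Ref) ->
        gen_server:call(Server, {drop, Ref}).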
diff --git a/src/couchdb/couch_rep.erl b/src/couchdb/couch_rep.erl
index 98413f23..04051b8e 100644
--- a/src/couchdb/couch_rep.erl
+++ b/src/couchdb/couch_rep.erl
@@ -12,12 +12,18 @@
-module(couch_rep).
-behaviour(gen_server).
--export([init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2,
+-export([init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2,
code_change/3]).
-export([replicate/2, checkpoint/1]).
+-export([ensure_rep_db_exists/0, make_replication_id/2]).
+-export([start_replication/3, end_replication/1, get_result/4]).
+-export([update_rep_doc/2]).
-include("couch_db.hrl").
+-include("couch_js_functions.hrl").
+
+-define(REP_ID_VERSION, 2).
-record(state, {
changes_feed,
@@ -34,6 +40,7 @@
start_seq,
history,
+ session_id,
source_log,
target_log,
rep_starttime,
@@ -45,7 +52,10 @@
complete = false,
committed_seq = 0,
- stats = nil
+ stats = nil,
+ rep_doc = nil,
+ source_db_update_notifier = nil,
+ target_db_update_notifier = nil
}).
%% convenience function to do a simple replication from the shell
@@ -58,70 +68,95 @@ replicate(Source, Target) when is_binary(Source), is_binary(Target) ->
%% function handling POST to _replicate
replicate({Props}=PostBody, UserCtx) ->
- {BaseId, Extension} = make_replication_id(PostBody, UserCtx),
- Replicator = {BaseId ++ Extension,
- {gen_server, start_link, [?MODULE, [BaseId, PostBody, UserCtx], []]},
+ RepId = make_replication_id(PostBody, UserCtx),
+ case couch_util:get_value(<<"cancel">>, Props, false) of
+ true ->
+ end_replication(RepId);
+ false ->
+ Server = start_replication(PostBody, RepId, UserCtx),
+ get_result(Server, RepId, PostBody, UserCtx)
+ end.
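Cancellation now flows through the same id derivation as a start, so a cancel request must carry the same properties as the original; otherwise the computed {BaseId, Extension} will not match a running child. Roughly (source/target values illustrative):

    PostBody = {[
        {<<"source">>, <<"http://example.org/db/">>},
        {<<"target">>, <<"db_copy">>},
        {<<"continuous">>, true},
        {<<"cancel">>, true}    % routes to end_replication/1
    ]},
    %% UserCtx assumed bound, as in the _replicate HTTP handler
    couch_rep:replicate(PostBody, UserCtx).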
+
+end_replication({BaseId, Extension}) ->
+ RepId = BaseId ++ Extension,
+ case supervisor:terminate_child(couch_rep_sup, RepId) of
+ {error, not_found} = R ->
+ R;
+ ok ->
+ ok = supervisor:delete_child(couch_rep_sup, RepId),
+ {ok, {cancelled, ?l2b(BaseId)}}
+ end.
+
+start_replication(RepDoc, {BaseId, Extension}, UserCtx) ->
+ Replicator = {
+ BaseId ++ Extension,
+ {gen_server, start_link,
+ [?MODULE, [BaseId, RepDoc, UserCtx], []]},
temporary,
1,
worker,
[?MODULE]
},
+ start_replication_server(Replicator).
- Server = start_replication_server(Replicator),
+checkpoint(Server) ->
+ gen_server:cast(Server, do_checkpoint).
- case proplists:get_value(<<"continuous">>, Props, false) of
+get_result(Server, {BaseId, _Extension}, {Props} = PostBody, UserCtx) ->
+ case couch_util:get_value(<<"continuous">>, Props, false) of
true ->
{ok, {continuous, ?l2b(BaseId)}};
false ->
- get_result(Server, PostBody, UserCtx)
+ try gen_server:call(Server, get_result, infinity) of
+ retry -> replicate(PostBody, UserCtx);
+ Else -> Else
+ catch
+ exit:{noproc, {gen_server, call, [Server, get_result, infinity]}} ->
+ %% oops, this replication just finished -- restart it.
+ replicate(PostBody, UserCtx);
+ exit:{normal, {gen_server, call, [Server, get_result, infinity]}} ->
+ %% we made the call during terminate
+ replicate(PostBody, UserCtx)
+ end
end.
-checkpoint(Server) ->
- gen_server:cast(Server, do_checkpoint).
-
-get_result(Server, PostBody, UserCtx) ->
- try gen_server:call(Server, get_result, infinity) of
- retry -> replicate(PostBody, UserCtx);
- Else -> Else
+init(InitArgs) ->
+ try
+ do_init(InitArgs)
catch
- exit:{noproc, {gen_server, call, [Server, get_result , infinity]}} ->
- %% oops, this replication just finished -- restart it.
- replicate(PostBody, UserCtx);
- exit:{normal, {gen_server, call, [Server, get_result , infinity]}} ->
- %% we made the call during terminate
- replicate(PostBody, UserCtx)
+ throw:Error ->
+ {stop, Error}
end.
-init(InitArgs) ->
- try do_init(InitArgs)
- catch throw:{db_not_found, DbUrl} -> {stop, {db_not_found, DbUrl}} end.
-
-do_init([RepId, {PostProps}, UserCtx] = InitArgs) ->
+do_init([RepId, {PostProps} = RepDoc, UserCtx] = InitArgs) ->
process_flag(trap_exit, true),
- SourceProps = proplists:get_value(<<"source">>, PostProps),
- TargetProps = proplists:get_value(<<"target">>, PostProps),
+ SourceProps = couch_util:get_value(<<"source">>, PostProps),
+ TargetProps = couch_util:get_value(<<"target">>, PostProps),
- Continuous = proplists:get_value(<<"continuous">>, PostProps, false),
- CreateTarget = proplists:get_value(<<"create_target">>, PostProps, false),
+ Continuous = couch_util:get_value(<<"continuous">>, PostProps, false),
+ CreateTarget = couch_util:get_value(<<"create_target">>, PostProps, false),
- Source = open_db(SourceProps, UserCtx),
- Target = open_db(TargetProps, UserCtx, CreateTarget),
-
- SourceLog = open_replication_log(Source, RepId),
- TargetLog = open_replication_log(Target, RepId),
+ ProxyParams = parse_proxy_params(
+ couch_util:get_value(<<"proxy">>, PostProps, [])),
+ Source = open_db(SourceProps, UserCtx, ProxyParams),
+ Target = open_db(TargetProps, UserCtx, ProxyParams, CreateTarget),
SourceInfo = dbinfo(Source),
TargetInfo = dbinfo(Target),
-
+
+ maybe_set_triggered(RepDoc, RepId),
+
+ [SourceLog, TargetLog] = find_replication_logs(
+ [Source, Target], RepId, {PostProps}, UserCtx),
{StartSeq, History} = compare_replication_logs(SourceLog, TargetLog),
{ok, ChangesFeed} =
- couch_rep_changes_feed:start_link(self(), Source, StartSeq, PostProps),
+ couch_rep_changes_feed:start_link(self(), Source, StartSeq, PostProps),
{ok, MissingRevs} =
- couch_rep_missing_revs:start_link(self(), Target, ChangesFeed, PostProps),
+ couch_rep_missing_revs:start_link(self(), Target, ChangesFeed, PostProps),
{ok, Reader} =
- couch_rep_reader:start_link(self(), Source, MissingRevs, PostProps),
+ couch_rep_reader:start_link(self(), Source, MissingRevs, PostProps),
{ok, Writer} =
couch_rep_writer:start_link(self(), Target, Reader, PostProps),
@@ -152,11 +187,15 @@ do_init([RepId, {PostProps}, UserCtx] = InitArgs) ->
start_seq = StartSeq,
history = History,
+ session_id = couch_uuids:random(),
source_log = SourceLog,
target_log = TargetLog,
rep_starttime = httpd_util:rfc1123_date(),
- src_starttime = proplists:get_value(instance_start_time, SourceInfo),
- tgt_starttime = proplists:get_value(instance_start_time, TargetInfo)
+ src_starttime = couch_util:get_value(instance_start_time, SourceInfo),
+ tgt_starttime = couch_util:get_value(instance_start_time, TargetInfo),
+ rep_doc = RepDoc,
+ source_db_update_notifier = source_db_update_notifier(Source),
+ target_db_update_notifier = target_db_update_notifier(Target)
},
{ok, State}.
@@ -164,7 +203,21 @@ handle_call(get_result, From, #state{complete=true, listeners=[]} = State) ->
{stop, normal, State#state{listeners=[From]}};
handle_call(get_result, From, State) ->
Listeners = State#state.listeners,
- {noreply, State#state{listeners=[From|Listeners]}}.
+ {noreply, State#state{listeners=[From|Listeners]}};
+
+handle_call(get_source_db, _From, #state{source = Source} = State) ->
+ {reply, {ok, Source}, State};
+
+handle_call(get_target_db, _From, #state{target = Target} = State) ->
+ {reply, {ok, Target}, State}.
+
+handle_cast(reopen_source_db, #state{source = Source} = State) ->
+ {ok, NewSource} = couch_db:reopen(Source),
+ {noreply, State#state{source = NewSource}};
+
+handle_cast(reopen_target_db, #state{target = Target} = State) ->
+ {ok, NewTarget} = couch_db:reopen(Target),
+ {noreply, State#state{target = NewTarget}};
handle_cast(do_checkpoint, State) ->
{noreply, do_checkpoint(State)};
@@ -213,23 +266,26 @@ handle_info({'EXIT', _Pid, Reason}, State) ->
{stop, Reason, State}.
terminate(normal, #state{checkpoint_scheduled=nil} = State) ->
- do_terminate(State);
+ do_terminate(State),
+ update_rep_doc(
+ State#state.rep_doc, [{<<"_replication_state">>, <<"completed">>}]);
terminate(normal, State) ->
timer:cancel(State#state.checkpoint_scheduled),
- do_terminate(do_checkpoint(State));
+ do_terminate(do_checkpoint(State)),
+ update_rep_doc(
+ State#state.rep_doc, [{<<"_replication_state">>, <<"completed">>}]);
-terminate(Reason, State) ->
- #state{
- listeners = Listeners,
- source = Source,
- target = Target,
- stats = Stats
- } = State,
+terminate(shutdown, #state{listeners = Listeners} = State) ->
+ % continuous replication stopped
+ [gen_server:reply(L, {ok, stopped}) || L <- Listeners],
+ terminate_cleanup(State);
+
+terminate(Reason, #state{listeners = Listeners} = State) ->
[gen_server:reply(L, {error, Reason}) || L <- Listeners],
- ets:delete(Stats),
- close_db(Target),
- close_db(Source).
+ terminate_cleanup(State),
+ update_rep_doc(
+ State#state.rep_doc, [{<<"_replication_state">>, <<"error">>}]).
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
@@ -254,29 +310,37 @@ start_replication_server(Replicator) ->
{error, {already_started, Pid}} =
supervisor:start_child(couch_rep_sup, Replicator),
?LOG_DEBUG("replication ~p already running at ~p", [RepId, Pid]),
- Pid
+ Pid;
+ {error, {db_not_found, DbUrl}} ->
+ throw({db_not_found, <<"could not open ", DbUrl/binary>>});
+ {error, {unauthorized, DbUrl}} ->
+ throw({unauthorized,
+ <<"unauthorized to access database ", DbUrl/binary>>})
end;
{error, {already_started, Pid}} ->
?LOG_DEBUG("replication ~p already running at ~p", [RepId, Pid]),
Pid;
{error, {{db_not_found, DbUrl}, _}} ->
- throw({db_not_found, <<"could not open ", DbUrl/binary>>})
+ throw({db_not_found, <<"could not open ", DbUrl/binary>>});
+ {error, {{unauthorized, DbUrl}, _}} ->
+ throw({unauthorized,
+ <<"unauthorized to access database ", DbUrl/binary>>})
end.
compare_replication_logs(SrcDoc, TgtDoc) ->
#doc{body={RepRecProps}} = SrcDoc,
#doc{body={RepRecPropsTgt}} = TgtDoc,
- case proplists:get_value(<<"session_id">>, RepRecProps) ==
- proplists:get_value(<<"session_id">>, RepRecPropsTgt) of
+ case couch_util:get_value(<<"session_id">>, RepRecProps) ==
+ couch_util:get_value(<<"session_id">>, RepRecPropsTgt) of
true ->
% if the records have the same session id,
% then we have a valid replication history
- OldSeqNum = proplists:get_value(<<"source_last_seq">>, RepRecProps, 0),
- OldHistory = proplists:get_value(<<"history">>, RepRecProps, []),
+ OldSeqNum = couch_util:get_value(<<"source_last_seq">>, RepRecProps, 0),
+ OldHistory = couch_util:get_value(<<"history">>, RepRecProps, []),
{OldSeqNum, OldHistory};
false ->
- SourceHistory = proplists:get_value(<<"history">>, RepRecProps, []),
- TargetHistory = proplists:get_value(<<"history">>, RepRecPropsTgt, []),
+ SourceHistory = couch_util:get_value(<<"history">>, RepRecProps, []),
+ TargetHistory = couch_util:get_value(<<"history">>, RepRecPropsTgt, []),
?LOG_INFO("Replication records differ. "
"Scanning histories to find a common ancestor.", []),
?LOG_DEBUG("Record on source:~p~nRecord on target:~p~n",
@@ -288,18 +352,18 @@ compare_rep_history(S, T) when S =:= [] orelse T =:= [] ->
?LOG_INFO("no common ancestry -- performing full replication", []),
{0, []};
compare_rep_history([{S}|SourceRest], [{T}|TargetRest]=Target) ->
- SourceId = proplists:get_value(<<"session_id">>, S),
+ SourceId = couch_util:get_value(<<"session_id">>, S),
case has_session_id(SourceId, Target) of
true ->
- RecordSeqNum = proplists:get_value(<<"recorded_seq">>, S, 0),
+ RecordSeqNum = couch_util:get_value(<<"recorded_seq">>, S, 0),
?LOG_INFO("found a common replication record with source_seq ~p",
[RecordSeqNum]),
{RecordSeqNum, SourceRest};
false ->
- TargetId = proplists:get_value(<<"session_id">>, T),
+ TargetId = couch_util:get_value(<<"session_id">>, T),
case has_session_id(TargetId, SourceRest) of
true ->
- RecordSeqNum = proplists:get_value(<<"recorded_seq">>, T, 0),
+ RecordSeqNum = couch_util:get_value(<<"recorded_seq">>, T, 0),
?LOG_INFO("found a common replication record with source_seq ~p",
[RecordSeqNum]),
{RecordSeqNum, TargetRest};
@@ -314,13 +378,13 @@ close_db(Db) ->
couch_db:close(Db).
dbname(#http_db{url = Url}) ->
- Url;
+ couch_util:url_strip_password(Url);
dbname(#db{name = Name}) ->
Name.
dbinfo(#http_db{} = Db) ->
{DbProps} = couch_rep_httpc:request(Db),
- [{list_to_atom(?b2l(K)), V} || {K,V} <- DbProps];
+ [{list_to_existing_atom(?b2l(K)), V} || {K,V} <- DbProps];
dbinfo(Db) ->
{ok, Info} = couch_db:get_db_info(Db),
Info.
@@ -331,14 +395,9 @@ do_terminate(State) ->
committed_seq = NewSeq,
listeners = Listeners,
source = Source,
- target = Target,
continuous = Continuous,
- stats = Stats,
source_log = #doc{body={OldHistory}}
} = State,
- couch_task_status:update("Finishing"),
- ets:delete(Stats),
- close_db(Target),
NewRepHistory = case CheckpointHistory of
nil ->
@@ -366,22 +425,35 @@ do_terminate(State) ->
false ->
[gen_server:reply(R, retry) || R <- OtherListeners]
end,
- close_db(Source).
+ couch_task_status:update("Finishing"),
+ terminate_cleanup(State).
+
+terminate_cleanup(State) ->
+ close_db(State#state.source),
+ close_db(State#state.target),
+ stop_db_update_notifier(State#state.source_db_update_notifier),
+ stop_db_update_notifier(State#state.target_db_update_notifier),
+ ets:delete(State#state.stats).
+
+stop_db_update_notifier(nil) ->
+ ok;
+stop_db_update_notifier(Notifier) ->
+ couch_db_update_notifier:stop(Notifier).
has_session_id(_SessionId, []) ->
false;
has_session_id(SessionId, [{Props} | Rest]) ->
- case proplists:get_value(<<"session_id">>, Props, nil) of
+ case couch_util:get_value(<<"session_id">>, Props, nil) of
SessionId ->
true;
_Else ->
has_session_id(SessionId, Rest)
end.
-maybe_append_options(Options, Props) ->
+maybe_append_options(Options, {Props}) ->
lists:foldl(fun(Option, Acc) ->
- Acc ++
- case proplists:get_value(Option, Props, false) of
+ Acc ++
+ case couch_util:get_value(Option, Props, false) of
true ->
"+" ++ ?b2l(Option);
false ->
@@ -389,25 +461,67 @@ maybe_append_options(Options, Props) ->
end
end, [], Options).
-make_replication_id({Props}, UserCtx) ->
- %% funky algorithm to preserve backwards compatibility
- {ok, HostName} = inet:gethostname(),
- % Port = mochiweb_socket_server:get(couch_httpd, port),
- Src = get_rep_endpoint(UserCtx, proplists:get_value(<<"source">>, Props)),
- Tgt = get_rep_endpoint(UserCtx, proplists:get_value(<<"target">>, Props)),
- Base = couch_util:to_hex(erlang:md5(term_to_binary([HostName, Src, Tgt]))),
+make_replication_id(RepProps, UserCtx) ->
+ BaseId = make_replication_id(RepProps, UserCtx, ?REP_ID_VERSION),
Extension = maybe_append_options(
- [<<"continuous">>, <<"create_target">>], Props),
- {Base, Extension}.
+ [<<"continuous">>, <<"create_target">>], RepProps),
+ {BaseId, Extension}.
+
+% Versioned clauses for generating replication ids.
+% If a change is made to how replications are identified,
+% add a new clause and increase ?REP_ID_VERSION at the top.
+make_replication_id({Props}, UserCtx, 2) ->
+ {ok, HostName} = inet:gethostname(),
+ Port = mochiweb_socket_server:get(couch_httpd, port),
+ Src = get_rep_endpoint(UserCtx, couch_util:get_value(<<"source">>, Props)),
+ Tgt = get_rep_endpoint(UserCtx, couch_util:get_value(<<"target">>, Props)),
+ maybe_append_filters({Props}, [HostName, Port, Src, Tgt], UserCtx);
+make_replication_id({Props}, UserCtx, 1) ->
+ {ok, HostName} = inet:gethostname(),
+ Src = get_rep_endpoint(UserCtx, couch_util:get_value(<<"source">>, Props)),
+ Tgt = get_rep_endpoint(UserCtx, couch_util:get_value(<<"target">>, Props)),
+ maybe_append_filters({Props}, [HostName, Src, Tgt], UserCtx).
+
+maybe_append_filters({Props}, Base, UserCtx) ->
+ Base2 = Base ++
+ case couch_util:get_value(<<"filter">>, Props) of
+ undefined ->
+ case couch_util:get_value(<<"doc_ids">>, Props) of
+ undefined ->
+ [];
+ DocIds ->
+ [DocIds]
+ end;
+ Filter ->
+ [filter_code(Filter, Props, UserCtx),
+ couch_util:get_value(<<"query_params">>, Props, {[]})]
+ end,
+ couch_util:to_hex(couch_util:md5(term_to_binary(Base2))).
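Version 2 folds the HTTP port into the hashed term, so two nodes behind one hostname stop colliding, and maybe_append_filters/3 mixes in the filter source (or the doc id list) so differently filtered replications get distinct ids. Schematically (placeholders bound elsewhere; FilterCode is only present for filtered replications):

    %% Shape of the version-2 term that gets hashed:
    Base2 = ["myhost", 5984, SrcEndpoint, TgtEndpoint,
             FilterCode, {[{<<"type">>, <<"post">>}]}],
    BaseId = couch_util:to_hex(couch_util:md5(term_to_binary(Base2))).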
+
+filter_code(Filter, Props, UserCtx) ->
+ {match, [DDocName, FilterName]} =
+ re:run(Filter, "(.*?)/(.*)", [{capture, [1, 2], binary}]),
+ ProxyParams = parse_proxy_params(
+ couch_util:get_value(<<"proxy">>, Props, [])),
+ Source = open_db(
+ couch_util:get_value(<<"source">>, Props), UserCtx, ProxyParams),
+ try
+ {ok, DDoc} = open_doc(Source, <<"_design/", DDocName/binary>>),
+ Code = couch_util:get_nested_json_value(
+ DDoc#doc.body, [<<"filters">>, FilterName]),
+ re:replace(Code, "^\s*(.*?)\s*$", "\\1", [{return, binary}])
+ after
+ close_db(Source)
+ end.
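filter_code/3 splits the "ddocname/filtername" string with a lazy match on the first slash, for example:

    %% re:run(<<"app/by_type">>, "(.*?)/(.*)", [{capture, [1, 2], binary}])
    %%   => {match, [<<"app">>, <<"by_type">>]}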
maybe_add_trailing_slash(Url) ->
re:replace(Url, "[^/]$", "&/", [{return, list}]).
get_rep_endpoint(_UserCtx, {Props}) ->
- Url = maybe_add_trailing_slash(proplists:get_value(<<"url">>, Props)),
- {BinHeaders} = proplists:get_value(<<"headers">>, Props, {[]}),
- {Auth} = proplists:get_value(<<"auth">>, Props, {[]}),
- case proplists:get_value(<<"oauth">>, Auth) of
+ Url = maybe_add_trailing_slash(couch_util:get_value(<<"url">>, Props)),
+ {BinHeaders} = couch_util:get_value(<<"headers">>, Props, {[]}),
+ {Auth} = couch_util:get_value(<<"auth">>, Props, {[]}),
+ case couch_util:get_value(<<"oauth">>, Auth) of
undefined ->
{remote, Url, [{?b2l(K),?b2l(V)} || {K,V} <- BinHeaders]};
{OAuth} ->
@@ -420,60 +534,97 @@ get_rep_endpoint(_UserCtx, <<"https://",_/binary>>=Url) ->
get_rep_endpoint(UserCtx, <<DbName/binary>>) ->
{local, DbName, UserCtx}.
-open_replication_log(#http_db{}=Db, RepId) ->
- DocId = ?LOCAL_DOC_PREFIX ++ RepId,
- Req = Db#http_db{resource=couch_util:url_encode(DocId)},
+find_replication_logs(DbList, RepId, RepProps, UserCtx) ->
+ LogId = ?l2b(?LOCAL_DOC_PREFIX ++ RepId),
+ fold_replication_logs(DbList, ?REP_ID_VERSION,
+ LogId, LogId, RepProps, UserCtx, []).
+
+% Accumulates the replication logs.
+% Falls back to older log document ids and migrates them.
+fold_replication_logs([], _Vsn, _LogId, _NewId, _RepProps, _UserCtx, Acc) ->
+ lists:reverse(Acc);
+fold_replication_logs([Db|Rest]=Dbs, Vsn, LogId, NewId,
+ RepProps, UserCtx, Acc) ->
+ case open_replication_log(Db, LogId) of
+ {error, not_found} when Vsn > 1 ->
+ OldRepId = make_replication_id(RepProps, UserCtx, Vsn - 1),
+ fold_replication_logs(Dbs, Vsn - 1,
+ ?l2b(?LOCAL_DOC_PREFIX ++ OldRepId), NewId, RepProps, UserCtx, Acc);
+ {error, not_found} ->
+ fold_replication_logs(Rest, ?REP_ID_VERSION, NewId, NewId,
+ RepProps, UserCtx, [#doc{id=NewId}|Acc]);
+ {ok, Doc} when LogId =:= NewId ->
+ fold_replication_logs(Rest, ?REP_ID_VERSION, NewId, NewId,
+ RepProps, UserCtx, [Doc|Acc]);
+ {ok, Doc} ->
+ MigratedLog = #doc{id=NewId,body=Doc#doc.body},
+ fold_replication_logs(Rest, ?REP_ID_VERSION, NewId, NewId,
+ RepProps, UserCtx, [MigratedLog|Acc])
+ end.
+
+open_replication_log(Db, DocId) ->
+ case open_doc(Db, DocId) of
+ {ok, Doc} ->
+ ?LOG_DEBUG("found a replication log for ~s", [dbname(Db)]),
+ {ok, Doc};
+ _ ->
+ ?LOG_DEBUG("didn't find a replication log for ~s", [dbname(Db)]),
+ {error, not_found}
+ end.
+
+open_doc(#http_db{} = Db, DocId) ->
+ Req = Db#http_db{resource = couch_util:encode_doc_id(DocId)},
case couch_rep_httpc:request(Req) of
{[{<<"error">>, _}, {<<"reason">>, _}]} ->
- ?LOG_DEBUG("didn't find a replication log for ~s", [Db#http_db.url]),
- #doc{id=?l2b(DocId)};
+ {error, not_found};
Doc ->
- ?LOG_DEBUG("found a replication log for ~s", [Db#http_db.url]),
- couch_doc:from_json_obj(Doc)
+ {ok, couch_doc:from_json_obj(Doc)}
end;
-open_replication_log(Db, RepId) ->
- DocId = ?l2b(?LOCAL_DOC_PREFIX ++ RepId),
- case couch_db:open_doc(Db, DocId, []) of
- {ok, Doc} ->
- ?LOG_DEBUG("found a replication log for ~s", [Db#db.name]),
- Doc;
- _ ->
- ?LOG_DEBUG("didn't find a replication log for ~s", [Db#db.name]),
- #doc{id=DocId}
- end.
+open_doc(Db, DocId) ->
+ couch_db:open_doc(Db, DocId).
-open_db(Props, UserCtx) ->
- open_db(Props, UserCtx, false).
+open_db(Props, UserCtx, ProxyParams) ->
+ open_db(Props, UserCtx, ProxyParams, false).
-open_db({Props}, _UserCtx, CreateTarget) ->
- Url = maybe_add_trailing_slash(proplists:get_value(<<"url">>, Props)),
- {AuthProps} = proplists:get_value(<<"auth">>, Props, {[]}),
- {BinHeaders} = proplists:get_value(<<"headers">>, Props, {[]}),
+open_db({Props}, _UserCtx, ProxyParams, CreateTarget) ->
+ Url = maybe_add_trailing_slash(couch_util:get_value(<<"url">>, Props)),
+ {AuthProps} = couch_util:get_value(<<"auth">>, Props, {[]}),
+ {BinHeaders} = couch_util:get_value(<<"headers">>, Props, {[]}),
Headers = [{?b2l(K),?b2l(V)} || {K,V} <- BinHeaders],
DefaultHeaders = (#http_db{})#http_db.headers,
- Db = #http_db{
+ Db1 = #http_db{
url = Url,
auth = AuthProps,
headers = lists:ukeymerge(1, Headers, DefaultHeaders)
},
+ Db = Db1#http_db{
+ options = Db1#http_db.options ++ ProxyParams ++
+ couch_rep_httpc:ssl_options(Db1)
+ },
couch_rep_httpc:db_exists(Db, CreateTarget);
-open_db(<<"http://",_/binary>>=Url, _, CreateTarget) ->
- open_db({[{<<"url">>,Url}]}, [], CreateTarget);
-open_db(<<"https://",_/binary>>=Url, _, CreateTarget) ->
- open_db({[{<<"url">>,Url}]}, [], CreateTarget);
-open_db(<<DbName/binary>>, UserCtx, CreateTarget) ->
- case CreateTarget of
- true ->
- ok = couch_httpd:verify_is_server_admin(UserCtx),
- couch_server:create(DbName, [{user_ctx, UserCtx}]);
- false -> ok
- end,
-
- case couch_db:open(DbName, [{user_ctx, UserCtx}]) of
- {ok, Db} ->
- couch_db:monitor(Db),
- Db;
- {not_found, no_db_file} -> throw({db_not_found, DbName})
+open_db(<<"http://",_/binary>>=Url, _, ProxyParams, CreateTarget) ->
+ open_db({[{<<"url">>,Url}]}, [], ProxyParams, CreateTarget);
+open_db(<<"https://",_/binary>>=Url, _, ProxyParams, CreateTarget) ->
+ open_db({[{<<"url">>,Url}]}, [], ProxyParams, CreateTarget);
+open_db(<<DbName/binary>>, UserCtx, _ProxyParams, CreateTarget) ->
+ try
+ case CreateTarget of
+ true ->
+ ok = couch_httpd:verify_is_server_admin(UserCtx),
+ couch_server:create(DbName, [{user_ctx, UserCtx}]);
+ false ->
+ ok
+ end,
+
+ case couch_db:open(DbName, [{user_ctx, UserCtx}]) of
+ {ok, Db} ->
+ couch_db:monitor(Db),
+ Db;
+ {not_found, no_db_file} ->
+ throw({db_not_found, DbName})
+ end
+ catch throw:{unauthorized, _} ->
+ throw({unauthorized, DbName})
end.
schedule_checkpoint(#state{checkpoint_scheduled = nil} = State) ->
@@ -495,38 +646,59 @@ do_checkpoint(State) ->
committed_seq = NewSeqNum,
start_seq = StartSeqNum,
history = OldHistory,
+ session_id = SessionId,
source_log = SourceLog,
target_log = TargetLog,
rep_starttime = ReplicationStartTime,
src_starttime = SrcInstanceStartTime,
tgt_starttime = TgtInstanceStartTime,
- stats = Stats
+ stats = Stats,
+ rep_doc = {RepDoc}
} = State,
case commit_to_both(Source, Target, NewSeqNum) of
{SrcInstanceStartTime, TgtInstanceStartTime} ->
?LOG_INFO("recording a checkpoint for ~s -> ~s at source update_seq ~p",
[dbname(Source), dbname(Target), NewSeqNum]),
- SessionId = couch_uuids:random(),
+ EndTime = ?l2b(httpd_util:rfc1123_date()),
+ StartTime = ?l2b(ReplicationStartTime),
+ DocsRead = ets:lookup_element(Stats, docs_read, 2),
+ DocsWritten = ets:lookup_element(Stats, docs_written, 2),
+ DocWriteFailures = ets:lookup_element(Stats, doc_write_failures, 2),
NewHistoryEntry = {[
{<<"session_id">>, SessionId},
- {<<"start_time">>, list_to_binary(ReplicationStartTime)},
- {<<"end_time">>, list_to_binary(httpd_util:rfc1123_date())},
+ {<<"start_time">>, StartTime},
+ {<<"end_time">>, EndTime},
{<<"start_last_seq">>, StartSeqNum},
{<<"end_last_seq">>, NewSeqNum},
{<<"recorded_seq">>, NewSeqNum},
{<<"missing_checked">>, ets:lookup_element(Stats, total_revs, 2)},
{<<"missing_found">>, ets:lookup_element(Stats, missing_revs, 2)},
- {<<"docs_read">>, ets:lookup_element(Stats, docs_read, 2)},
- {<<"docs_written">>, ets:lookup_element(Stats, docs_written, 2)},
- {<<"doc_write_failures">>,
- ets:lookup_element(Stats, doc_write_failures, 2)}
+ {<<"docs_read">>, DocsRead},
+ {<<"docs_written">>, DocsWritten},
+ {<<"doc_write_failures">>, DocWriteFailures}
]},
- % limit history to 50 entries
- NewRepHistory = {[
+ BaseHistory = [
{<<"session_id">>, SessionId},
- {<<"source_last_seq">>, NewSeqNum},
- {<<"history">>, lists:sublist([NewHistoryEntry | OldHistory], 50)}
- ]},
+ {<<"source_last_seq">>, NewSeqNum}
+ ] ++ case couch_util:get_value(<<"doc_ids">>, RepDoc) of
+ undefined ->
+ [];
+ DocIds when is_list(DocIds) ->
+ % backwards compatibility with the result of a replication by
+ % doc IDs in versions 0.11.x and 1.0.x
+ [
+ {<<"start_time">>, StartTime},
+ {<<"end_time">>, EndTime},
+ {<<"docs_read">>, DocsRead},
+ {<<"docs_written">>, DocsWritten},
+ {<<"doc_write_failures">>, DocWriteFailures}
+ ]
+ end,
+ % limit history to 50 entries
+ NewRepHistory = {
+ BaseHistory ++
+ [{<<"history">>, lists:sublist([NewHistoryEntry | OldHistory], 50)}]
+ },
try
{SrcRevPos,SrcRevId} =
@@ -559,7 +731,7 @@ do_checkpoint(State) ->
close_db(Target),
close_db(Source),
{ok, NewState} = init(State#state.init_args),
- NewState
+ NewState#state{listeners=State#state.listeners}
end.
commit_to_both(Source, Target, RequiredSeq) ->
@@ -580,17 +752,22 @@ commit_to_both(Source, Target, RequiredSeq) ->
end,
{SourceStartTime, TargetStartTime}.
-ensure_full_commit(#http_db{} = Target) ->
+ensure_full_commit(#http_db{headers = Headers} = Target) ->
+ Headers1 = [
+ {"Content-Length", 0} |
+ couch_util:proplist_apply_field(
+ {"Content-Type", "application/json"}, Headers)
+ ],
Req = Target#http_db{
resource = "_ensure_full_commit",
method = post,
- body = true
+ headers = Headers1
},
{ResultProps} = couch_rep_httpc:request(Req),
- true = proplists:get_value(<<"ok">>, ResultProps),
- proplists:get_value(<<"instance_start_time">>, ResultProps);
+ true = couch_util:get_value(<<"ok">>, ResultProps),
+ couch_util:get_value(<<"instance_start_time">>, ResultProps);
ensure_full_commit(Target) ->
- {ok, NewDb} = couch_db:open(Target#db.name, []),
+ {ok, NewDb} = couch_db:open_int(Target#db.name, []),
UpdateSeq = couch_db:get_update_seq(Target),
CommitSeq = couch_db:get_committed_update_seq(NewDb),
InstanceStartTime = NewDb#db.instance_start_time,
@@ -605,20 +782,25 @@ ensure_full_commit(Target) ->
InstanceStartTime
end.
-ensure_full_commit(#http_db{} = Source, RequiredSeq) ->
+ensure_full_commit(#http_db{headers = Headers} = Source, RequiredSeq) ->
+ Headers1 = [
+ {"Content-Length", 0} |
+ couch_util:proplist_apply_field(
+ {"Content-Type", "application/json"}, Headers)
+ ],
Req = Source#http_db{
resource = "_ensure_full_commit",
method = post,
- body = true,
- qs = [{seq, RequiredSeq}]
+ qs = [{seq, RequiredSeq}],
+ headers = Headers1
},
{ResultProps} = couch_rep_httpc:request(Req),
- case proplists:get_value(<<"ok">>, ResultProps) of
+ case couch_util:get_value(<<"ok">>, ResultProps) of
true ->
- proplists:get_value(<<"instance_start_time">>, ResultProps);
+ couch_util:get_value(<<"instance_start_time">>, ResultProps);
undefined -> nil end;
ensure_full_commit(Source, RequiredSeq) ->
- {ok, NewDb} = couch_db:open(Source#db.name, []),
+ {ok, NewDb} = couch_db:open_int(Source#db.name, []),
CommitSeq = couch_db:get_committed_update_seq(NewDb),
InstanceStartTime = NewDb#db.instance_start_time,
couch_db:close(NewDb),
@@ -632,15 +814,15 @@ ensure_full_commit(Source, RequiredSeq) ->
InstanceStartTime
end.
-update_local_doc(#http_db{} = Db, #doc{id=DocId} = Doc) ->
+update_local_doc(#http_db{} = Db, Doc) ->
Req = Db#http_db{
- resource = couch_util:url_encode(DocId),
+ resource = couch_util:encode_doc_id(Doc),
method = put,
body = couch_doc:to_json_obj(Doc, [attachments]),
headers = [{"x-couch-full-commit", "false"} | Db#http_db.headers]
},
{ResponseMembers} = couch_rep_httpc:request(Req),
- Rev = proplists:get_value(<<"rev">>, ResponseMembers),
+ Rev = couch_util:get_value(<<"rev">>, ResponseMembers),
couch_doc:parse_rev(Rev);
update_local_doc(Db, Doc) ->
{ok, Result} = couch_db:update_doc(Db, Doc, [delay_commit]),
@@ -649,7 +831,129 @@ update_local_doc(Db, Doc) ->
up_to_date(#http_db{}, _Seq) ->
true;
up_to_date(Source, Seq) ->
- {ok, NewDb} = couch_db:open(Source#db.name, []),
+ {ok, NewDb} = couch_db:open_int(Source#db.name, []),
T = NewDb#db.update_seq == Seq,
couch_db:close(NewDb),
T.
+
+parse_proxy_params(ProxyUrl) when is_binary(ProxyUrl) ->
+ parse_proxy_params(?b2l(ProxyUrl));
+parse_proxy_params([]) ->
+ [];
+parse_proxy_params(ProxyUrl) ->
+ {url, _, Base, Port, User, Passwd, _Path, _Proto} =
+ ibrowse_lib:parse_url(ProxyUrl),
+ [{proxy_host, Base}, {proxy_port, Port}] ++
+ case is_list(User) andalso is_list(Passwd) of
+ false ->
+ [];
+ true ->
+ [{proxy_user, User}, {proxy_password, Passwd}]
+ end.
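For reference, the expected expansion of the new proxy support (the function is internal; values illustrative):

    %% parse_proxy_params(<<"http://u:p@proxy.local:8080">>)
    %%   => [{proxy_host, "proxy.local"}, {proxy_port, 8080},
    %%       {proxy_user, "u"}, {proxy_password, "p"}]
    %% Credentials are included only when both user and password parse out.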
+
+update_rep_doc({Props} = _RepDoc, KVs) ->
+ case couch_util:get_value(<<"_id">>, Props) of
+ undefined ->
+ % replication triggered by POSTing to _replicate/
+ ok;
+ RepDocId ->
+ % replication triggered by adding a Rep Doc to the replicator DB
+ {ok, RepDb} = ensure_rep_db_exists(),
+ case couch_db:open_doc(RepDb, RepDocId, []) of
+ {ok, LatestRepDoc} ->
+ update_rep_doc(RepDb, LatestRepDoc, KVs);
+ _ ->
+ ok
+ end,
+ couch_db:close(RepDb)
+ end.
+
+update_rep_doc(RepDb, #doc{body = {RepDocBody}} = RepDoc, KVs) ->
+ NewRepDocBody = lists:foldl(
+ fun({<<"_replication_state">> = K, _V} = KV, Body) ->
+ Body1 = lists:keystore(K, 1, Body, KV),
+ {Mega, Secs, _} = erlang:now(),
+ UnixTime = Mega * 1000000 + Secs,
+ lists:keystore(
+ <<"_replication_state_time">>, 1,
+ Body1, {<<"_replication_state_time">>, UnixTime});
+ ({K, _V} = KV, Body) ->
+ lists:keystore(K, 1, Body, KV)
+ end,
+ RepDocBody,
+ KVs
+ ),
+ % The update may not succeed if the replication doc was deleted
+ % just before this point; that is not an error.
+ couch_db:update_doc(
+ RepDb,
+ RepDoc#doc{body = {NewRepDocBody}},
+ []
+ ).
+
+maybe_set_triggered({RepProps} = RepDoc, RepId) ->
+ case couch_util:get_value(<<"_replication_state">>, RepProps) of
+ <<"triggered">> ->
+ ok;
+ _ ->
+ update_rep_doc(
+ RepDoc,
+ [
+ {<<"_replication_state">>, <<"triggered">>},
+ {<<"_replication_id">>, ?l2b(RepId)}
+ ]
+ )
+ end.
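Taken together, update_rep_doc/2 and maybe_set_triggered/2 drive a replication document through a small state machine: no state -> triggered at start, then completed on normal termination or error on failure, with _replication_state_time refreshed on every transition. A document mid-lifecycle might look like this (illustrative body, not a literal CouchDB response):

    {[
        {<<"_id">>, <<"my_rep">>},
        {<<"source">>, <<"http://example.org/db/">>},
        {<<"target">>, <<"db_copy">>},
        {<<"_replication_id">>, <<"c0ffee...">>},     % set when triggered
        {<<"_replication_state">>, <<"triggered">>},  % later completed | error
        {<<"_replication_state_time">>, 1288878632}   % unix seconds
    ]}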
+
+ensure_rep_db_exists() ->
+ DbName = ?l2b(couch_config:get("replicator", "db", "_replicator")),
+ Opts = [
+ {user_ctx, #user_ctx{roles=[<<"_admin">>, <<"_replicator">>]}},
+ sys_db
+ ],
+ case couch_db:open(DbName, Opts) of
+ {ok, Db} ->
+ Db;
+ _Error ->
+ {ok, Db} = couch_db:create(DbName, Opts)
+ end,
+ ok = ensure_rep_ddoc_exists(Db, <<"_design/_replicator">>),
+ {ok, Db}.
+
+ensure_rep_ddoc_exists(RepDb, DDocID) ->
+ case couch_db:open_doc(RepDb, DDocID, []) of
+ {ok, _Doc} ->
+ ok;
+ _ ->
+ DDoc = couch_doc:from_json_obj({[
+ {<<"_id">>, DDocID},
+ {<<"language">>, <<"javascript">>},
+ {<<"validate_doc_update">>, ?REP_DB_DOC_VALIDATE_FUN}
+ ]}),
+ {ok, _Rev} = couch_db:update_doc(RepDb, DDoc, [])
+ end,
+ ok.
+
+source_db_update_notifier(#db{name = DbName}) ->
+ Server = self(),
+ {ok, Notifier} = couch_db_update_notifier:start_link(
+ fun({compacted, DbName1}) when DbName1 =:= DbName ->
+ ok = gen_server:cast(Server, reopen_source_db);
+ (_) ->
+ ok
+ end),
+ Notifier;
+source_db_update_notifier(_) ->
+ nil.
+
+target_db_update_notifier(#db{name = DbName}) ->
+ Server = self(),
+ {ok, Notifier} = couch_db_update_notifier:start_link(
+ fun({compacted, DbName1}) when DbName1 =:= DbName ->
+ ok = gen_server:cast(Server, reopen_target_db);
+ (_) ->
+ ok
+ end),
+ Notifier;
+target_db_update_notifier(_) ->
+ nil.
diff --git a/src/couchdb/couch_rep_att.erl b/src/couchdb/couch_rep_att.erl
index 6b576a01..6bb993a8 100644
--- a/src/couchdb/couch_rep_att.erl
+++ b/src/couchdb/couch_rep_att.erl
@@ -29,11 +29,13 @@ convert_stub(#att{data=stub, name=Name} = Attachment,
Attachment#att{data=RcvFun}.
cleanup() ->
- receive
+ receive
{ibrowse_async_response, _, _} ->
%% TODO maybe log, didn't expect to have data here
cleanup();
- {ibrowse_async_response_end, _} ->
+ {ibrowse_async_response_end, _} ->
+ cleanup();
+ {ibrowse_async_headers, _, _, _} ->
cleanup()
after 0 ->
erase(),
@@ -43,13 +45,27 @@ cleanup() ->
% internal funs
attachment_receiver(Ref, Request) ->
- case get(Ref) of
+ try case get(Ref) of
undefined ->
{ReqId, ContentEncoding} = start_http_request(Request),
put(Ref, {ReqId, ContentEncoding}),
receive_data(Ref, ReqId, ContentEncoding);
{ReqId, ContentEncoding} ->
receive_data(Ref, ReqId, ContentEncoding)
+ end
+ catch
+ throw:{attachment_request_failed, _} ->
+ case {Request#http_db.retries, Request#http_db.pause} of
+ {0, _} ->
+ ?LOG_INFO("request for ~p failed", [Request#http_db.resource]),
+ throw({attachment_request_failed, max_retries_reached});
+ {N, Pause} when N > 0 ->
+ ?LOG_INFO("request for ~p timed out, retrying in ~p seconds",
+ [Request#http_db.resource, Pause/1000]),
+ timer:sleep(Pause),
+ cleanup(),
+ attachment_receiver(Ref, Request#http_db{retries = N-1})
+ end
end.
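attachment_receiver/2 now wraps the fetch in a bounded retry: on {attachment_request_failed, _} it sleeps #http_db.pause milliseconds, drains stale ibrowse messages, and decrements the retry budget. The same shape in isolation (an illustrative sketch):

    retry(0, _Pause, _Fun) ->
        throw({attachment_request_failed, max_retries_reached});
    retry(N, Pause, Fun) when N > 0 ->
        try
            Fun()
        catch
            throw:{attachment_request_failed, _} ->
                timer:sleep(Pause),
                retry(N - 1, Pause, Fun)
        end.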
receive_data(Ref, ReqId, ContentEncoding) ->
@@ -63,14 +79,12 @@ receive_data(Ref, ReqId, ContentEncoding) ->
throw({attachment_request_failed, Err});
{ibrowse_async_response, ReqId, Data} ->
% ?LOG_DEBUG("got ~p bytes for ~p", [size(Data), ReqId]),
- if ContentEncoding =:= "gzip" ->
- zlib:gunzip(Data);
- true ->
- Data
- end;
+ Data;
{ibrowse_async_response_end, ReqId} ->
?LOG_ERROR("streaming att. ended but more data requested ~p", [ReqId]),
throw({attachment_request_failed, premature_end})
+ after 31000 ->
+ throw({attachment_request_failed, timeout})
end.
start_http_request(Req) ->
@@ -84,14 +98,15 @@ start_http_request(Req) ->
{ok, ContentEncoding, NewReqId} ->
{NewReqId, ContentEncoding}
end
+ after 10000 ->
+ throw({attachment_request_failed, timeout})
end.
validate_headers(_Req, 200, Headers) ->
MochiHeaders = mochiweb_headers:make(Headers),
{ok, mochiweb_headers:get_value("Content-Encoding", MochiHeaders)};
validate_headers(Req, Code, Headers) when Code > 299, Code < 400 ->
- Url = mochiweb_headers:get_value("Location",mochiweb_headers:make(Headers)),
- NewReq = couch_rep_httpc:redirected_request(Req, Url),
+ NewReq = couch_rep_httpc:redirected_request(Code, Headers, Req),
{ibrowse_req_id, ReqId} = couch_rep_httpc:request(NewReq),
receive {ibrowse_async_headers, ReqId, NewCode, NewHeaders} ->
{ok, Encoding} = validate_headers(NewReq, list_to_integer(NewCode),
diff --git a/src/couchdb/couch_rep_changes_feed.erl b/src/couchdb/couch_rep_changes_feed.erl
index cdfed6a0..246e82c0 100644
--- a/src/couchdb/couch_rep_changes_feed.erl
+++ b/src/couchdb/couch_rep_changes_feed.erl
@@ -12,12 +12,13 @@
-module(couch_rep_changes_feed).
-behaviour(gen_server).
--export([init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2,
+-export([init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2,
code_change/3]).
-export([start_link/4, next/1, stop/1]).
-define(BUFFER_SIZE, 1000).
+-define(DOC_IDS_FILTER_NAME, "_doc_ids").
-include("couch_db.hrl").
-include("../ibrowse/ibrowse.hrl").
@@ -36,6 +37,11 @@
rows = queue:new()
}).
+-import(couch_util, [
+ get_value/2,
+ get_value/3
+]).
+
start_link(Parent, Source, StartSeq, PostProps) ->
gen_server:start_link(?MODULE, [Parent, Source, StartSeq, PostProps], []).
@@ -43,44 +49,64 @@ next(Server) ->
gen_server:call(Server, next_changes, infinity).
stop(Server) ->
- gen_server:call(Server, stop).
+ catch gen_server:call(Server, stop),
+ ok.
-init([_Parent, #http_db{}=Source, Since, PostProps] = Args) ->
+init([Parent, #http_db{headers = Headers0} = Source, Since, PostProps]) ->
process_flag(trap_exit, true),
- Feed = case proplists:get_value(<<"continuous">>, PostProps, false) of
+ Feed = case get_value(<<"continuous">>, PostProps, false) of
false ->
normal;
true ->
continuous
end,
+ BaseQS = [
+ {"style", all_docs},
+ {"heartbeat", 10000},
+ {"since", Since},
+ {"feed", Feed}
+ ],
+ {QS, Method, Body, Headers} = case get_value(<<"doc_ids">>, PostProps) of
+ undefined ->
+ {maybe_add_filter_qs_params(PostProps, BaseQS), get, nil, Headers0};
+ DocIds when is_list(DocIds) ->
+ Headers1 = [{"Content-Type", "application/json"} | Headers0],
+ QS1 = [{"filter", ?l2b(?DOC_IDS_FILTER_NAME)} | BaseQS],
+ {QS1, post, {[{<<"doc_ids">>, DocIds}]}, Headers1}
+ end,
Pid = couch_rep_httpc:spawn_link_worker_process(Source),
Req = Source#http_db{
+ method = Method,
+ body = Body,
resource = "_changes",
- qs = [{style, all_docs}, {heartbeat, 10000}, {since, Since},
- {feed, Feed}],
+ qs = QS,
conn = Pid,
- options = [{stream_to, {self(), once}}, {response_format, binary},
- {inactivity_timeout, 31000}], % miss 3 heartbeats, assume death
- headers = Source#http_db.headers -- [{"Accept-Encoding", "gzip"}]
+ options = [{stream_to, {self(), once}}] ++
+ lists:keydelete(inactivity_timeout, 1, Source#http_db.options),
+ headers = Headers -- [{"Accept-Encoding", "gzip"}]
},
{ibrowse_req_id, ReqId} = couch_rep_httpc:request(Req),
+ Args = [Parent, Req, Since, PostProps],
receive
{ibrowse_async_headers, ReqId, "200", _} ->
ibrowse:stream_next(ReqId),
{ok, #state{conn=Pid, last_seq=Since, reqid=ReqId, init_args=Args}};
- {ibrowse_async_headers, ReqId, Code, Hdrs} when Code=="301"; Code=="302" ->
- catch ibrowse:stop_worker_process(Pid),
- Url2 = mochiweb_headers:get_value("Location", mochiweb_headers:make(Hdrs)),
- %% TODO use couch_httpc:request instead of start_http_request
- {Pid2, ReqId2} = start_http_request(Url2),
+ {ibrowse_async_headers, ReqId, Code, Hdrs}
+ when Code =:= "301"; Code =:= "302"; Code =:= "303" ->
+ stop_link_worker(Pid),
+ Req2 = couch_rep_httpc:redirected_request(Code, Hdrs, Req),
+ Pid2 = couch_rep_httpc:spawn_link_worker_process(Req2),
+ Req3 = Req2#http_db{conn = Pid2},
+ {ibrowse_req_id, ReqId2} = couch_rep_httpc:request(Req3),
+ Args2 = [Parent, Req3, Since, PostProps],
receive {ibrowse_async_headers, ReqId2, "200", _} ->
- {ok, #state{conn=Pid2, last_seq=Since, reqid=ReqId2, init_args=Args}}
+ {ok, #state{conn=Pid2, last_seq=Since, reqid=ReqId2, init_args=Args2}}
after 30000 ->
{stop, changes_timeout}
end;
{ibrowse_async_headers, ReqId, "404", _} ->
- catch ibrowse:stop_worker_process(Pid),
+ stop_link_worker(Pid),
?LOG_INFO("source doesn't have _changes, trying _all_docs_by_seq", []),
Self = self(),
BySeqPid = spawn_link(fun() -> by_seq_loop(Self, Source, Since) end),
@@ -94,20 +120,81 @@ init([_Parent, #http_db{}=Source, Since, PostProps] = Args) ->
init([_Parent, Source, Since, PostProps] = InitArgs) ->
process_flag(trap_exit, true),
Server = self(),
- ChangesPid =
- case proplists:get_value(<<"continuous">>, PostProps, false) of
- false ->
- spawn_link(fun() -> send_local_changes_once(Server, Source, Since) end);
- true ->
- spawn_link(fun() ->
- Self = self(),
- {ok, _} = couch_db_update_notifier:start_link(fun(Msg) ->
- local_update_notification(Self, Source#db.name, Msg) end),
- send_local_changes_forever(Server, Source, Since)
- end)
+ Filter = case get_value(<<"doc_ids">>, PostProps) of
+ undefined ->
+ ?b2l(get_value(<<"filter">>, PostProps, <<>>));
+ DocIds when is_list(DocIds) ->
+ ?DOC_IDS_FILTER_NAME
end,
+ ChangesArgs = #changes_args{
+ style = all_docs,
+ since = Since,
+ filter = Filter,
+ feed = case get_value(<<"continuous">>, PostProps, false) of
+ true ->
+ "continuous";
+ false ->
+ "normal"
+ end,
+ timeout = infinity
+ },
+ ChangesPid = spawn_link(fun() ->
+ ChangesFeedFun = couch_changes:handle_changes(
+ ChangesArgs,
+ {json_req, filter_json_req(Filter, Source, PostProps)},
+ Source
+ ),
+ ChangesFeedFun(fun({change, Change, _}, _) ->
+ gen_server:call(Server, {add_change, Change}, infinity);
+ (_, _) ->
+ ok
+ end)
+ end),
{ok, #state{changes_loop=ChangesPid, init_args=InitArgs}}.
+maybe_add_filter_qs_params(PostProps, BaseQS) ->
+ case get_value(<<"filter">>, PostProps) of
+ undefined ->
+ BaseQS;
+ FilterName ->
+ {Params} = get_value(<<"query_params">>, PostProps, {[]}),
+ lists:foldr(
+ fun({K, V}, QSAcc) ->
+ Ks = couch_util:to_list(K),
+ case proplists:is_defined(Ks, QSAcc) of
+ true ->
+ QSAcc;
+ false ->
+ [{Ks, V} | QSAcc]
+ end
+ end,
+ [{"filter", FilterName} | BaseQS],
+ Params
+ )
+ end.
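maybe_add_filter_qs_params/2 copies the user's query_params into the _changes query string without clobbering anything already set; the filter name itself is seeded first. For example (values illustrative):

    %% With {<<"filter">>, <<"app/by_type">>} and
    %% {<<"query_params">>, {[{<<"type">>, <<"post">>}]}} in PostProps:
    %%   maybe_add_filter_qs_params(PostProps, BaseQS)
    %%     => [{"type", <<"post">>}, {"filter", <<"app/by_type">>} | BaseQS]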
+
+filter_json_req([], _Db, _PostProps) ->
+ {[]};
+filter_json_req(?DOC_IDS_FILTER_NAME, _Db, PostProps) ->
+ {[{<<"doc_ids">>, get_value(<<"doc_ids">>, PostProps)}]};
+filter_json_req(FilterName, Db, PostProps) ->
+ {Query} = get_value(<<"query_params">>, PostProps, {[]}),
+ {ok, Info} = couch_db:get_db_info(Db),
+ % simulate a request to db_name/_changes
+ {[
+ {<<"info">>, {Info}},
+ {<<"id">>, null},
+ {<<"method">>, 'GET'},
+ {<<"path">>, [couch_db:name(Db), <<"_changes">>]},
+ {<<"query">>, {[{<<"filter">>, FilterName} | Query]}},
+ {<<"headers">>, []},
+ {<<"body">>, []},
+ {<<"peer">>, <<"replicator">>},
+ {<<"form">>, []},
+ {<<"cookie">>, []},
+ {<<"userCtx">>, couch_util:json_user_ctx(Db)}
+ ]}.
+
handle_call({add_change, Row}, From, State) ->
handle_add_change(Row, From, State);
@@ -123,7 +210,7 @@ handle_cast(_Msg, State) ->
handle_info({ibrowse_async_headers, Id, Code, Hdrs}, #state{reqid=Id}=State) ->
handle_headers(list_to_integer(Code), Hdrs, State);
-handle_info({ibrowse_async_response, Id, {error,connection_closed}},
+handle_info({ibrowse_async_response, Id, {error, sel_conn_closed}},
#state{reqid=Id}=State) ->
handle_retry(State);
@@ -144,9 +231,20 @@ handle_info({'EXIT', From, Reason}, #state{changes_loop=From} = State) ->
?LOG_ERROR("changes_loop died with reason ~p", [Reason]),
{stop, changes_loop_died, State};
-handle_info(Msg, State) ->
- ?LOG_DEBUG("unexpected message at changes_feed ~p", [Msg]),
- {noreply, State}.
+handle_info({'EXIT', From, Reason}, State) ->
+ ?LOG_ERROR("changes loop, process ~p died with reason ~p", [From, Reason]),
+ {stop, {From, Reason}, State};
+
+handle_info(Msg, #state{init_args = InitArgs} = State) ->
+ case Msg of
+ changes_timeout ->
+ [_, #http_db{url = Url} | _] = InitArgs,
+ ?LOG_ERROR("changes loop timeout, no data received from ~s",
+ [couch_util:url_strip_password(Url)]);
+ _ ->
+ ?LOG_ERROR("changes loop received unexpected message ~p", [Msg])
+ end,
+ {stop, Msg, State}.
terminate(_Reason, State) ->
#state{
@@ -154,8 +252,7 @@ terminate(_Reason, State) ->
conn = Conn
} = State,
if is_pid(ChangesPid) -> exit(ChangesPid, stop); true -> ok end,
- if is_pid(Conn) -> catch ibrowse:stop_worker_process(Conn); true -> ok end,
- ok.
+ stop_link_worker(Conn).
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
@@ -196,12 +293,16 @@ handle_next_changes(_From, State) ->
handle_headers(200, _, State) ->
maybe_stream_next(State),
{noreply, State};
-handle_headers(301, Hdrs, State) ->
- catch ibrowse:stop_worker_process(State#state.conn),
- Url = mochiweb_headers:get_value("Location", mochiweb_headers:make(Hdrs)),
- %% TODO use couch_httpc:request instead of start_http_request
- {Pid, ReqId} = start_http_request(Url),
- {noreply, State#state{conn=Pid, reqid=ReqId}};
+handle_headers(Code, Hdrs, #state{init_args = InitArgs} = State)
+ when Code =:= 301 ; Code =:= 302 ; Code =:= 303 ->
+ stop_link_worker(State#state.conn),
+ [Parent, Source, Since, PostProps] = InitArgs,
+ Source2 = couch_rep_httpc:redirected_request(Code, Hdrs, Source),
+ Pid2 = couch_rep_httpc:spawn_link_worker_process(Source2),
+ Source3 = Source2#http_db{conn = Pid2},
+ {ibrowse_req_id, ReqId} = couch_rep_httpc:request(Source3),
+ InitArgs2 = [Parent, Source3, Since, PostProps],
+ {noreply, State#state{conn=Pid2, reqid=ReqId, init_args=InitArgs2}};
handle_headers(Code, Hdrs, State) ->
?LOG_ERROR("replicator changes feed failed with code ~s and Headers ~n~p",
[Code,Hdrs]),
@@ -212,12 +313,10 @@ handle_messages([], State) ->
{noreply, State};
handle_messages([<<"{\"results\":[">>|Rest], State) ->
handle_messages(Rest, State);
-handle_messages([<<"]">>, <<"\"last_seq\":", LastSeqStr/binary>>], State) ->
- LastSeq = list_to_integer(?b2l(hd(re:split(LastSeqStr, "}")))),
- handle_feed_completion(State#state{last_seq = LastSeq});
-handle_messages([<<"{\"last_seq\":", LastSeqStr/binary>>], State) ->
- LastSeq = list_to_integer(?b2l(hd(re:split(LastSeqStr, "}")))),
- handle_feed_completion(State#state{last_seq = LastSeq});
+handle_messages([<<"]">>, <<"\"last_seq\":", _/binary>>], State) ->
+ handle_feed_completion(State);
+handle_messages([<<"{\"last_seq\":", _/binary>>], State) ->
+ handle_feed_completion(State);
handle_messages([Chunk|Rest], State) ->
#state{
count = Count,
@@ -230,7 +329,7 @@ handle_messages([Chunk|Rest], State) ->
#state{reply_to=nil} ->
State#state{
count = Count+1,
- last_seq = proplists:get_value(<<"seq">>, Props),
+ last_seq = couch_util:get_value(<<"seq">>, Props),
partial_chunk = <<>>,
rows=queue:in(Row,Rows)
};
@@ -277,16 +376,16 @@ by_seq_loop(Server, Source, StartSeq) ->
qs = [{limit, 1000}, {startkey, StartSeq}]
},
{Results} = couch_rep_httpc:request(Req),
- Rows = proplists:get_value(<<"rows">>, Results),
+ Rows = couch_util:get_value(<<"rows">>, Results),
if Rows =:= [] -> exit(normal); true -> ok end,
EndSeq = lists:foldl(fun({RowInfoList}, _) ->
- Id = proplists:get_value(<<"id">>, RowInfoList),
- Seq = proplists:get_value(<<"key">>, RowInfoList),
- {RowProps} = proplists:get_value(<<"value">>, RowInfoList),
+ Id = couch_util:get_value(<<"id">>, RowInfoList),
+ Seq = couch_util:get_value(<<"key">>, RowInfoList),
+ {RowProps} = couch_util:get_value(<<"value">>, RowInfoList),
RawRevs = [
- proplists:get_value(<<"rev">>, RowProps),
- proplists:get_value(<<"conflicts">>, RowProps, []),
- proplists:get_value(<<"deleted_conflicts">>, RowProps, [])
+ couch_util:get_value(<<"rev">>, RowProps),
+ couch_util:get_value(<<"conflicts">>, RowProps, []),
+ couch_util:get_value(<<"deleted_conflicts">>, RowProps, [])
],
ParsedRevs = couch_doc:parse_revs(lists:flatten(RawRevs)),
Change = {[
@@ -302,73 +401,21 @@ by_seq_loop(Server, Source, StartSeq) ->
decode_row(<<",", Rest/binary>>) ->
decode_row(Rest);
decode_row(Row) ->
- {Props} = ?JSON_DECODE(Row),
- % [Seq, Id, {<<"changes">>,C}]
- Seq = proplists:get_value(<<"seq">>, Props),
- Id = proplists:get_value(<<"id">>, Props),
- C = proplists:get_value(<<"changes">>, Props),
- C2 = [{[{<<"rev">>,couch_doc:parse_rev(R)}]} || {[{<<"rev">>,R}]} <- C],
- {[{<<"seq">>, Seq}, {<<"id">>,Id}, {<<"changes">>,C2}]}.
-
-flush_updated_messages() ->
- receive updated -> flush_updated_messages()
- after 0 -> ok
- end.
-
-local_update_notification(Self, DbName, {updated, DbName}) ->
- Self ! updated;
-local_update_notification(_, _, _) ->
- ok.
+ ?JSON_DECODE(Row).
maybe_stream_next(#state{reqid=nil}) ->
ok;
maybe_stream_next(#state{complete=false, count=N} = S) when N < ?BUFFER_SIZE ->
+ timer:cancel(get(timeout)),
+ {ok, Timeout} = timer:send_after(31000, changes_timeout),
+ put(timeout, Timeout),
ibrowse:stream_next(S#state.reqid);
maybe_stream_next(_) ->
- ok.
-
-send_local_changes_forever(Server, Db, Since) ->
- #db{name = DbName, user_ctx = UserCtx} = Db,
- {ok, NewSeq} = send_local_changes_once(Server, Db, Since),
- couch_db:close(Db),
- ok = wait_db_updated(),
- {ok, NewDb} = couch_db:open(DbName, [{user_ctx, UserCtx}]),
- send_local_changes_forever(Server, NewDb, NewSeq).
-
-send_local_changes_once(Server, Db, Since) ->
- FilterFun =
- fun(#doc_info{revs=[#rev_info{rev=Rev}|_]}) ->
- {[{<<"rev">>, Rev}]}
- end,
- ChangesFun =
- fun([#doc_info{id=Id, high_seq=Seq}|_]=DocInfos, _) ->
- Results0 = [FilterFun(DocInfo) || DocInfo <- DocInfos],
- Results = [Result || Result <- Results0, Result /= null],
- if Results /= [] ->
- Change = {[{<<"seq">>,Seq}, {<<"id">>,Id}, {<<"changes">>,Results}]},
- gen_server:call(Server, {add_change, Change}, infinity);
- true ->
- ok
- end,
- {ok, Seq}
- end,
-
- couch_db:changes_since(Db, all_docs, Since, ChangesFun, Since).
-
-start_http_request(RawUrl) ->
- Url = ibrowse_lib:parse_url(RawUrl),
- {ok, Pid} = ibrowse:spawn_link_worker_process(Url#url.host, Url#url.port),
- Opts = [
- {stream_to, {self(), once}},
- {inactivity_timeout, 31000},
- {response_format, binary}
- ],
- {ibrowse_req_id, Id} =
- ibrowse:send_req_direct(Pid, RawUrl, [], get, [], Opts, infinity),
- {Pid, Id}.
-
-wait_db_updated() ->
- receive updated ->
- flush_updated_messages()
- end.
+ timer:cancel(get(timeout)).
+
+stop_link_worker(Conn) when is_pid(Conn) ->
+ unlink(Conn),
+ receive {'EXIT', Conn, _} -> ok after 0 -> ok end,
+ catch ibrowse:stop_worker_process(Conn);
+stop_link_worker(_) ->
+ ok.
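
A note on the inactivity-timer pattern introduced in maybe_stream_next/1 above: every request for the next chunk re-arms a 31-second timer whose expiry message (changes_timeout) signals a stalled feed. A minimal standalone sketch of that pattern, with module and function names of our own choosing:

    -module(timeout_sketch).
    -export([rearm/0, cancel/0]).

    %% Cancel any pending timer, then arm a fresh 31s one. The TRef is
    %% kept in the process dictionary, as in the patch above.
    rearm() ->
        cancel(),
        {ok, TRef} = timer:send_after(31000, changes_timeout),
        put(timeout, TRef),
        ok.

    %% timer:cancel/1 returns {error, badarg} for a non-TRef, so this
    %% is safe even before the first timer has been armed.
    cancel() ->
        _ = timer:cancel(get(timeout)),
        ok.
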
diff --git a/src/couchdb/couch_rep_db_listener.erl b/src/couchdb/couch_rep_db_listener.erl
new file mode 100644
index 00000000..6f1f0443
--- /dev/null
+++ b/src/couchdb/couch_rep_db_listener.erl
@@ -0,0 +1,311 @@
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+% http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+-module(couch_rep_db_listener).
+-behaviour(gen_server).
+
+-export([start_link/0, init/1, handle_call/3, handle_info/2, handle_cast/2]).
+-export([code_change/3, terminate/2]).
+
+-include("couch_db.hrl").
+
+-define(DOC_TO_REP_ID_MAP, rep_doc_id_to_rep_id).
+-define(REP_ID_TO_DOC_ID_MAP, rep_id_to_rep_doc_id).
+
+-record(state, {
+ changes_feed_loop = nil,
+ changes_queue = nil,
+ changes_processor = nil,
+ db_notifier = nil
+}).
+
+
+start_link() ->
+ gen_server:start_link({local, ?MODULE}, ?MODULE, [], []).
+
+init(_) ->
+ process_flag(trap_exit, true),
+ {ok, Queue} = couch_work_queue:new(
+ [{max_size, 1024 * 1024}, {max_items, 1000}]),
+ {ok, Processor} = changes_processor(Queue),
+ {ok, Loop} = changes_feed_loop(Queue),
+ Server = self(),
+ ok = couch_config:register(
+ fun("replicator", "db") ->
+ ok = gen_server:cast(Server, rep_db_changed)
+ end
+ ),
+ {ok, #state{
+ changes_feed_loop = Loop,
+ changes_queue = Queue,
+ changes_processor = Processor,
+ db_notifier = db_update_notifier()}
+ }.
+
+
+handle_call(Msg, From, State) ->
+ ?LOG_ERROR("Replicator DB listener received unexpected call ~p from ~p",
+ [Msg, From]),
+ {stop, {error, {unexpected_call, Msg}}, State}.
+
+
+handle_cast(rep_db_changed, State) ->
+ #state{
+ changes_feed_loop = Loop,
+ changes_queue = Queue
+ } = State,
+ catch unlink(Loop),
+ catch exit(Loop, rep_db_changed),
+ couch_work_queue:queue(Queue, stop_all_replications),
+ {ok, NewLoop} = changes_feed_loop(Queue),
+ {noreply, State#state{changes_feed_loop = NewLoop}};
+
+handle_cast(rep_db_created, #state{changes_feed_loop = Loop} = State) ->
+ catch unlink(Loop),
+ catch exit(Loop, rep_db_changed),
+ {ok, NewLoop} = changes_feed_loop(State#state.changes_queue),
+ {noreply, State#state{changes_feed_loop = NewLoop}};
+
+handle_cast(Msg, State) ->
+ ?LOG_ERROR("Replicator DB listener received unexpected cast ~p", [Msg]),
+ {stop, {error, {unexpected_cast, Msg}}, State}.
+
+handle_info({'EXIT', From, normal}, #state{changes_feed_loop = From} = State) ->
+ % replicator DB deleted
+ couch_work_queue:queue(State#state.changes_queue, stop_all_replications),
+ {noreply, State#state{changes_feed_loop = nil}};
+
+handle_info({'EXIT', From, Reason}, #state{db_notifier = From} = State) ->
+ ?LOG_ERROR("Database update notifier died. Reason: ~p", [Reason]),
+ {stop, {db_update_notifier_died, Reason}, State};
+
+handle_info({'EXIT', From, Reason}, #state{changes_processor = From} = State) ->
+ ?LOG_ERROR("Replicator DB changes processor died. Reason: ~p", [Reason]),
+ {stop, {rep_db_changes_processor_died, Reason}, State}.
+
+
+terminate(_Reason, State) ->
+ #state{
+ changes_feed_loop = Loop,
+ changes_queue = Queue
+ } = State,
+ exit(Loop, stop),
+ % closing the queue will cause changes_processor to shutdown
+ couch_work_queue:close(Queue),
+ ok.
+
+
+code_change(_OldVsn, State, _Extra) ->
+ {ok, State}.
+
+
+changes_feed_loop(ChangesQueue) ->
+ {ok, RepDb} = couch_rep:ensure_rep_db_exists(),
+ Pid = spawn_link(
+ fun() ->
+ ChangesFeedFun = couch_changes:handle_changes(
+ #changes_args{
+ include_docs = true,
+ feed = "continuous",
+ timeout = infinity,
+ db_open_options = [sys_db]
+ },
+ {json_req, null},
+ RepDb
+ ),
+ ChangesFeedFun(
+ fun({change, Change, _}, _) ->
+ case has_valid_rep_id(Change) of
+ true ->
+ couch_work_queue:queue(ChangesQueue, Change);
+ false ->
+ ok
+ end;
+ (_, _) ->
+ ok
+ end
+ )
+ end
+ ),
+ couch_db:close(RepDb),
+ {ok, Pid}.
+
+
+db_update_notifier() ->
+ Server = self(),
+ {ok, Notifier} = couch_db_update_notifier:start_link(
+ fun({created, DbName}) ->
+ case ?l2b(couch_config:get("replicator", "db", "_replicator")) of
+ DbName ->
+ ok = gen_server:cast(Server, rep_db_created);
+ _ ->
+ ok
+ end;
+ (_) ->
+ ok
+ end
+ ),
+ Notifier.
+
+
+changes_processor(ChangesQueue) ->
+ Pid = spawn_link(
+ fun() ->
+ ets:new(?DOC_TO_REP_ID_MAP, [named_table, set, private]),
+ ets:new(?REP_ID_TO_DOC_ID_MAP, [named_table, set, private]),
+ consume_changes(ChangesQueue),
+ true = ets:delete(?REP_ID_TO_DOC_ID_MAP),
+ true = ets:delete(?DOC_TO_REP_ID_MAP)
+ end
+ ),
+ {ok, Pid}.
+
+
+consume_changes(ChangesQueue) ->
+ case couch_work_queue:dequeue(ChangesQueue) of
+ closed ->
+ ok;
+ {ok, Changes} ->
+ lists:foreach(fun process_change/1, Changes),
+ consume_changes(ChangesQueue)
+ end.
+
+
+has_valid_rep_id({Change}) ->
+ has_valid_rep_id(couch_util:get_value(<<"id">>, Change));
+has_valid_rep_id(<<?DESIGN_DOC_PREFIX, _Rest/binary>>) ->
+ false;
+has_valid_rep_id(_Else) ->
+ true.
+
+process_change(stop_all_replications) ->
+ ?LOG_INFO("Stopping all ongoing replications because the replicator DB "
+ "was deleted or changed", []),
+ stop_all_replications();
+
+process_change({Change}) ->
+ {RepProps} = JsonRepDoc = couch_util:get_value(doc, Change),
+ DocId = couch_util:get_value(<<"_id">>, RepProps),
+ case couch_util:get_value(<<"deleted">>, Change, false) of
+ true ->
+ rep_doc_deleted(DocId);
+ false ->
+ case couch_util:get_value(<<"_replication_state">>, RepProps) of
+ <<"completed">> ->
+ replication_complete(DocId);
+ <<"error">> ->
+ stop_replication(DocId);
+ <<"triggered">> ->
+ maybe_start_replication(DocId, JsonRepDoc);
+ undefined ->
+ maybe_start_replication(DocId, JsonRepDoc);
+ _ ->
+ ?LOG_ERROR("Invalid value for the `_replication_state` property"
+ " of the replication document `~s`", [DocId])
+ end
+ end,
+ ok.
+
+
+rep_user_ctx({RepDoc}) ->
+ case couch_util:get_value(<<"user_ctx">>, RepDoc) of
+ undefined ->
+ #user_ctx{roles = [<<"_admin">>]};
+ {UserCtx} ->
+ #user_ctx{
+ name = couch_util:get_value(<<"name">>, UserCtx, null),
+ roles = couch_util:get_value(<<"roles">>, UserCtx, [])
+ }
+ end.
+
+
+maybe_start_replication(DocId, JsonRepDoc) ->
+ UserCtx = rep_user_ctx(JsonRepDoc),
+ {BaseId, _} = RepId = couch_rep:make_replication_id(JsonRepDoc, UserCtx),
+ case ets:lookup(?REP_ID_TO_DOC_ID_MAP, BaseId) of
+ [] ->
+ true = ets:insert(?REP_ID_TO_DOC_ID_MAP, {BaseId, DocId}),
+ true = ets:insert(?DOC_TO_REP_ID_MAP, {DocId, RepId}),
+ spawn_link(fun() -> start_replication(JsonRepDoc, RepId, UserCtx) end);
+ [{BaseId, DocId}] ->
+ ok;
+ [{BaseId, OtherDocId}] ->
+ maybe_tag_rep_doc(DocId, JsonRepDoc, ?l2b(BaseId), OtherDocId)
+ end.
+
+
+maybe_tag_rep_doc(DocId, {Props} = JsonRepDoc, RepId, OtherDocId) ->
+ case couch_util:get_value(<<"_replication_id">>, Props) of
+ RepId ->
+ ok;
+ _ ->
+ ?LOG_INFO("The replication specified by the document `~s` was already"
+ " triggered by the document `~s`", [DocId, OtherDocId]),
+ couch_rep:update_rep_doc(JsonRepDoc, [{<<"_replication_id">>, RepId}])
+ end.
+
+
+
+start_replication({RepProps} = RepDoc, {Base, Ext} = RepId, UserCtx) ->
+ case (catch couch_rep:start_replication(RepDoc, RepId, UserCtx)) of
+ RepPid when is_pid(RepPid) ->
+ ?LOG_INFO("Document `~s` triggered replication `~s`",
+ [couch_util:get_value(<<"_id">>, RepProps), Base ++ Ext]),
+ couch_rep:get_result(RepPid, RepId, RepDoc, UserCtx);
+ Error ->
+ couch_rep:update_rep_doc(
+ RepDoc,
+ [
+ {<<"_replication_state">>, <<"error">>},
+ {<<"_replication_id">>, ?l2b(Base)}
+ ]
+ ),
+ ?LOG_ERROR("Error starting replication `~s`: ~p", [Base ++ Ext, Error])
+ end.
+
+rep_doc_deleted(DocId) ->
+ case stop_replication(DocId) of
+ {ok, {Base, Ext}} ->
+ ?LOG_INFO("Stopped replication `~s` because replication document `~s`"
+ " was deleted", [Base ++ Ext, DocId]);
+ none ->
+ ok
+ end.
+
+replication_complete(DocId) ->
+ case stop_replication(DocId) of
+ {ok, {Base, Ext}} ->
+ ?LOG_INFO("Replication `~s` finished (triggered by document `~s`)",
+ [Base ++ Ext, DocId]);
+ none ->
+ ok
+ end.
+
+stop_replication(DocId) ->
+ case ets:lookup(?DOC_TO_REP_ID_MAP, DocId) of
+ [{DocId, {BaseId, _} = RepId}] ->
+ couch_rep:end_replication(RepId),
+ true = ets:delete(?REP_ID_TO_DOC_ID_MAP, BaseId),
+ true = ets:delete(?DOC_TO_REP_ID_MAP, DocId),
+ {ok, RepId};
+ [] ->
+ none
+ end.
+
+stop_all_replications() ->
+ ets:foldl(
+ fun({_, RepId}, _) -> couch_rep:end_replication(RepId) end,
+ ok,
+ ?DOC_TO_REP_ID_MAP
+ ),
+ true = ets:delete_all_objects(?REP_ID_TO_DOC_ID_MAP),
+ true = ets:delete_all_objects(?DOC_TO_REP_ID_MAP).
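
The listener's changes processor keeps two ETS tables so each direction resolves in one lookup: ?DOC_TO_REP_ID_MAP takes a replication document id to the replication it triggered (used when the doc is deleted or completes), and ?REP_ID_TO_DOC_ID_MAP takes a replication base id back to the owning document (used to spot a second doc describing the same replication). A toy sketch of that double bookkeeping, with illustrative ids:

    -module(rep_map_sketch).
    -export([demo/0]).

    demo() ->
        DocToRep = ets:new(doc_to_rep, [set, private]),
        RepToDoc = ets:new(rep_to_doc, [set, private]),
        %% triggering a replication records the mapping both ways
        true = ets:insert(RepToDoc, {"base_id", <<"rep_doc_1">>}),
        true = ets:insert(DocToRep, {<<"rep_doc_1">>, {"base_id", "+continuous"}}),
        %% deleting the doc: one lookup finds the replication to stop,
        %% then both tables are cleaned up
        [{_, {Base, _} = RepId}] = ets:lookup(DocToRep, <<"rep_doc_1">>),
        true = ets:delete(RepToDoc, Base),
        true = ets:delete(DocToRep, <<"rep_doc_1">>),
        RepId.
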
diff --git a/src/couchdb/couch_rep_httpc.erl b/src/couchdb/couch_rep_httpc.erl
index 4944f554..7a5bd18b 100644
--- a/src/couchdb/couch_rep_httpc.erl
+++ b/src/couchdb/couch_rep_httpc.erl
@@ -14,8 +14,10 @@
-include("couch_db.hrl").
-include("../ibrowse/ibrowse.hrl").
--export([db_exists/1, db_exists/2, full_url/1, request/1, redirected_request/2,
- spawn_worker_process/1, spawn_link_worker_process/1]).
+-export([db_exists/1, db_exists/2]).
+-export([full_url/1, request/1, redirected_request/3]).
+-export([spawn_worker_process/1, spawn_link_worker_process/1]).
+-export([ssl_options/1]).
request(#http_db{} = Req) ->
do_request(Req).
@@ -34,7 +36,7 @@ do_request(Req) ->
qs = QS
} = Req,
Url = full_url(Req),
- Headers = case proplists:get_value(<<"oauth">>, Auth) of
+ Headers = case couch_util:get_value(<<"oauth">>, Auth) of
undefined ->
Headers0;
{OAuthProps} ->
@@ -46,7 +48,7 @@ do_request(Req) ->
nil ->
[];
_Else ->
- iolist_to_binary(?JSON_ENCODE(B))
+ iolist_to_binary(?JSON_ENCODE(B))
end,
Resp = case Conn of
nil ->
@@ -72,10 +74,11 @@ db_exists(Req, CanonicalUrl, CreateDB) ->
#http_db{
auth = Auth,
headers = Headers0,
+ options = Options,
url = Url
} = Req,
HeadersFun = fun(Method) ->
- case proplists:get_value(<<"oauth">>, Auth) of
+ case couch_util:get_value(<<"oauth">>, Auth) of
undefined ->
Headers0;
{OAuthProps} ->
@@ -84,25 +87,46 @@ db_exists(Req, CanonicalUrl, CreateDB) ->
end,
case CreateDB of
true ->
- catch ibrowse:send_req(Url, HeadersFun(put), put);
+ Headers = [{"Content-Length", 0} | HeadersFun(put)],
+ catch ibrowse:send_req(Url, Headers, put, [], Options);
_Else -> ok
end,
- case catch ibrowse:send_req(Url, HeadersFun(head), head) of
+ case catch ibrowse:send_req(Url, HeadersFun(head), head, [], Options) of
{ok, "200", _, _} ->
Req#http_db{url = CanonicalUrl};
{ok, "301", RespHeaders, _} ->
- MochiHeaders = mochiweb_headers:make(RespHeaders),
- RedirectUrl = mochiweb_headers:get_value("Location", MochiHeaders),
+ RedirectUrl = redirect_url(RespHeaders, Req#http_db.url),
db_exists(Req#http_db{url = RedirectUrl}, RedirectUrl);
{ok, "302", RespHeaders, _} ->
- MochiHeaders = mochiweb_headers:make(RespHeaders),
- RedirectUrl = mochiweb_headers:get_value("Location", MochiHeaders),
+ RedirectUrl = redirect_url(RespHeaders, Req#http_db.url),
db_exists(Req#http_db{url = RedirectUrl}, CanonicalUrl);
+ {ok, "303", RespHeaders, _} ->
+ RedirectUrl = redirect_url(RespHeaders, Req#http_db.url),
+ db_exists(Req#http_db{method = get, url = RedirectUrl}, CanonicalUrl);
+ {ok, "401", _, _} ->
+ throw({unauthorized, ?l2b(Url)});
Error ->
?LOG_DEBUG("DB at ~s could not be found because ~p", [Url, Error]),
throw({db_not_found, ?l2b(Url)})
end.
+redirect_url(RespHeaders, OrigUrl) ->
+ MochiHeaders = mochiweb_headers:make(RespHeaders),
+ RedUrl = mochiweb_headers:get_value("Location", MochiHeaders),
+ #url{
+ host = Host, port = Port,
+ path = Path, protocol = Proto
+ } = ibrowse_lib:parse_url(RedUrl),
+ #url{username = User, password = Passwd} = ibrowse_lib:parse_url(OrigUrl),
+ Creds = case is_list(User) andalso is_list(Passwd) of
+ true ->
+ User ++ ":" ++ Passwd ++ "@";
+ false ->
+ []
+ end,
+ atom_to_list(Proto) ++ "://" ++ Creds ++ Host ++ ":" ++
+ integer_to_list(Port) ++ Path.
+
full_url(#http_db{url=Url} = Req) when is_binary(Url) ->
full_url(Req#http_db{url = ?b2l(Url)});
@@ -115,7 +139,7 @@ full_url(Req) ->
resource = Resource,
qs = QS
} = Req,
- QStr = lists:map(fun({K,V}) -> io_lib:format("~s=~s",
+ QStr = lists:map(fun({K,V}) -> io_lib:format("~s=~s",
[couch_util:to_list(K), couch_util:to_list(V)]) end, QS),
lists:flatten([Url, Resource, "?", string:join(QStr, "&")]).
@@ -123,10 +147,8 @@ process_response({ok, Status, Headers, Body}, Req) ->
Code = list_to_integer(Status),
if Code =:= 200; Code =:= 201 ->
?JSON_DECODE(maybe_decompress(Headers, Body));
- Code =:= 301; Code =:= 302 ->
- MochiHeaders = mochiweb_headers:make(Headers),
- RedirectUrl = mochiweb_headers:get_value("Location", MochiHeaders),
- do_request(redirected_request(Req, RedirectUrl));
+ Code =:= 301; Code =:= 302 ; Code =:= 303 ->
+ do_request(redirected_request(Code, Headers, Req));
Code =:= 409 ->
throw(conflict);
Code >= 400, Code < 500 ->
@@ -156,7 +178,7 @@ process_response({error, Reason}, Req) ->
pause = Pause
} = Req,
ShortReason = case Reason of
- connection_closed ->
+ sel_conn_closed ->
connection_closed;
{'EXIT', {noproc, _}} ->
noproc;
@@ -175,16 +197,26 @@ process_response({error, Reason}, Req) ->
do_request(Req#http_db{retries = Retries-1, pause = 2*Pause})
end.
-redirected_request(Req, RedirectUrl) ->
+redirected_request(Code, Headers, Req) ->
+ RedirectUrl = redirect_url(Headers, Req#http_db.url),
{Base, QStr, _} = mochiweb_util:urlsplit_path(RedirectUrl),
QS = mochiweb_util:parse_qs(QStr),
- Hdrs = case proplists:get_value(<<"oauth">>, Req#http_db.auth) of
+ ReqHeaders = case couch_util:get_value(<<"oauth">>, Req#http_db.auth) of
undefined ->
Req#http_db.headers;
_Else ->
lists:keydelete("Authorization", 1, Req#http_db.headers)
end,
- Req#http_db{url=Base, resource="", qs=QS, headers=Hdrs}.
+ Req#http_db{
+ method = case couch_util:to_integer(Code) of
+ 303 -> get;
+ _ -> Req#http_db.method
+ end,
+ url = Base,
+ resource = "",
+ qs = QS,
+ headers = ReqHeaders
+ }.
spawn_worker_process(Req) ->
Url = ibrowse_lib:parse_url(Req#http_db.url),
@@ -192,8 +224,7 @@ spawn_worker_process(Req) ->
Pid.
spawn_link_worker_process(Req) ->
- Url = ibrowse_lib:parse_url(Req#http_db.url),
- {ok, Pid} = ibrowse_http_client:start_link(Url),
+ {ok, Pid} = ibrowse:spawn_link_worker_process(Req#http_db.url),
Pid.
maybe_decompress(Headers, Body) ->
@@ -209,11 +240,11 @@ oauth_header(Url, QS, Action, Props) ->
% erlang-oauth doesn't like iolists
QSL = [{couch_util:to_list(K), ?b2l(?l2b(couch_util:to_list(V)))} ||
{K,V} <- QS],
- ConsumerKey = ?b2l(proplists:get_value(<<"consumer_key">>, Props)),
- Token = ?b2l(proplists:get_value(<<"token">>, Props)),
- TokenSecret = ?b2l(proplists:get_value(<<"token_secret">>, Props)),
- ConsumerSecret = ?b2l(proplists:get_value(<<"consumer_secret">>, Props)),
- SignatureMethodStr = ?b2l(proplists:get_value(<<"signature_method">>, Props, <<"HMAC-SHA1">>)),
+ ConsumerKey = ?b2l(couch_util:get_value(<<"consumer_key">>, Props)),
+ Token = ?b2l(couch_util:get_value(<<"token">>, Props)),
+ TokenSecret = ?b2l(couch_util:get_value(<<"token_secret">>, Props)),
+ ConsumerSecret = ?b2l(couch_util:get_value(<<"consumer_secret">>, Props)),
+ SignatureMethodStr = ?b2l(couch_util:get_value(<<"signature_method">>, Props, <<"HMAC-SHA1">>)),
SignatureMethodAtom = case SignatureMethodStr of
"PLAINTEXT" ->
plaintext;
@@ -232,3 +263,35 @@ oauth_header(Url, QS, Action, Props) ->
Params = oauth:signed_params(Method, Url, QSL, Consumer, Token, TokenSecret)
-- QSL,
{"Authorization", "OAuth " ++ oauth_uri:params_to_header_string(Params)}.
+
+ssl_options(#http_db{url = Url}) ->
+ case ibrowse_lib:parse_url(Url) of
+ #url{protocol = https} ->
+ Depth = list_to_integer(
+ couch_config:get("replicator", "ssl_certificate_max_depth", "3")
+ ),
+ SslOpts = [{depth, Depth} |
+ case couch_config:get("replicator", "verify_ssl_certificates") of
+ "true" ->
+ ssl_verify_options(true);
+ _ ->
+ ssl_verify_options(false)
+ end],
+ [{is_ssl, true}, {ssl_options, SslOpts}];
+ #url{protocol = http} ->
+ []
+ end.
+
+ssl_verify_options(Value) ->
+ ssl_verify_options(Value, erlang:system_info(otp_release)).
+
+ssl_verify_options(true, OTPVersion) when OTPVersion >= "R14" ->
+ CAFile = couch_config:get("replicator", "ssl_trusted_certificates_file"),
+ [{verify, verify_peer}, {cacertfile, CAFile}];
+ssl_verify_options(false, OTPVersion) when OTPVersion >= "R14" ->
+ [{verify, verify_none}];
+ssl_verify_options(true, _OTPVersion) ->
+ CAFile = couch_config:get("replicator", "ssl_trusted_certificates_file"),
+ [{verify, 2}, {cacertfile, CAFile}];
+ssl_verify_options(false, _OTPVersion) ->
+ [{verify, 0}].
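
ssl_verify_options/2 above branches on the OTP release string because the ssl verify option changed shape around R14: newer releases take the atoms verify_peer/verify_none, older ones the numeric levels 2/0. A standalone copy of that branching, with the CA file path hardcoded as an assumption in place of the couch_config lookup:

    -module(ssl_opts_sketch).
    -export([opts/1]).

    opts(Verify) ->
        opts(Verify, erlang:system_info(otp_release)).

    %% "R13..." < "R14" holds lexicographically, which is what the
    %% string comparison in the patch relies on as well.
    opts(true, Release) when Release >= "R14" ->
        [{verify, verify_peer}, {cacertfile, "/etc/ssl/ca.pem"}];
    opts(false, Release) when Release >= "R14" ->
        [{verify, verify_none}];
    opts(true, _Release) ->
        [{verify, 2}, {cacertfile, "/etc/ssl/ca.pem"}];
    opts(false, _Release) ->
        [{verify, 0}].
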
diff --git a/src/couchdb/couch_rep_missing_revs.erl b/src/couchdb/couch_rep_missing_revs.erl
index 5790dd71..9809ca5e 100644
--- a/src/couchdb/couch_rep_missing_revs.erl
+++ b/src/couchdb/couch_rep_missing_revs.erl
@@ -12,7 +12,7 @@
-module(couch_rep_missing_revs).
-behaviour(gen_server).
--export([init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2,
+-export([init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2,
code_change/3]).
-export([start_link/4, next/1, stop/1]).
@@ -24,7 +24,6 @@
-record (state, {
changes_loop,
changes_from = nil,
- target,
parent,
complete = false,
count = 0,
@@ -44,11 +43,11 @@ next(Server) ->
stop(Server) ->
gen_server:call(Server, stop).
-init([Parent, Target, ChangesFeed, _PostProps]) ->
+init([Parent, _Target, ChangesFeed, _PostProps]) ->
process_flag(trap_exit, true),
Self = self(),
- Pid = spawn_link(fun() -> changes_loop(Self, ChangesFeed, Target) end),
- {ok, #state{changes_loop=Pid, target=Target, parent=Parent}}.
+ Pid = spawn_link(fun() -> changes_loop(Self, ChangesFeed, Parent) end),
+ {ok, #state{changes_loop=Pid, parent=Parent}}.
handle_call({add_missing_revs, {HighSeq, Revs}}, From, State) ->
State#state.parent ! {update_stats, missing_revs, length(Revs)},
@@ -133,48 +132,57 @@ handle_changes_loop_exit(normal, State) ->
handle_changes_loop_exit(Reason, State) ->
{stop, Reason, State#state{changes_loop=nil}}.
-changes_loop(OurServer, SourceChangesServer, Target) ->
+changes_loop(OurServer, SourceChangesServer, Parent) ->
case couch_rep_changes_feed:next(SourceChangesServer) of
complete ->
exit(normal);
Changes ->
+ {ok, Target} = gen_server:call(Parent, get_target_db, infinity),
MissingRevs = get_missing_revs(Target, Changes),
gen_server:call(OurServer, {add_missing_revs, MissingRevs}, infinity)
end,
- changes_loop(OurServer, SourceChangesServer, Target).
+ changes_loop(OurServer, SourceChangesServer, Parent).
get_missing_revs(#http_db{}=Target, Changes) ->
- Transform = fun({[{<<"seq">>,_}, {<<"id">>,Id}, {<<"changes">>,C}]}) ->
- {Id, [couch_doc:rev_to_str(R) || {[{<<"rev">>, R}]} <- C]} end,
+ Transform = fun({Props}) ->
+ C = couch_util:get_value(<<"changes">>, Props),
+ Id = couch_util:get_value(<<"id">>, Props),
+ {Id, [R || {[{<<"rev">>, R}]} <- C]}
+ end,
IdRevsList = [Transform(Change) || Change <- Changes],
SeqDict = changes_dictionary(Changes),
- {[{<<"seq">>, HighSeq}, _, _]} = lists:last(Changes),
+ {LastProps} = lists:last(Changes),
+ HighSeq = couch_util:get_value(<<"seq">>, LastProps),
Request = Target#http_db{
resource = "_missing_revs",
method = post,
body = {IdRevsList}
},
{Resp} = couch_rep_httpc:request(Request),
- case proplists:get_value(<<"missing_revs">>, Resp) of
+ case couch_util:get_value(<<"missing_revs">>, Resp) of
{MissingRevs} ->
X = [{Id, dict:fetch(Id, SeqDict), couch_doc:parse_revs(RevStrs)} ||
{Id,RevStrs} <- MissingRevs],
{HighSeq, X};
_ ->
- exit({target_error, proplists:get_value(<<"error">>, Resp)})
+ exit({target_error, couch_util:get_value(<<"error">>, Resp)})
end;
get_missing_revs(Target, Changes) ->
- Transform = fun({[{<<"seq">>,_}, {<<"id">>,Id}, {<<"changes">>,C}]}) ->
- {Id, [R || {[{<<"rev">>, R}]} <- C]} end,
+ Transform = fun({Props}) ->
+ C = couch_util:get_value(<<"changes">>, Props),
+ Id = couch_util:get_value(<<"id">>, Props),
+ {Id, [couch_doc:parse_rev(R) || {[{<<"rev">>, R}]} <- C]}
+ end,
IdRevsList = [Transform(Change) || Change <- Changes],
SeqDict = changes_dictionary(Changes),
- {[{<<"seq">>, HighSeq}, _, _]} = lists:last(Changes),
+ {LastProps} = lists:last(Changes),
+ HighSeq = couch_util:get_value(<<"seq">>, LastProps),
{ok, Results} = couch_db:get_missing_revs(Target, IdRevsList),
{HighSeq, [{Id, dict:fetch(Id, SeqDict), Revs} || {Id, Revs, _} <- Results]}.
changes_dictionary(ChangeList) ->
- KVs = [{proplists:get_value(<<"id">>,C), proplists:get_value(<<"seq">>,C)}
+ KVs = [{couch_util:get_value(<<"id">>,C), couch_util:get_value(<<"seq">>,C)}
|| {C} <- ChangeList],
dict:from_list(KVs).
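
The Transform funs in get_missing_revs/2 stop pattern-matching the fixed [seq, id, changes] key order and extract fields with couch_util:get_value/2 instead; decode_row/1 now returns raw ?JSON_DECODE output, whose property order is not guaranteed. A small sketch of why order-insensitive lookup matters, with get_value reimplemented locally so the module stands alone:

    -module(row_sketch).
    -export([demo/0]).

    %% same shape as couch_util:get_value/3 in this patch
    get_value(Key, List, Default) ->
        case lists:keyfind(Key, 1, List) of
            {Key, Value} -> Value;
            false -> Default
        end.

    demo() ->
        %% key order differs from the old [seq, id, changes] assumption
        Row = [{<<"id">>, <<"doc1">>}, {<<"changes">>, []}, {<<"seq">>, 5}],
        {get_value(<<"seq">>, Row, undefined),
         get_value(<<"id">>, Row, undefined)}.   % -> {5, <<"doc1">>}
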
diff --git a/src/couchdb/couch_rep_reader.erl b/src/couchdb/couch_rep_reader.erl
index a66454c8..9252bc8c 100644
--- a/src/couchdb/couch_rep_reader.erl
+++ b/src/couchdb/couch_rep_reader.erl
@@ -12,12 +12,12 @@
-module(couch_rep_reader).
-behaviour(gen_server).
--export([init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2,
+-export([init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2,
code_change/3]).
-export([start_link/4, next/1]).
--import(couch_util, [url_encode/1]).
+-import(couch_util, [encode_doc_id/1]).
-define (BUFFER_SIZE, 1000).
-define (MAX_CONCURRENT_REQUESTS, 100).
@@ -57,7 +57,8 @@ init([Parent, Source, MissingRevs, _PostProps]) ->
ibrowse:set_max_pipeline_size(Host, Port, ?MAX_PIPELINE_SIZE);
true -> ok end,
Self = self(),
- ReaderLoop = spawn_link(fun() -> reader_loop(Self, Source, MissingRevs) end),
+ ReaderLoop = spawn_link(
+ fun() -> reader_loop(Self, Parent, Source, MissingRevs) end),
State = #state{
parent = Parent,
source = Source,
@@ -98,6 +99,8 @@ code_change(_OldVsn, State, _Extra) ->
%internal funs
+handle_add_docs(_Seq, [], _From, State) ->
+ {reply, ok, State};
handle_add_docs(Seq, DocsToAdd, From, #state{reply_to=nil} = State) ->
State1 = update_sequence_lists(Seq, State),
NewState = State1#state{
@@ -141,9 +144,13 @@ handle_open_remote_doc(Id, Seq, Revs, _, #state{source=#http_db{}} = State) ->
{_, _Ref} = spawn_document_request(Source, Id, Seq, Revs),
{reply, ok, State#state{monitor_count = Count+1}}.
-handle_monitor_down(normal, #state{pending_doc_request=nil,
+handle_monitor_down(normal, #state{pending_doc_request=nil, reply_to=nil,
monitor_count=1, complete=waiting_on_monitors} = State) ->
{noreply, State#state{complete=true, monitor_count=0}};
+handle_monitor_down(normal, #state{pending_doc_request=nil, reply_to=From,
+ monitor_count=1, complete=waiting_on_monitors} = State) ->
+ gen_server:reply(From, {complete, calculate_new_high_seq(State)}),
+ {stop, normal, State#state{complete=true, monitor_count=0}};
handle_monitor_down(normal, #state{pending_doc_request=nil} = State) ->
#state{monitor_count = Count} = State,
{noreply, State#state{monitor_count = Count-1}};
@@ -210,11 +217,11 @@ update_sequence_lists(Seq, State) ->
opened_seqs = Opened
}.
-open_doc_revs(#http_db{} = DbS, DocId, Revs) ->
+open_doc_revs(#http_db{url = Url} = DbS, DocId, Revs) ->
%% all this logic just splits up revision lists that are too long for
%% MochiWeb into multiple requests
- BaseQS = [{revs,true}, {latest,true}],
- BaseReq = DbS#http_db{resource=url_encode(DocId), qs=BaseQS},
+ BaseQS = [{revs,true}, {latest,true}, {att_encoding_info,true}],
+ BaseReq = DbS#http_db{resource=encode_doc_id(DocId), qs=BaseQS},
BaseLength = length(couch_rep_httpc:full_url(BaseReq)) + 11, % &open_revs=
{RevLists, _, _} = lists:foldl(fun split_revlist/2,
@@ -226,15 +233,22 @@ open_doc_revs(#http_db{} = DbS, DocId, Revs) ->
JsonResults = lists:flatten([couch_rep_httpc:request(R) || R <- Requests]),
Transform =
- fun({[{<<"missing">>, Rev}]}) ->
- {{not_found, missing}, couch_doc:parse_rev(Rev)};
- ({[{<<"ok">>, Json}]}) ->
+ fun({[{<<"ok">>, Json}]}, Acc) ->
#doc{id=Id, revs=Rev, atts=Atts} = Doc = couch_doc:from_json_obj(Json),
- Doc#doc{atts=[couch_rep_att:convert_stub(A, {DbS,Id,Rev}) || A <- Atts]}
+ Doc1 = Doc#doc{
+ atts=[couch_rep_att:convert_stub(A, {DbS,Id,Rev}) || A <- Atts]
+ },
+ [Doc1 | Acc];
+ ({ErrorProps}, Acc) ->
+ Err = couch_util:get_value(<<"error">>, ErrorProps,
+ ?JSON_ENCODE({ErrorProps})),
+ ?LOG_ERROR("Replicator: error accessing doc ~s at ~s, reason: ~s",
+ [DocId, couch_util:url_strip_password(Url), Err]),
+ Acc
end,
- [Transform(Result) || Result <- JsonResults].
+ lists:reverse(lists:foldl(Transform, [], JsonResults)).
-reader_loop(ReaderServer, Source, MissingRevsServer) ->
+reader_loop(ReaderServer, Parent, Source, MissingRevsServer) ->
case couch_rep_missing_revs:next(MissingRevsServer) of
complete ->
exit(complete);
@@ -247,22 +261,23 @@ reader_loop(ReaderServer, Source, MissingRevsServer) ->
#http_db{} ->
[gen_server:call(ReaderServer, {open_remote_doc, Id, Seq, Revs},
infinity) || {Id,Seq,Revs} <- SortedIdsRevs],
- reader_loop(ReaderServer, Source, MissingRevsServer);
+ reader_loop(ReaderServer, Parent, Source, MissingRevsServer);
_Local ->
- Source2 = maybe_reopen_db(Source, HighSeq),
+ {ok, Source1} = gen_server:call(Parent, get_source_db, infinity),
+ Source2 = maybe_reopen_db(Source1, HighSeq),
lists:foreach(fun({Id,Seq,Revs}) ->
{ok, Docs} = couch_db:open_doc_revs(Source2, Id, Revs, [latest]),
JustTheDocs = [Doc || {ok, Doc} <- Docs],
gen_server:call(ReaderServer, {add_docs, Seq, JustTheDocs},
infinity)
end, SortedIdsRevs),
- reader_loop(ReaderServer, Source2, MissingRevsServer)
+ couch_db:close(Source2),
+ reader_loop(ReaderServer, Parent, Source2, MissingRevsServer)
end
end.
maybe_reopen_db(#db{update_seq=OldSeq} = Db, HighSeq) when HighSeq > OldSeq ->
{ok, NewDb} = couch_db:open(Db#db.name, [{user_ctx, Db#db.user_ctx}]),
- couch_db:close(Db),
NewDb;
maybe_reopen_db(Db, _HighSeq) ->
Db.
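
The reader's Transform becomes a fold so that a per-revision error is logged and skipped instead of crashing the whole batch; the old clause only understood the {"missing": Rev} shape. The skeleton of that fold-and-skip pattern, stripped of the attachment-stub conversion done above:

    -module(fold_skip_sketch).
    -export([docs/1]).

    docs(JsonResults) ->
        F = fun({[{<<"ok">>, Doc}]}, Acc) ->
                    [Doc | Acc];
               (Other, Acc) ->
                    %% log and drop anything that is not an ok result
                    io:format("skipping open_revs result: ~p~n", [Other]),
                    Acc
            end,
        lists:reverse(lists:foldl(F, [], JsonResults)).
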
diff --git a/src/couchdb/couch_rep_writer.erl b/src/couchdb/couch_rep_writer.erl
index b86028ce..12d6dec5 100644
--- a/src/couchdb/couch_rep_writer.erl
+++ b/src/couchdb/couch_rep_writer.erl
@@ -16,16 +16,17 @@
-include("couch_db.hrl").
-start_link(Parent, Target, Reader, _PostProps) ->
- {ok, spawn_link(fun() -> writer_loop(Parent, Reader, Target) end)}.
+start_link(Parent, _Target, Reader, _PostProps) ->
+ {ok, spawn_link(fun() -> writer_loop(Parent, Reader) end)}.
-writer_loop(Parent, Reader, Target) ->
+writer_loop(Parent, Reader) ->
case couch_rep_reader:next(Reader) of
{complete, FinalSeq} ->
Parent ! {writer_checkpoint, FinalSeq},
ok;
{HighSeq, Docs} ->
DocCount = length(Docs),
+ {ok, Target} = gen_server:call(Parent, get_target_db, infinity),
try write_docs(Target, Docs) of
{ok, []} ->
Parent ! {update_stats, docs_written, DocCount};
@@ -41,31 +42,124 @@ writer_loop(Parent, Reader, Target) ->
Parent ! {writer_checkpoint, HighSeq},
couch_rep_att:cleanup(),
couch_util:should_flush(),
- writer_loop(Parent, Reader, Target)
+ writer_loop(Parent, Reader)
end.
-write_docs(#http_db{headers = Headers} = Db, Docs) ->
- JsonDocs = [couch_doc:to_json_obj(Doc, [revs,attachments]) || Doc <- Docs],
+write_docs(#http_db{} = Db, Docs) ->
+ {DocsAtts, DocsNoAtts} = lists:partition(
+ fun(#doc{atts=[]}) -> false; (_) -> true end,
+ Docs
+ ),
+ ErrorsJson0 = write_bulk_docs(Db, DocsNoAtts),
+ ErrorsJson = lists:foldl(
+ fun(Doc, Acc) -> write_multi_part_doc(Db, Doc) ++ Acc end,
+ ErrorsJson0,
+ DocsAtts
+ ),
+ {ok, ErrorsJson};
+write_docs(Db, Docs) ->
+ couch_db:update_docs(Db, Docs, [delay_commit], replicated_changes).
+
+write_bulk_docs(_Db, []) ->
+ [];
+write_bulk_docs(#http_db{headers = Headers} = Db, Docs) ->
+ JsonDocs = [
+ couch_doc:to_json_obj(Doc, [revs, att_gzip_length]) || Doc <- Docs
+ ],
Request = Db#http_db{
resource = "_bulk_docs",
method = post,
body = {[{new_edits, false}, {docs, JsonDocs}]},
- headers = [{"x-couch-full-commit", "false"} | Headers]
+ headers = couch_util:proplist_apply_field({"Content-Type", "application/json"}, [{"X-Couch-Full-Commit", "false"} | Headers])
},
ErrorsJson = case couch_rep_httpc:request(Request) of
{FailProps} ->
- exit({target_error, proplists:get_value(<<"error">>, FailProps)});
+ exit({target_error, couch_util:get_value(<<"error">>, FailProps)});
List when is_list(List) ->
List
end,
- ErrorsList = [write_docs_1(V) || V <- ErrorsJson],
- {ok, ErrorsList};
-write_docs(Db, Docs) ->
- couch_db:update_docs(Db, Docs, [delay_commit], replicated_changes).
+ [write_docs_1(V) || V <- ErrorsJson].
+
+write_multi_part_doc(#http_db{headers=Headers} = Db, #doc{atts=Atts} = Doc) ->
+ JsonBytes = ?JSON_ENCODE(
+ couch_doc:to_json_obj(
+ Doc,
+ [follows, att_encoding_info, attachments]
+ )
+ ),
+ Boundary = couch_uuids:random(),
+ {ContentType, Len} = couch_doc:len_doc_to_multi_part_stream(
+ Boundary, JsonBytes, Atts, true
+ ),
+ StreamerPid = spawn_link(
+ fun() -> streamer_fun(Boundary, JsonBytes, Atts) end
+ ),
+ BodyFun = fun(Acc) ->
+ DataQueue = case Acc of
+ nil ->
+ StreamerPid ! {start, self()},
+ receive
+ {queue, Q} ->
+ Q
+ end;
+ Queue ->
+ Queue
+ end,
+ case couch_work_queue:dequeue(DataQueue) of
+ closed ->
+ eof;
+ {ok, Data} ->
+ {ok, iolist_to_binary(Data), DataQueue}
+ end
+ end,
+ Request = Db#http_db{
+ resource = couch_util:encode_doc_id(Doc),
+ method = put,
+ qs = [{new_edits, false}],
+ body = {BodyFun, nil},
+ headers = [
+ {"x-couch-full-commit", "false"},
+ {"Content-Type", ?b2l(ContentType)},
+ {"Content-Length", Len} | Headers
+ ]
+ },
+ Result = case couch_rep_httpc:request(Request) of
+ {[{<<"error">>, Error}, {<<"reason">>, Reason}]} ->
+ {Pos, [RevId | _]} = Doc#doc.revs,
+ ErrId = couch_util:to_existing_atom(Error),
+ [{Doc#doc.id, couch_doc:rev_to_str({Pos, RevId})}, {ErrId, Reason}];
+ _ ->
+ []
+ end,
+ StreamerPid ! stop,
+ Result.
+
+streamer_fun(Boundary, JsonBytes, Atts) ->
+ receive
+ stop ->
+ ok;
+ {start, From} ->
+ % better use a brand new queue, to ensure there's no garbage from
+ % a previous (failed) iteration
+ {ok, DataQueue} = couch_work_queue:new(
+ [{max_size, 1024 * 1024}, {max_items, 1000}]),
+ From ! {queue, DataQueue},
+ couch_doc:doc_to_multi_part_stream(
+ Boundary,
+ JsonBytes,
+ Atts,
+ fun(Data) ->
+ couch_work_queue:queue(DataQueue, Data)
+ end,
+ true
+ ),
+ couch_work_queue:close(DataQueue),
+ streamer_fun(Boundary, JsonBytes, Atts)
+ end.
write_docs_1({Props}) ->
- Id = proplists:get_value(<<"id">>, Props),
- Rev = couch_doc:parse_rev(proplists:get_value(<<"rev">>, Props)),
- ErrId = couch_util:to_existing_atom(proplists:get_value(<<"error">>, Props)),
- Reason = proplists:get_value(<<"reason">>, Props),
+ Id = couch_util:get_value(<<"id">>, Props),
+ Rev = couch_doc:parse_rev(couch_util:get_value(<<"rev">>, Props)),
+ ErrId = couch_util:to_existing_atom(couch_util:get_value(<<"error">>, Props)),
+ Reason = couch_util:get_value(<<"reason">>, Props),
{{Id, Rev}, {ErrId, Reason}}.
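
write_docs/2 now partitions each batch: documents without attachments still travel together in one _bulk_docs POST, while every document with attachments gets its own streaming multipart PUT. A sketch of just the partition step, using a minimal local #doc{} record in place of the one from couch_db.hrl:

    -module(partition_sketch).
    -export([demo/0]).

    -record(doc, {id, atts = []}).

    demo() ->
        Docs = [#doc{id = <<"a">>},
                #doc{id = <<"b">>, atts = [stub]},
                #doc{id = <<"c">>}],
        %% same predicate as in write_docs/2 above
        {WithAtts, Plain} = lists:partition(
            fun(#doc{atts = []}) -> false; (_) -> true end, Docs),
        %% -> {[<<"b">>], [<<"a">>, <<"c">>]}
        {[D#doc.id || D <- WithAtts], [D#doc.id || D <- Plain]}.
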
diff --git a/src/couchdb/couch_server.erl b/src/couchdb/couch_server.erl
index afdf9365..7870d69e 100644
--- a/src/couchdb/couch_server.erl
+++ b/src/couchdb/couch_server.erl
@@ -51,30 +51,35 @@ sup_start_link() ->
gen_server:start_link({local, couch_server}, couch_server, [], []).
open(DbName, Options) ->
- case gen_server:call(couch_server, {open, DbName, Options}) of
- {ok, MainPid} ->
- Ctx = proplists:get_value(user_ctx, Options, #user_ctx{}),
- couch_db:open_ref_counted(MainPid, Ctx);
+ case gen_server:call(couch_server, {open, DbName, Options}, infinity) of
+ {ok, Db} ->
+ Ctx = couch_util:get_value(user_ctx, Options, #user_ctx{}),
+ {ok, Db#db{user_ctx=Ctx}};
Error ->
Error
end.
create(DbName, Options) ->
- case gen_server:call(couch_server, {create, DbName, Options}) of
- {ok, MainPid} ->
- Ctx = proplists:get_value(user_ctx, Options, #user_ctx{}),
- couch_db:open_ref_counted(MainPid, Ctx);
+ case gen_server:call(couch_server, {create, DbName, Options}, infinity) of
+ {ok, Db} ->
+ Ctx = couch_util:get_value(user_ctx, Options, #user_ctx{}),
+ {ok, Db#db{user_ctx=Ctx}};
Error ->
Error
end.
delete(DbName, Options) ->
- gen_server:call(couch_server, {delete, DbName, Options}).
+ gen_server:call(couch_server, {delete, DbName, Options}, infinity).
check_dbname(#server{dbname_regexp=RegExp}, DbName) ->
case re:run(DbName, RegExp, [{capture, none}]) of
nomatch ->
- {error, illegal_database_name};
+ case DbName of
+ "_users" -> ok;
+ "_replicator" -> ok;
+ _Else ->
+ {error, illegal_database_name}
+ end;
match ->
ok
end.
@@ -104,7 +109,7 @@ hash_admin_passwords(Persist) ->
({User, ClearPassword}) ->
Salt = ?b2l(couch_uuids:random()),
Hashed = couch_util:to_hex(crypto:sha(ClearPassword ++ Salt)),
- couch_config:set("admins",
+ couch_config:set("admins",
User, "-hashed-" ++ Hashed ++ "," ++ Salt, Persist)
end, couch_config:get("admins")).
@@ -127,6 +132,7 @@ init([]) ->
gen_server:call(couch_server,
{set_max_dbs_open, list_to_integer(Max)})
end),
+ ok = couch_file:init_delete_dir(RootDir),
hash_admin_passwords(),
ok = couch_config:register(
fun("admins", _Key, _Value, Persist) ->
@@ -137,28 +143,32 @@ init([]) ->
ets:new(couch_dbs_by_name, [set, private, named_table]),
ets:new(couch_dbs_by_pid, [set, private, named_table]),
ets:new(couch_dbs_by_lru, [ordered_set, private, named_table]),
+ ets:new(couch_sys_dbs, [set, private, named_table]),
process_flag(trap_exit, true),
{ok, #server{root_dir=RootDir,
dbname_regexp=RegExp,
max_dbs_open=MaxDbsOpen,
start_time=httpd_util:rfc1123_date()}}.
-terminate(Reason, _Srv) ->
- couch_util:terminate_linked(Reason),
+terminate(_Reason, _Srv) ->
+ [couch_util:shutdown_sync(Pid) || {_, {Pid, _LruTime}} <-
+ ets:tab2list(couch_dbs_by_name)],
ok.
all_databases() ->
{ok, #server{root_dir=Root}} = gen_server:call(couch_server, get_server),
+ NormRoot = couch_util:normpath(Root),
Filenames =
filelib:fold_files(Root, "^[a-z0-9\\_\\$()\\+\\-]*[\\.]couch$", true,
fun(Filename, AccIn) ->
- case Filename -- Root of
+ NormFilename = couch_util:normpath(Filename),
+ case NormFilename -- NormRoot of
[$/ | RelativeFilename] -> ok;
RelativeFilename -> ok
end,
[list_to_binary(filename:rootname(RelativeFilename, ".couch")) | AccIn]
end, []),
- {ok, Filenames}.
+ {ok, lists:usort(Filenames)}.
maybe_close_lru_db(#server{dbs_open=NumOpen, max_dbs_open=MaxOpen}=Server)
@@ -173,27 +183,22 @@ maybe_close_lru_db(#server{dbs_open=NumOpen}=Server) ->
end.
try_close_lru(StartTime) ->
- LruTime = ets:first(couch_dbs_by_lru),
+ LruTime = get_lru(),
if LruTime > StartTime ->
% this means we've looped through all our opened dbs and found them
% all in use.
{error, all_dbs_active};
true ->
[{_, DbName}] = ets:lookup(couch_dbs_by_lru, LruTime),
- [{_, {MainPid, LruTime}}] = ets:lookup(couch_dbs_by_name, DbName),
+ [{_, {opened, MainPid, LruTime}}] = ets:lookup(couch_dbs_by_name, DbName),
case couch_db:is_idle(MainPid) of
true ->
- exit(MainPid, kill),
- receive {'EXIT', MainPid, _Reason} -> ok end,
- true = ets:delete(couch_dbs_by_lru, LruTime),
- true = ets:delete(couch_dbs_by_name, DbName),
- true = ets:delete(couch_dbs_by_pid, MainPid),
- ok;
+ ok = shutdown_idle_db(DbName, MainPid, LruTime);
false ->
% this still has referrers. Go ahead and give it a current lru time
% and try the next one in the table.
NewLruTime = now(),
- true = ets:insert(couch_dbs_by_name, {DbName, {MainPid, NewLruTime}}),
+ true = ets:insert(couch_dbs_by_name, {DbName, {opened, MainPid, NewLruTime}}),
true = ets:insert(couch_dbs_by_pid, {MainPid, DbName}),
true = ets:delete(couch_dbs_by_lru, LruTime),
true = ets:insert(couch_dbs_by_lru, {NewLruTime, DbName}),
@@ -201,98 +206,160 @@ try_close_lru(StartTime) ->
end
end.
+get_lru() ->
+ get_lru(ets:first(couch_dbs_by_lru)).
+
+get_lru(LruTime) ->
+ [{LruTime, DbName}] = ets:lookup(couch_dbs_by_lru, LruTime),
+ case ets:member(couch_sys_dbs, DbName) of
+ false ->
+ LruTime;
+ true ->
+ [{_, {opened, MainPid, _}}] = ets:lookup(couch_dbs_by_name, DbName),
+ case couch_db:is_idle(MainPid) of
+ true ->
+ NextLru = ets:next(couch_dbs_by_lru, LruTime),
+ ok = shutdown_idle_db(DbName, MainPid, LruTime),
+ get_lru(NextLru);
+ false ->
+ get_lru(ets:next(couch_dbs_by_lru, LruTime))
+ end
+ end.
+
+shutdown_idle_db(DbName, MainPid, LruTime) ->
+ couch_util:shutdown_sync(MainPid),
+ true = ets:delete(couch_dbs_by_lru, LruTime),
+ true = ets:delete(couch_dbs_by_name, DbName),
+ true = ets:delete(couch_dbs_by_pid, MainPid),
+ true = ets:delete(couch_sys_dbs, DbName),
+ ok.
+
+open_async(Server, From, DbName, Filepath, Options) ->
+ Parent = self(),
+ Opener = spawn_link(fun() ->
+ Res = couch_db:start_link(DbName, Filepath, Options),
+ gen_server:call(
+ Parent, {open_result, DbName, Res, Options}, infinity
+ ),
+ unlink(Parent),
+ case Res of
+ {ok, DbReader} ->
+ unlink(DbReader);
+ _ ->
+ ok
+ end
+ end),
+ true = ets:insert(couch_dbs_by_name, {DbName, {opening, Opener, [From]}}),
+ true = ets:insert(couch_dbs_by_pid, {Opener, DbName}),
+ DbsOpen = case lists:member(sys_db, Options) of
+ true ->
+ true = ets:insert(couch_sys_dbs, {DbName, true}),
+ Server#server.dbs_open;
+ false ->
+ Server#server.dbs_open + 1
+ end,
+ Server#server{dbs_open = DbsOpen}.
+
handle_call({set_max_dbs_open, Max}, _From, Server) ->
{reply, ok, Server#server{max_dbs_open=Max}};
handle_call(get_server, _From, Server) ->
{reply, {ok, Server}, Server};
-handle_call({open, DbName, Options}, _From, Server) ->
+handle_call({open_result, DbName, {ok, OpenedDbPid}, Options}, _From, Server) ->
+ link(OpenedDbPid),
+ [{DbName, {opening,Opener,Froms}}] = ets:lookup(couch_dbs_by_name, DbName),
+ lists:foreach(fun({FromPid,_}=From) ->
+ gen_server:reply(From,
+ catch couch_db:open_ref_counted(OpenedDbPid, FromPid))
+ end, Froms),
+ LruTime = now(),
+ true = ets:insert(couch_dbs_by_name,
+ {DbName, {opened, OpenedDbPid, LruTime}}),
+ true = ets:delete(couch_dbs_by_pid, Opener),
+ true = ets:insert(couch_dbs_by_pid, {OpenedDbPid, DbName}),
+ true = ets:insert(couch_dbs_by_lru, {LruTime, DbName}),
+ case lists:member(create, Options) of
+ true ->
+ couch_db_update_notifier:notify({created, DbName});
+ false ->
+ ok
+ end,
+ {reply, ok, Server};
+handle_call({open_result, DbName, Error, Options}, _From, Server) ->
+ [{DbName, {opening,Opener,Froms}}] = ets:lookup(couch_dbs_by_name, DbName),
+ lists:foreach(fun(From) ->
+ gen_server:reply(From, Error)
+ end, Froms),
+ true = ets:delete(couch_dbs_by_name, DbName),
+ true = ets:delete(couch_dbs_by_pid, Opener),
+ DbsOpen = case lists:member(sys_db, Options) of
+ true ->
+ true = ets:delete(couch_sys_dbs, DbName),
+ Server#server.dbs_open;
+ false ->
+ Server#server.dbs_open - 1
+ end,
+ {reply, ok, Server#server{dbs_open = DbsOpen}};
+handle_call({open, DbName, Options}, {FromPid,_}=From, Server) ->
LruTime = now(),
case ets:lookup(couch_dbs_by_name, DbName) of
[] ->
- DbNameList = binary_to_list(DbName),
- case check_dbname(Server, DbNameList) of
- ok ->
- case maybe_close_lru_db(Server) of
- {ok, Server2} ->
- Filepath = get_full_filename(Server, DbNameList),
- case couch_db:start_link(DbName, Filepath, Options) of
- {ok, MainPid} ->
- true = ets:insert(couch_dbs_by_name, {DbName, {MainPid, LruTime}}),
- true = ets:insert(couch_dbs_by_pid, {MainPid, DbName}),
- true = ets:insert(couch_dbs_by_lru, {LruTime, DbName}),
- DbsOpen = Server2#server.dbs_open + 1,
- {reply, {ok, MainPid},
- Server2#server{dbs_open=DbsOpen}};
- Error ->
- {reply, Error, Server2}
- end;
- CloseError ->
- {reply, CloseError, Server}
- end;
- Error ->
- {reply, Error, Server}
- end;
- [{_, {MainPid, PrevLruTime}}] ->
- true = ets:insert(couch_dbs_by_name, {DbName, {MainPid, LruTime}}),
+ open_db(DbName, Server, Options, From);
+ [{_, {opening, Opener, Froms}}] ->
+ true = ets:insert(couch_dbs_by_name, {DbName, {opening, Opener, [From|Froms]}}),
+ {noreply, Server};
+ [{_, {opened, MainPid, PrevLruTime}}] ->
+ true = ets:insert(couch_dbs_by_name, {DbName, {opened, MainPid, LruTime}}),
true = ets:delete(couch_dbs_by_lru, PrevLruTime),
true = ets:insert(couch_dbs_by_lru, {LruTime, DbName}),
- {reply, {ok, MainPid}, Server}
+ {reply, couch_db:open_ref_counted(MainPid, FromPid), Server}
end;
-handle_call({create, DbName, Options}, _From, Server) ->
- DbNameList = binary_to_list(DbName),
- case check_dbname(Server, DbNameList) of
- ok ->
- Filepath = get_full_filename(Server, DbNameList),
-
- case ets:lookup(couch_dbs_by_name, DbName) of
- [] ->
- case maybe_close_lru_db(Server) of
- {ok, Server2} ->
- case couch_db:start_link(DbName, Filepath, [create|Options]) of
- {ok, MainPid} ->
- LruTime = now(),
- true = ets:insert(couch_dbs_by_name,
- {DbName, {MainPid, LruTime}}),
- true = ets:insert(couch_dbs_by_pid, {MainPid, DbName}),
- true = ets:insert(couch_dbs_by_lru, {LruTime, DbName}),
- DbsOpen = Server2#server.dbs_open + 1,
- couch_db_update_notifier:notify({created, DbName}),
- {reply, {ok, MainPid},
- Server2#server{dbs_open=DbsOpen}};
- Error ->
- {reply, Error, Server2}
- end;
- CloseError ->
- {reply, CloseError, Server}
- end;
- [_AlreadyRunningDb] ->
- {reply, file_exists, Server}
- end;
- Error ->
- {reply, Error, Server}
+handle_call({create, DbName, Options}, From, Server) ->
+ case ets:lookup(couch_dbs_by_name, DbName) of
+ [] ->
+ open_db(DbName, Server, [create | Options], From);
+ [_AlreadyRunningDb] ->
+ {reply, file_exists, Server}
end;
handle_call({delete, DbName, _Options}, _From, Server) ->
DbNameList = binary_to_list(DbName),
case check_dbname(Server, DbNameList) of
ok ->
FullFilepath = get_full_filename(Server, DbNameList),
- Server2 =
+ UpdateState =
case ets:lookup(couch_dbs_by_name, DbName) of
- [] -> Server;
- [{_, {Pid, LruTime}}] ->
- exit(Pid, kill),
- receive {'EXIT', Pid, _Reason} -> ok end,
+ [] -> false;
+ [{_, {opening, Pid, Froms}}] ->
+ couch_util:shutdown_sync(Pid),
+ true = ets:delete(couch_dbs_by_name, DbName),
+ true = ets:delete(couch_dbs_by_pid, Pid),
+ [gen_server:reply(F, not_found) || F <- Froms],
+ true;
+ [{_, {opened, Pid, LruTime}}] ->
+ couch_util:shutdown_sync(Pid),
true = ets:delete(couch_dbs_by_name, DbName),
true = ets:delete(couch_dbs_by_pid, Pid),
true = ets:delete(couch_dbs_by_lru, LruTime),
- Server#server{dbs_open=Server#server.dbs_open - 1}
+ true
+ end,
+ Server2 = case UpdateState of
+ true ->
+ DbsOpen = case ets:member(couch_sys_dbs, DbName) of
+ true ->
+ true = ets:delete(couch_sys_dbs, DbName),
+ Server#server.dbs_open;
+ false ->
+ Server#server.dbs_open - 1
+ end,
+ Server#server{dbs_open = DbsOpen};
+ false ->
+ Server
end,
%% Delete any leftover .compact files. If we don't do this a subsequent
%% request for this DB will try to open the .compact file and use it.
- file:delete(FullFilepath ++ ".compact"),
+ couch_file:delete(Server#server.root_dir, FullFilepath ++ ".compact"),
- case file:delete(FullFilepath) of
+ case couch_file:delete(Server#server.root_dir, FullFilepath) of
ok ->
couch_db_update_notifier:notify({deleted, DbName}),
{reply, ok, Server2};
@@ -310,15 +377,29 @@ handle_cast(Msg, _Server) ->
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
-
-handle_info({'EXIT', _Pid, config_change}, _Server) ->
- exit(kill);
-handle_info({'EXIT', Pid, _Reason}, #server{dbs_open=DbsOpen}=Server) ->
- [{Pid, DbName}] = ets:lookup(couch_dbs_by_pid, Pid),
- [{DbName, {Pid, LruTime}}] = ets:lookup(couch_dbs_by_name, DbName),
- true = ets:delete(couch_dbs_by_pid, Pid),
- true = ets:delete(couch_dbs_by_name, DbName),
- true = ets:delete(couch_dbs_by_lru, LruTime),
- {noreply, Server#server{dbs_open=DbsOpen - 1}};
-handle_info(Info, _Server) ->
- exit({unknown_message, Info}).
+
+handle_info({'EXIT', _Pid, config_change}, Server) ->
+ {noreply, shutdown, Server};
+handle_info(Error, _Server) ->
+ ?LOG_ERROR("Unexpected message, restarting couch_server: ~p", [Error]),
+ exit(kill).
+
+open_db(DbName, Server, Options, From) ->
+ DbNameList = binary_to_list(DbName),
+ case check_dbname(Server, DbNameList) of
+ ok ->
+ Filepath = get_full_filename(Server, DbNameList),
+ case lists:member(sys_db, Options) of
+ true ->
+ {noreply, open_async(Server, From, DbName, Filepath, Options)};
+ false ->
+ case maybe_close_lru_db(Server) of
+ {ok, Server2} ->
+ {noreply, open_async(Server2, From, DbName, Filepath, Options)};
+ CloseError ->
+ {reply, CloseError, Server}
+ end
+ end;
+ Error ->
+ {reply, Error, Server}
+ end.
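
couch_server's open path switches to the deferred-reply idiom: handle_call returns {noreply, ...} right away, callers pile up inside the {opening, Opener, Froms} entry, and gen_server:reply/2 answers all of them once the spawned opener reports back with {open_result, ...}. A self-contained sketch of just that idiom (module and message names are ours):

    -module(deferred_reply_sketch).
    -behaviour(gen_server).
    -export([start_link/0, open/1]).
    -export([init/1, handle_call/3, handle_cast/2, handle_info/2,
             terminate/2, code_change/3]).

    start_link() -> gen_server:start_link({local, ?MODULE}, ?MODULE, [], []).

    open(Name) -> gen_server:call(?MODULE, {open, Name}, infinity).

    init([]) -> {ok, []}.

    handle_call({open, Name}, From, Waiting) ->
        Server = self(),
        spawn_link(fun() ->
            %% stand-in for couch_db:start_link/3; report the result back
            gen_server:call(Server, {open_result, {ok, Name}}, infinity)
        end),
        {noreply, [From | Waiting]};   % caller parked, server stays free
    handle_call({open_result, Res}, _Opener, Waiting) ->
        [gen_server:reply(F, Res) || F <- Waiting],
        {reply, ok, []}.

    handle_cast(_Msg, State) -> {noreply, State}.
    handle_info(_Info, State) -> {noreply, State}.
    terminate(_Reason, _State) -> ok.
    code_change(_OldVsn, State, _Extra) -> {ok, State}.

This is also why the public open/create/delete calls above gain an infinity timeout: the reply may now arrive well after the default five seconds.
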
diff --git a/src/couchdb/couch_server_sup.erl b/src/couchdb/couch_server_sup.erl
index d89e987d..4f0445da 100644
--- a/src/couchdb/couch_server_sup.erl
+++ b/src/couchdb/couch_server_sup.erl
@@ -32,12 +32,7 @@ start_link(IniFiles) ->
end.
restart_core_server() ->
- supervisor:terminate_child(couch_primary_services, couch_server),
- supervisor:terminate_child(couch_secondary_services, stats_aggregator),
- supervisor:terminate_child(couch_secondary_services, stats_collector),
- supervisor:restart_child(couch_primary_services, couch_server),
- supervisor:restart_child(couch_secondary_services, stats_collector),
- supervisor:restart_child(couch_secondary_services, stats_aggregator).
+ init:restart().
couch_config_start_link_wrapper(IniFiles, FirstConfigPid) ->
case is_process_alive(FirstConfigPid) of
@@ -127,6 +122,13 @@ start_server(IniFiles) ->
Port = mochiweb_socket_server:get(couch_httpd, port),
io:format("Apache CouchDB has started. Time to relax.~n"),
?LOG_INFO("Apache CouchDB has started on http://~s:~w/", [Ip, Port]),
+
+ case couch_config:get("couchdb", "uri_file", null) of
+ null -> ok;
+ UriFile ->
+ Line = io_lib:format("http://~s:~w/~n", [Ip, Port]),
+ file:write_file(UriFile, Line)
+ end,
{ok, Pid}.
@@ -174,7 +176,7 @@ start_secondary_services() ->
{list_to_atom(Name),
{Module, Fun, Args},
permanent,
- brutal_kill,
+ 1000,
worker,
[Module]}
end
diff --git a/src/couchdb/couch_stats_collector.erl b/src/couchdb/couch_stats_collector.erl
index 59d62a6e..f7b9bb48 100644
--- a/src/couchdb/couch_stats_collector.erl
+++ b/src/couchdb/couch_stats_collector.erl
@@ -60,7 +60,7 @@ increment(Key) ->
Key2 = make_key(Key),
case catch ets:update_counter(?HIT_TABLE, Key2, 1) of
{'EXIT', {badarg, _}} ->
- true = ets:insert(?HIT_TABLE, {Key2, 1}),
+ catch ets:insert(?HIT_TABLE, {Key2, 1}),
ok;
_ ->
ok
@@ -70,16 +70,16 @@ decrement(Key) ->
Key2 = make_key(Key),
case catch ets:update_counter(?HIT_TABLE, Key2, -1) of
{'EXIT', {badarg, _}} ->
- true = ets:insert(?HIT_TABLE, {Key2, -1}),
+ catch ets:insert(?HIT_TABLE, {Key2, -1}),
ok;
_ -> ok
end.
record(Key, Value) ->
- true = ets:insert(?ABS_TABLE, {make_key(Key), Value}).
+ catch ets:insert(?ABS_TABLE, {make_key(Key), Value}).
clear(Key) ->
- true = ets:delete(?ABS_TABLE, make_key(Key)).
+ catch ets:delete(?ABS_TABLE, make_key(Key)).
track_process_count(Stat) ->
track_process_count(self(), Stat).
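
The collector's switch from "true = ets:insert(...)" to "catch ets:insert(...)" makes stats strictly best-effort: if a table is briefly absent (for instance while the collector restarts), the process recording the stat must not crash with it. The resulting pattern in isolation:

    -module(stats_sketch).
    -export([bump/2]).

    %% Best-effort increment: neither the update_counter nor the
    %% fallback insert may take the caller down.
    bump(Table, Key) ->
        case catch ets:update_counter(Table, Key, 1) of
            {'EXIT', {badarg, _}} ->
                catch ets:insert(Table, {Key, 1}),
                ok;
            _ ->
                ok
        end.
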
diff --git a/src/couchdb/couch_stream.erl b/src/couchdb/couch_stream.erl
index 2a873e4c..60af1c2b 100644
--- a/src/couchdb/couch_stream.erl
+++ b/src/couchdb/couch_stream.erl
@@ -24,7 +24,7 @@
-define(DEFAULT_STREAM_CHUNK, 16#00100000). % 1 meg chunks when streaming data
--export([open/1, close/1, write/2, foldl/4, foldl/5,
+-export([open/1, open/3, close/1, write/2, foldl/4, foldl/5, range_foldl/6, foldl_decode/6,
old_foldl/5,old_copy_to_new_stream/4]).
-export([copy_to_new_stream/3,old_read_term/2]).
-export([init/1, terminate/2, handle_call/3]).
@@ -39,14 +39,23 @@
buffer_len = 0,
max_buffer = 4096,
written_len = 0,
- md5
+ md5,
+ % md5 of the content without any transformation applied (e.g. compression)
+ % needed for the attachment upload integrity check (ticket 558)
+ identity_md5,
+ identity_len = 0,
+ encoding_fun,
+ end_encoding_fun
}).
%%% Interface functions %%%
open(Fd) ->
- gen_server:start_link(couch_stream, Fd, []).
+ open(Fd, identity, []).
+
+open(Fd, Encoding, Options) ->
+ gen_server:start_link(couch_stream, {Fd, Encoding, Options}, []).
close(Pid) ->
gen_server:call(Pid, close, infinity).
@@ -85,19 +94,124 @@ foldl(Fd, [Pos|Rest], Fun, Acc) ->
foldl(Fd, PosList, <<>>, Fun, Acc) ->
foldl(Fd, PosList, Fun, Acc);
foldl(Fd, PosList, Md5, Fun, Acc) ->
- foldl(Fd, PosList, Md5, erlang:md5_init(), Fun, Acc).
-
+ foldl(Fd, PosList, Md5, couch_util:md5_init(), Fun, Acc).
+
+foldl_decode(Fd, PosList, Md5, Enc, Fun, Acc) ->
+ {DecDataFun, DecEndFun} = case Enc of
+ gzip ->
+ ungzip_init();
+ identity ->
+ identity_enc_dec_funs()
+ end,
+ Result = foldl_decode(
+ DecDataFun, Fd, PosList, Md5, couch_util:md5_init(), Fun, Acc
+ ),
+ DecEndFun(),
+ Result.
foldl(_Fd, [], Md5, Md5Acc, _Fun, Acc) ->
- Md5 = erlang:md5_final(Md5Acc),
+ Md5 = couch_util:md5_final(Md5Acc),
Acc;
+foldl(Fd, [{Pos, _Size}], Md5, Md5Acc, Fun, Acc) -> % 0110 UPGRADE CODE
+ foldl(Fd, [Pos], Md5, Md5Acc, Fun, Acc);
foldl(Fd, [Pos], Md5, Md5Acc, Fun, Acc) ->
{ok, Bin} = couch_file:pread_iolist(Fd, Pos),
- Md5 = erlang:md5_final(erlang:md5_update(Md5Acc, Bin)),
+ Md5 = couch_util:md5_final(couch_util:md5_update(Md5Acc, Bin)),
Fun(Bin, Acc);
+foldl(Fd, [{Pos, _Size}|Rest], Md5, Md5Acc, Fun, Acc) ->
+ foldl(Fd, [Pos|Rest], Md5, Md5Acc, Fun, Acc);
foldl(Fd, [Pos|Rest], Md5, Md5Acc, Fun, Acc) ->
{ok, Bin} = couch_file:pread_iolist(Fd, Pos),
- foldl(Fd, Rest, Md5, erlang:md5_update(Md5Acc, Bin), Fun, Fun(Bin, Acc)).
+ foldl(Fd, Rest, Md5, couch_util:md5_update(Md5Acc, Bin), Fun, Fun(Bin, Acc)).
+
+range_foldl(Fd, PosList, From, To, Fun, Acc) ->
+ range_foldl(Fd, PosList, From, To, 0, Fun, Acc).
+
+range_foldl(_Fd, _PosList, _From, To, Off, _Fun, Acc) when Off >= To ->
+ Acc;
+range_foldl(Fd, [Pos|Rest], From, To, Off, Fun, Acc) when is_integer(Pos) -> % old-style attachment
+ {ok, Bin} = couch_file:pread_iolist(Fd, Pos),
+ range_foldl(Fd, [{Pos, iolist_size(Bin)}] ++ Rest, From, To, Off, Fun, Acc);
+range_foldl(Fd, [{_Pos, Size}|Rest], From, To, Off, Fun, Acc) when From > Off + Size ->
+ range_foldl(Fd, Rest, From, To, Off + Size, Fun, Acc);
+range_foldl(Fd, [{Pos, Size}|Rest], From, To, Off, Fun, Acc) ->
+ {ok, Bin} = couch_file:pread_iolist(Fd, Pos),
+ Bin1 = if
+ From =< Off andalso To >= Off + Size -> Bin; %% the whole block is covered
+ true ->
+ PrefixLen = clip(From - Off, 0, Size),
+ PostfixLen = clip(Off + Size - To, 0, Size),
+ MatchLen = Size - PrefixLen - PostfixLen,
+ <<_Prefix:PrefixLen/binary,Match:MatchLen/binary,_Postfix:PostfixLen/binary>> = iolist_to_binary(Bin),
+ Match
+ end,
+ range_foldl(Fd, Rest, From, To, Off + Size, Fun, Fun(Bin1, Acc)).
+
+clip(Value, Lo, Hi) ->
+ if
+ Value < Lo -> Lo;
+ Value > Hi -> Hi;
+ true -> Value
+ end.
+
+foldl_decode(_DecFun, _Fd, [], Md5, Md5Acc, _Fun, Acc) ->
+ Md5 = couch_util:md5_final(Md5Acc),
+ Acc;
+foldl_decode(DecFun, Fd, [{Pos, _Size}], Md5, Md5Acc, Fun, Acc) ->
+ foldl_decode(DecFun, Fd, [Pos], Md5, Md5Acc, Fun, Acc);
+foldl_decode(DecFun, Fd, [Pos], Md5, Md5Acc, Fun, Acc) ->
+ {ok, EncBin} = couch_file:pread_iolist(Fd, Pos),
+ Md5 = couch_util:md5_final(couch_util:md5_update(Md5Acc, EncBin)),
+ Bin = DecFun(EncBin),
+ Fun(Bin, Acc);
+foldl_decode(DecFun, Fd, [{Pos, _Size}|Rest], Md5, Md5Acc, Fun, Acc) ->
+ foldl_decode(DecFun, Fd, [Pos|Rest], Md5, Md5Acc, Fun, Acc);
+foldl_decode(DecFun, Fd, [Pos|Rest], Md5, Md5Acc, Fun, Acc) ->
+ {ok, EncBin} = couch_file:pread_iolist(Fd, Pos),
+ Bin = DecFun(EncBin),
+ Md5Acc2 = couch_util:md5_update(Md5Acc, EncBin),
+ foldl_decode(DecFun, Fd, Rest, Md5, Md5Acc2, Fun, Fun(Bin, Acc)).
+
+gzip_init(Options) ->
+ case couch_util:get_value(compression_level, Options, 0) of
+ Lvl when Lvl >= 1 andalso Lvl =< 9 ->
+ Z = zlib:open(),
+ % 15 = ?MAX_WBITS (defined in the zlib module)
+ % the 16 + ?MAX_WBITS formula was obtained by inspecting zlib:gzip/1
+ ok = zlib:deflateInit(Z, Lvl, deflated, 16 + 15, 8, default),
+ {
+ fun(Data) ->
+ zlib:deflate(Z, Data)
+ end,
+ fun() ->
+ Last = zlib:deflate(Z, [], finish),
+ ok = zlib:deflateEnd(Z),
+ ok = zlib:close(Z),
+ Last
+ end
+ };
+ _ ->
+ identity_enc_dec_funs()
+ end.
+
+ungzip_init() ->
+ Z = zlib:open(),
+ zlib:inflateInit(Z, 16 + 15),
+ {
+ fun(Data) ->
+ zlib:inflate(Z, Data)
+ end,
+ fun() ->
+ ok = zlib:inflateEnd(Z),
+ ok = zlib:close(Z)
+ end
+ }.
+
+identity_enc_dec_funs() ->
+ {
+ fun(Data) -> Data end,
+ fun() -> [] end
+ }.
write(_Pid, <<>>) ->
ok;
@@ -105,8 +219,21 @@ write(Pid, Bin) ->
gen_server:call(Pid, {write, Bin}, infinity).
-init(Fd) ->
- {ok, #stream{fd=Fd, md5=erlang:md5_init()}}.
+init({Fd, Encoding, Options}) ->
+ {EncodingFun, EndEncodingFun} = case Encoding of
+ identity ->
+ identity_enc_dec_funs();
+ gzip ->
+ gzip_init(Options)
+ end,
+ {ok, #stream{
+ fd=Fd,
+ md5=couch_util:md5_init(),
+ identity_md5=couch_util:md5_init(),
+ encoding_fun=EncodingFun,
+ end_encoding_fun=EndEncodingFun
+ }
+ }.
terminate(_Reason, _Stream) ->
ok.
@@ -120,39 +247,65 @@ handle_call({write, Bin}, _From, Stream) ->
buffer_len = BufferLen,
buffer_list = Buffer,
max_buffer = Max,
- md5 = Md5} = Stream,
+ md5 = Md5,
+ identity_md5 = IdenMd5,
+ identity_len = IdenLen,
+ encoding_fun = EncodingFun} = Stream,
if BinSize + BufferLen > Max ->
WriteBin = lists:reverse(Buffer, [Bin]),
- Md5_2 = erlang:md5_update(Md5, WriteBin),
- {ok, Pos} = couch_file:append_binary(Fd, WriteBin),
+ IdenMd5_2 = couch_util:md5_update(IdenMd5, WriteBin),
+ case EncodingFun(WriteBin) of
+ [] ->
+ % case where the encoder did some internal buffering
+ % (zlib does it for example)
+ WrittenLen2 = WrittenLen,
+ Md5_2 = Md5,
+ Written2 = Written;
+ WriteBin2 ->
+ {ok, Pos} = couch_file:append_binary(Fd, WriteBin2),
+ WrittenLen2 = WrittenLen + iolist_size(WriteBin2),
+ Md5_2 = couch_util:md5_update(Md5, WriteBin2),
+ Written2 = [{Pos, iolist_size(WriteBin2)}|Written]
+ end,
+
{reply, ok, Stream#stream{
- written_len=WrittenLen + BufferLen + BinSize,
- written_pointers=[Pos|Written],
+ written_len=WrittenLen2,
+ written_pointers=Written2,
buffer_list=[],
buffer_len=0,
- md5=Md5_2}};
+ md5=Md5_2,
+ identity_md5=IdenMd5_2,
+ identity_len=IdenLen + BinSize}};
true ->
{reply, ok, Stream#stream{
buffer_list=[Bin|Buffer],
- buffer_len=BufferLen + BinSize}}
+ buffer_len=BufferLen + BinSize,
+ identity_len=IdenLen + BinSize}}
end;
handle_call(close, _From, Stream) ->
#stream{
fd = Fd,
written_len = WrittenLen,
written_pointers = Written,
- buffer_len = BufferLen,
buffer_list = Buffer,
- md5 = Md5} = Stream,
-
- case Buffer of
+ md5 = Md5,
+ identity_md5 = IdenMd5,
+ identity_len = IdenLen,
+ encoding_fun = EncodingFun,
+ end_encoding_fun = EndEncodingFun} = Stream,
+
+ WriteBin = lists:reverse(Buffer),
+ IdenMd5Final = couch_util:md5_final(couch_util:md5_update(IdenMd5, WriteBin)),
+ WriteBin2 = EncodingFun(WriteBin) ++ EndEncodingFun(),
+ Md5Final = couch_util:md5_final(couch_util:md5_update(Md5, WriteBin2)),
+ Result = case WriteBin2 of
[] ->
- Result = {lists:reverse(Written), WrittenLen, erlang:md5_final(Md5)};
+ {lists:reverse(Written), WrittenLen, IdenLen, Md5Final, IdenMd5Final};
_ ->
- WriteBin = lists:reverse(Buffer),
- Md5Final = erlang:md5_final(erlang:md5_update(Md5, WriteBin)),
- {ok, Pos} = couch_file:append_binary(Fd, WriteBin),
- Result = {lists:reverse(Written, [Pos]), WrittenLen + BufferLen, Md5Final}
+ {ok, Pos} = couch_file:append_binary(Fd, WriteBin2),
+ StreamInfo = lists:reverse(Written, [{Pos, iolist_size(WriteBin2)}]),
+ StreamLen = WrittenLen + iolist_size(WriteBin2),
+ {StreamInfo, StreamLen, IdenLen, Md5Final, IdenMd5Final}
end,
{stop, normal, Result, Stream}.
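
gzip_init/1 and ungzip_init/0 above both pass 16 + 15 as the window-bits argument: 15 is zlib's maximum window, and adding 16 switches the stream to gzip framing instead of raw deflate, mirroring what zlib:gzip/1 does internally. A standalone round trip using the same flags:

    -module(gzip_sketch).
    -export([roundtrip/1]).

    roundtrip(Data) when is_binary(Data) ->
        Z = zlib:open(),
        ok = zlib:deflateInit(Z, 6, deflated, 16 + 15, 8, default),
        Gz = iolist_to_binary([zlib:deflate(Z, Data),
                               zlib:deflate(Z, [], finish)]),
        ok = zlib:deflateEnd(Z),
        ok = zlib:close(Z),
        U = zlib:open(),
        ok = zlib:inflateInit(U, 16 + 15),
        Data = iolist_to_binary(zlib:inflate(U, Gz)),  % asserts round trip
        ok = zlib:inflateEnd(U),
        ok = zlib:close(U),
        ok.
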
diff --git a/src/couchdb/couch_util.erl b/src/couchdb/couch_util.erl
index 6edfb781..c11ba994 100644
--- a/src/couchdb/couch_util.erl
+++ b/src/couchdb/couch_util.erl
@@ -12,18 +12,25 @@
-module(couch_util).
--export([priv_dir/0, start_driver/1,terminate_linked/1]).
+-export([priv_dir/0, start_driver/1, normpath/1]).
-export([should_flush/0, should_flush/1, to_existing_atom/1]).
-export([rand32/0, implode/2, collate/2, collate/3]).
--export([abs_pathname/1,abs_pathname/2, trim/1, ascii_lower/1]).
--export([encodeBase64/1, decodeBase64/1, encodeBase64Url/1, decodeBase64Url/1,
- to_hex/1,parse_term/1, dict_find/3]).
--export([file_read_size/1, get_nested_json_value/2, json_user_ctx/1]).
+-export([abs_pathname/1,abs_pathname/2, trim/1]).
+-export([encodeBase64Url/1, decodeBase64Url/1]).
+-export([to_hex/1, parse_term/1, dict_find/3]).
+-export([get_nested_json_value/2, json_user_ctx/1]).
+-export([proplist_apply_field/2, json_apply_field/2]).
-export([to_binary/1, to_integer/1, to_list/1, url_encode/1]).
-export([json_encode/1, json_decode/1]).
+-export([verify/2,simple_call/2,shutdown_sync/1]).
+-export([compressible_att_type/1]).
+-export([get_value/2, get_value/3]).
+-export([md5/1, md5_init/0, md5_update/2, md5_final/1]).
+-export([reorder_results/2]).
+-export([url_strip_password/1]).
+-export([encode_doc_id/1]).
-include("couch_db.hrl").
--include_lib("kernel/include/file.hrl").
% arbitrarily chosen amount of memory to use before flushing to disk
-define(FLUSH_MAX_MEM, 10000000).
@@ -48,23 +55,57 @@ start_driver(LibDir) ->
exit(erl_ddll:format_error(Error))
end.
+% Normalize a pathname by removing .. and . components.
+normpath(Path) ->
+ normparts(filename:split(Path), []).
+
+normparts([], Acc) ->
+ filename:join(lists:reverse(Acc));
+normparts([".." | RestParts], [_Drop | RestAcc]) ->
+ normparts(RestParts, RestAcc);
+normparts(["." | RestParts], Acc) ->
+ normparts(RestParts, Acc);
+normparts([Part | RestParts], Acc) ->
+ normparts(RestParts, [Part | Acc]).
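
A quick shell check of the traversal above, worked out from the four clauses:

    1> couch_util:normpath("a/b/../c/./d").
    "a/c/d"
    2> couch_util:normpath("./x/y/..").
    "x"

Note that a leading ".." has nothing to drop, so it is kept verbatim rather than raising.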
+
% Works like list_to_existing_atom, except the argument can be a list or a
% binary, and it returns the original value instead of raising an error when
% no such atom exists.
to_existing_atom(V) when is_list(V) ->
- try list_to_existing_atom(V) catch _ -> V end;
+ try list_to_existing_atom(V) catch _:_ -> V end;
to_existing_atom(V) when is_binary(V) ->
- try list_to_existing_atom(?b2l(V)) catch _ -> V end;
+ try list_to_existing_atom(?b2l(V)) catch _:_ -> V end;
to_existing_atom(V) when is_atom(V) ->
V.
+shutdown_sync(Pid) when not is_pid(Pid)->
+ ok;
+shutdown_sync(Pid) ->
+ MRef = erlang:monitor(process, Pid),
+ try
+ catch unlink(Pid),
+ catch exit(Pid, shutdown),
+ receive
+ {'DOWN', MRef, _, _, _} ->
+ ok
+ end
+ after
+ erlang:demonitor(MRef, [flush])
+ end.
+
-terminate_linked(normal) ->
- terminate_linked(shutdown);
-terminate_linked(Reason) ->
- {links, Links} = process_info(self(), links),
- [catch exit(Pid, Reason) || Pid <- Links],
- ok.
-
+simple_call(Pid, Message) ->
+ MRef = erlang:monitor(process, Pid),
+ try
+ Pid ! {self(), Message},
+ receive
+ {Pid, Result} ->
+ Result;
+ {'DOWN', MRef, _, _, Reason} ->
+ exit(Reason)
+ end
+ after
+ erlang:demonitor(MRef, [flush])
+ end.
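
simple_call/2 is a hand-rolled synchronous call: monitor the target, send {self(), Message}, then either receive {Pid, Result} or turn the 'DOWN' into an exit, so the caller can never hang on a dead process. A minimal sketch of a receive loop speaking this protocol (the loop itself is illustrative, not part of this patch):

    % Sketch (assumed): a process usable with couch_util:simple_call/2.
    % The reply must be tagged with the callee's own pid.
    loop(State) ->
        receive
            {From, get_state} ->
                From ! {self(), State},
                loop(State)
        end.

    % Caller side:
    % Pid = spawn(fun() -> loop(initial) end),
    % initial = couch_util:simple_call(Pid, get_state).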
to_hex([]) ->
[];
@@ -83,9 +124,19 @@ parse_term(List) ->
{ok, Tokens, _} = erl_scan:string(List ++ "."),
erl_parse:parse_term(Tokens).
+get_value(Key, List) ->
+ get_value(Key, List, undefined).
+
+get_value(Key, List, Default) ->
+ case lists:keysearch(Key, 1, List) of
+ {value, {Key,Value}} ->
+ Value;
+ false ->
+ Default
+ end.
get_nested_json_value({Props}, [Key|Keys]) ->
- case proplists:get_value(Key, Props, nil) of
+ case couch_util:get_value(Key, Props, nil) of
nil -> throw({not_found, <<"missing json key: ", Key/binary>>});
Value -> get_nested_json_value(Value, Keys)
end;
@@ -94,6 +145,19 @@ get_nested_json_value(Value, []) ->
get_nested_json_value(_NotJSONObj, _) ->
throw({not_found, json_mismatch}).
+proplist_apply_field(H, L) ->
+ {R} = json_apply_field(H, {L}),
+ R.
+
+json_apply_field(H, {L}) ->
+ json_apply_field(H, L, []).
+json_apply_field({Key, NewValue}, [{Key, _OldVal} | Headers], Acc) ->
+ json_apply_field({Key, NewValue}, Headers, Acc);
+json_apply_field({Key, NewValue}, [{OtherKey, OtherVal} | Headers], Acc) ->
+ json_apply_field({Key, NewValue}, Headers, [{OtherKey, OtherVal} | Acc]);
+json_apply_field({Key, NewValue}, [], Acc) ->
+ {[{Key, NewValue}|Acc]}.
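
json_apply_field/2 replaces (or adds) one field in an EJSON object:

    1> couch_util:json_apply_field({<<"a">>, 1}, {[{<<"a">>, 0}, {<<"b">>, 2}]}).
    {[{<<"a">>,1},{<<"b">>,2}]}

One caveat worked out from the clauses: the untouched fields are accumulated in reverse, so with two or more other keys their relative order flips (e.g. {[{<<"b">>,2},{<<"c">>,3}]} comes back as {[{<<"a">>,1},{<<"c">>,3},{<<"b">>,2}]}). Callers must not rely on field order.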
+
json_user_ctx(#db{name=DbName, user_ctx=Ctx}) ->
{[{<<"db">>, DbName},
{<<"name">>,Ctx#user_ctx.name},
@@ -140,18 +204,6 @@ separate_cmd_args(" " ++ Rest, CmdAcc) ->
separate_cmd_args([Char|Rest], CmdAcc) ->
separate_cmd_args(Rest, [Char | CmdAcc]).
-% lowercases string bytes that are the ascii characters A-Z.
-% All other characters/bytes are ignored.
-ascii_lower(String) ->
- ascii_lower(String, []).
-
-ascii_lower([], Acc) ->
- lists:reverse(Acc);
-ascii_lower([Char | RestString], Acc) when Char >= $A, Char =< $B ->
- ascii_lower(RestString, [Char + ($a-$A) | Acc]);
-ascii_lower([Char | RestString], Acc) ->
- ascii_lower(RestString, [Char | Acc]).
-
% Is a character whitespace?
is_whitespace($\s) -> true;
is_whitespace($\t) -> true;
@@ -230,114 +282,18 @@ should_flush(MemThreshHold) ->
ProcMem2+BinMem2 > MemThreshHold;
true -> false end.
+encodeBase64Url(Url) ->
+ Url1 = iolist_to_binary(re:replace(base64:encode(Url), "=+$", "")),
+ Url2 = iolist_to_binary(re:replace(Url1, "/", "_", [global])),
+ iolist_to_binary(re:replace(Url2, "\\+", "-", [global])).
-%%% Purpose : Base 64 encoding and decoding.
-%%% Copied from ssl_base_64 to avoid using the
-%%% erlang ssl library
-
--define(st(X,A), ((X-A+256) div 256)).
-
-%% A PEM encoding consists of characters A-Z, a-z, 0-9, +, / and
-%% =. Each character encodes a 6 bits value from 0 to 63 (A = 0, / =
-%% 63); = is a padding character.
-%%
-
-%%
-%% encode64(Bytes|Binary) -> binary
-%%
-%% Take 3 bytes a time (3 x 8 = 24 bits), and make 4 characters out of
-%% them (4 x 6 = 24 bits).
-%%
-encodeBase64(Bs) when is_list(Bs) ->
- encodeBase64(iolist_to_binary(Bs), <<>>);
-encodeBase64(Bs) ->
- encodeBase64(Bs, <<>>).
-
-encodeBase64(<<B:3/binary, Bs/binary>>, Acc) ->
- <<C1:6, C2:6, C3:6, C4:6>> = B,
- encodeBase64(Bs, <<Acc/binary, (enc(C1)), (enc(C2)), (enc(C3)), (enc(C4))>>);
-encodeBase64(<<B:2/binary>>, Acc) ->
- <<C1:6, C2:6, C3:6, _:6>> = <<B/binary, 0>>,
- <<Acc/binary, (enc(C1)), (enc(C2)), (enc(C3)), $=>>;
-encodeBase64(<<B:1/binary>>, Acc) ->
- <<C1:6, C2:6, _:12>> = <<B/binary, 0, 0>>,
- <<Acc/binary, (enc(C1)), (enc(C2)), $=, $=>>;
-encodeBase64(<<>>, Acc) ->
- Acc.
-
-encodeBase64Url(Bs) when is_list(Bs) ->
- encodeBase64Url(list_to_binary(Bs), <<>>);
-encodeBase64Url(Bs) ->
- encodeBase64Url(Bs, <<>>).
-
-encodeBase64Url(<<B:3/binary, Bs/binary>>, Acc) ->
- <<C1:6, C2:6, C3:6, C4:6>> = B,
- encodeBase64Url(Bs, <<Acc/binary, (encUrl(C1)), (encUrl(C2)), (encUrl(C3)), (encUrl(C4))>>);
-encodeBase64Url(<<B:2/binary>>, Acc) ->
- <<C1:6, C2:6, C3:6, _:6>> = <<B/binary, 0>>,
- <<Acc/binary, (encUrl(C1)), (encUrl(C2)), (encUrl(C3))>>;
-encodeBase64Url(<<B:1/binary>>, Acc) ->
- <<C1:6, C2:6, _:12>> = <<B/binary, 0, 0>>,
- <<Acc/binary, (encUrl(C1)), (encUrl(C2))>>;
-encodeBase64Url(<<>>, Acc) ->
- Acc.
-
-%%
-%% decodeBase64(BinaryChars) -> Binary
-%%
-decodeBase64(Cs) when is_list(Cs) ->
- decodeBase64(list_to_binary(Cs));
-decodeBase64(Cs) ->
- decode1(Cs, <<>>).
-
-decode1(<<C1, C2, $=, $=>>, Acc) ->
- <<B1, _:16>> = <<(dec(C1)):6, (dec(C2)):6, 0:12>>,
- <<Acc/binary, B1>>;
-decode1(<<C1, C2, C3, $=>>, Acc) ->
- <<B1, B2, _:8>> = <<(dec(C1)):6, (dec(C2)):6, (dec(C3)):6, (dec(0)):6>>,
- <<Acc/binary, B1, B2>>;
-decode1(<<C1, C2, C3, C4, Cs/binary>>, Acc) ->
- Bin = <<Acc/binary, (dec(C1)):6, (dec(C2)):6, (dec(C3)):6, (dec(C4)):6>>,
- decode1(Cs, Bin);
-decode1(<<>>, Acc) ->
- Acc.
-
-decodeBase64Url(Cs) when is_list(Cs) ->
- decodeBase64Url(list_to_binary(Cs));
-decodeBase64Url(Cs) ->
- decode1Url(Cs, <<>>).
-
-decode1Url(<<C1, C2>>, Acc) ->
- <<B1, _:16>> = <<(decUrl(C1)):6, (decUrl(C2)):6, 0:12>>,
- <<Acc/binary, B1>>;
-decode1Url(<<C1, C2, C3>>, Acc) ->
- <<B1, B2, _:8>> = <<(decUrl(C1)):6, (decUrl(C2)):6, (decUrl(C3)):6, (decUrl(0)):6>>,
- <<Acc/binary, B1, B2>>;
-decode1Url(<<C1, C2, C3, C4, Cs/binary>>, Acc) ->
- Bin = <<Acc/binary, (decUrl(C1)):6, (decUrl(C2)):6, (decUrl(C3)):6, (decUrl(C4)):6>>,
- decode1Url(Cs, Bin);
-decode1Url(<<>>, Acc) ->
- Acc.
-
-%% enc/1 and dec/1
-%%
-%% Mapping: 0-25 -> A-Z, 26-51 -> a-z, 52-61 -> 0-9, 62 -> +, 63 -> /
-%%
-enc(C) ->
- 65 + C + 6*?st(C,26) - 75*?st(C,52) -15*?st(C,62) + 3*?st(C,63).
-
-dec(C) ->
- 62*?st(C,43) + ?st(C,47) + (C-59)*?st(C,48) - 69*?st(C,65) - 6*?st(C,97).
-
-%% encUrl/1 and decUrl/1
-%%
-%% Mapping: 0-25 -> A-Z, 26-51 -> a-z, 52-61 -> 0-9, 62 -> -, 63 -> _
-%%
-encUrl(C) ->
- 65 + C + 6*?st(C,26) - 75*?st(C,52) -13*?st(C,62) + 49*?st(C,63).
-
-decUrl(C) ->
- 62*?st(C,45) + (C-58)*?st(C,48) - 69*?st(C,65) + 33*?st(C,95) - 39*?st(C,97).
+decodeBase64Url(Url64) ->
+ Url1 = re:replace(iolist_to_binary(Url64), "-", "+", [global]),
+ Url2 = iolist_to_binary(
+ re:replace(iolist_to_binary(Url1), "_", "/", [global])
+ ),
+ Padding = ?l2b(lists:duplicate((4 - size(Url2) rem 4) rem 4, $=)),
+ base64:decode(<<Url2/binary, Padding/binary>>).
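
The hand-rolled base64 tables below are replaced by the stdlib base64 module plus regex substitutions for the URL-safe alphabet ('+' <-> '-', '/' <-> '_', padding stripped on encode and restored on decode). The pair round-trips any binary:

    1> B = <<255,239,190,173>>, B =:= couch_util:decodeBase64Url(couch_util:encodeBase64Url(B)).
    true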
dict_find(Key, Dict, DefaultValue) ->
case dict:find(Key, Dict) of
@@ -347,14 +303,6 @@ dict_find(Key, Dict, DefaultValue) ->
DefaultValue
end.
-
-file_read_size(FileName) ->
- case file:read_file_info(FileName) of
- {ok, FileInfo} ->
- FileInfo#file_info.size;
- Error -> Error
- end.
-
to_binary(V) when is_binary(V) ->
V;
to_binary(V) when is_list(V) ->
@@ -423,3 +371,76 @@ json_decode(V) ->
_Type:_Error ->
throw({invalid_json,V})
end.
+
+verify([X|RestX], [Y|RestY], Result) ->
+ verify(RestX, RestY, (X bxor Y) bor Result);
+verify([], [], Result) ->
+ Result == 0.
+
+verify(<<X/binary>>, <<Y/binary>>) ->
+ verify(?b2l(X), ?b2l(Y));
+verify(X, Y) when is_list(X) and is_list(Y) ->
+ case length(X) == length(Y) of
+ true ->
+ verify(X, Y, 0);
+ false ->
+ false
+ end;
+verify(_X, _Y) -> false.
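
verify/2 is a constant-time comparison: it XORs every byte pair and ORs the differences into an accumulator, so the running time depends only on the input length, not on where the first mismatch sits. That is the standard defense against timing attacks on signature checks, and it is exactly how the oauth_hmac_sha1 and oauth_plaintext hunks further down use it:

    1> couch_util:verify(<<"deadbeef">>, <<"deadbeef">>).
    true
    2> couch_util:verify(<<"deadbeef">>, <<"deadbeee">>).
    false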
+
+compressible_att_type(MimeType) when is_binary(MimeType) ->
+ compressible_att_type(?b2l(MimeType));
+compressible_att_type(MimeType) ->
+ TypeExpList = re:split(
+ couch_config:get("attachments", "compressible_types", ""),
+ ", ?",
+ [{return, list}]
+ ),
+ lists:any(
+ fun(TypeExp) ->
+ Regexp = ["^\\s*", re:replace(TypeExp, "\\*", ".*"), "\\s*$"],
+ re:run(MimeType, Regexp, [caseless]) =/= nomatch
+ end,
+ [T || T <- TypeExpList, T /= []]
+ ).
+
+-spec md5(Data::(iolist() | binary())) -> Digest::binary().
+md5(Data) ->
+ try crypto:md5(Data) catch error:_ -> erlang:md5(Data) end.
+
+-spec md5_init() -> Context::binary().
+md5_init() ->
+ try crypto:md5_init() catch error:_ -> erlang:md5_init() end.
+
+-spec md5_update(Context::binary(), Data::(iolist() | binary())) ->
+ NewContext::binary().
+md5_update(Ctx, D) ->
+ try crypto:md5_update(Ctx,D) catch error:_ -> erlang:md5_update(Ctx,D) end.
+
+-spec md5_final(Context::binary()) -> Digest::binary().
+md5_final(Ctx) ->
+ try crypto:md5_final(Ctx) catch error:_ -> erlang:md5_final(Ctx) end.
+
+% Linear search is faster for small lists; length/1 alone takes ~0.5 ms on a 100k-element list.
+reorder_results(Keys, SortedResults) when length(Keys) < 100 ->
+ [couch_util:get_value(Key, SortedResults) || Key <- Keys];
+reorder_results(Keys, SortedResults) ->
+ KeyDict = dict:from_list(SortedResults),
+ [dict:fetch(Key, KeyDict) || Key <- Keys].
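
reorder_results/2 maps results delivered in key-sorted order back onto the caller's original key order:

    1> couch_util:reorder_results([c, a, b], [{a, 1}, {b, 2}, {c, 3}]).
    [3,1,2]

Below the 100-key threshold this is a linear scan per key; above it the list is loaded into a dict first, trading the build cost for cheaper lookups.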
+
+url_strip_password(Url) ->
+ re:replace(Url,
+ "http(s)?://([^:]+):[^@]+@(.*)$",
+ "http\\1://\\2:*****@\\3",
+ [{return, list}]).
+
+encode_doc_id(#doc{id = Id}) ->
+ encode_doc_id(Id);
+encode_doc_id(Id) when is_list(Id) ->
+ encode_doc_id(?l2b(Id));
+encode_doc_id(<<"_design/", Rest/binary>>) ->
+ "_design/" ++ url_encode(Rest);
+encode_doc_id(<<"_local/", Rest/binary>>) ->
+ "_local/" ++ url_encode(Rest);
+encode_doc_id(Id) ->
+ url_encode(Id).
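
encode_doc_id/1 keeps the _design/ and _local/ namespace prefixes literal, so the URL router still recognizes them, and escapes only the remainder:

    couch_util:encode_doc_id(<<"_design/has/slash">>).
    %% -> "_design/" ++ url_encode(<<"has/slash">>), i.e. the embedded slash
    %% is percent-escaped while the prefix stays routable; the exact escape
    %% casing depends on url_encode/1, which is not part of this hunk.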
diff --git a/src/couchdb/couch_view.erl b/src/couchdb/couch_view.erl
index f80ce434..911f1aa6 100644
--- a/src/couchdb/couch_view.erl
+++ b/src/couchdb/couch_view.erl
@@ -29,11 +29,9 @@ start_link() ->
gen_server:start_link({local, couch_view}, couch_view, [], []).
get_temp_updater(DbName, Language, DesignOptions, MapSrc, RedSrc) ->
- % make temp group
- % do we need to close this db?
- {ok, _Db, Group} =
+ {ok, Group} =
couch_view_group:open_temp_group(DbName, Language, DesignOptions, MapSrc, RedSrc),
- case gen_server:call(couch_view, {get_group_server, DbName, Group}) of
+ case gen_server:call(couch_view, {get_group_server, DbName, Group}, infinity) of
{ok, Pid} ->
Pid;
Error ->
@@ -41,11 +39,9 @@ get_temp_updater(DbName, Language, DesignOptions, MapSrc, RedSrc) ->
end.
get_group_server(DbName, GroupId) ->
- % get signature for group
case couch_view_group:open_db_group(DbName, GroupId) of
- % do we need to close this db?
- {ok, _Db, Group} ->
- case gen_server:call(couch_view, {get_group_server, DbName, Group}) of
+ {ok, Group} ->
+ case gen_server:call(couch_view, {get_group_server, DbName, Group}, infinity) of
{ok, Pid} ->
Pid;
Error ->
@@ -58,11 +54,22 @@ get_group_server(DbName, GroupId) ->
get_group(Db, GroupId, Stale) ->
MinUpdateSeq = case Stale of
ok -> 0;
+ update_after -> 0;
_Else -> couch_db:get_update_seq(Db)
end,
- couch_view_group:request_group(
- get_group_server(couch_db:name(Db), GroupId),
- MinUpdateSeq).
+ GroupPid = get_group_server(couch_db:name(Db), GroupId),
+ Result = couch_view_group:request_group(GroupPid, MinUpdateSeq),
+ case Stale of
+ update_after ->
+ % best effort, process might die
+ spawn(fun() ->
+ LastSeq = couch_db:get_update_seq(Db),
+ couch_view_group:request_group(GroupPid, LastSeq)
+ end);
+ _ ->
+ ok
+ end,
+ Result.
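
This adds a third staleness mode: stale=ok never blocks and never updates, no stale option blocks until the index reaches the db's current sequence, and the new update_after returns whatever the index has now and then fires a background catch-up. A hedged sketch of a caller, using get_group/3 as defined above (whether it is exported is not shown in this hunk):

    % Sketch: serve a view without blocking on indexing.
    {ok, Db} = couch_db:open_int(<<"mydb">>, []),
    % assuming request_group's usual {ok, Group} reply shape
    {ok, Group} = get_group(Db, <<"_design/foo">>, update_after),
    % Group reflects the last completed update; a spawned process is
    % already asking the group server to catch up to Db's update seq.

The spawn is deliberately unlinked and unsupervised ("best effort, process might die"): losing the catch-up only delays freshness, it never corrupts the index.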
get_temp_group(Db, Language, DesignOptions, MapSrc, RedSrc) ->
couch_view_group:request_group(
@@ -80,7 +87,7 @@ cleanup_index_files(Db) ->
% make unique list of group sigs
Sigs = lists:map(fun(#doc{id = GroupId}) ->
{ok, Info} = get_group_info(Db, GroupId),
- ?b2l(proplists:get_value(signature, Info))
+ ?b2l(couch_util:get_value(signature, Info))
end, [DD||DD <- DesignDocs, DD#doc.deleted == false]),
FileList = list_index_files(Db),
@@ -90,11 +97,12 @@ cleanup_index_files(Db) ->
% filter out the ones in use
DeleteFiles = [FilePath
- || FilePath <- FileList,
- re:run(FilePath, RegExp, [{capture, none}]) =:= nomatch],
+ || FilePath <- FileList,
+ re:run(FilePath, RegExp, [{capture, none}]) =:= nomatch],
% delete unused files
?LOG_DEBUG("deleting unused view index files: ~p",[DeleteFiles]),
- [file:delete(File)||File <- DeleteFiles],
+ RootDir = couch_config:get("couchdb", "view_index_dir"),
+ [couch_file:delete(RootDir,File,false)||File <- DeleteFiles],
ok.
list_index_files(Db) ->
@@ -266,46 +274,65 @@ init([]) ->
ets:new(group_servers_by_sig, [set, protected, named_table]),
ets:new(couch_groups_by_updater, [set, private, named_table]),
process_flag(trap_exit, true),
+ ok = couch_file:init_delete_dir(RootDir),
{ok, #server{root_dir=RootDir}}.
-terminate(Reason, _Srv) ->
- couch_util:terminate_linked(Reason),
+terminate(_Reason, _Srv) ->
+ [couch_util:shutdown_sync(Pid) || {Pid, _} <-
+ ets:tab2list(couch_groups_by_updater)],
ok.
-handle_call({get_group_server, DbName,
- #group{name=GroupId,sig=Sig}=Group}, _From, #server{root_dir=Root}=Server) ->
+handle_call({get_group_server, DbName, #group{sig=Sig}=Group}, From,
+ #server{root_dir=Root}=Server) ->
case ets:lookup(group_servers_by_sig, {DbName, Sig}) of
[] ->
- ?LOG_DEBUG("Spawning new group server for view group ~s in database ~s.",
- [GroupId, DbName]),
- case (catch couch_view_group:start_link({Root, DbName, Group})) of
- {ok, NewPid} ->
- add_to_ets(NewPid, DbName, Sig),
- {reply, {ok, NewPid}, Server};
- Error ->
- {reply, Error, Server}
- end;
+ spawn_monitor(fun() -> new_group(Root, DbName, Group) end),
+ ets:insert(group_servers_by_sig, {{DbName, Sig}, [From]}),
+ {noreply, Server};
+ [{_, WaitList}] when is_list(WaitList) ->
+ ets:insert(group_servers_by_sig, {{DbName, Sig}, [From | WaitList]}),
+ {noreply, Server};
[{_, ExistingPid}] ->
{reply, {ok, ExistingPid}, Server}
- end.
+ end;
+
+handle_call({reset_indexes, DbName}, _From, #server{root_dir=Root}=Server) ->
+ do_reset_indexes(DbName, Root),
+ {reply, ok, Server}.
handle_cast({reset_indexes, DbName}, #server{root_dir=Root}=Server) ->
+ do_reset_indexes(DbName, Root),
+ {noreply, Server}.
+
+new_group(Root, DbName, #group{name=GroupId, sig=Sig} = Group) ->
+ ?LOG_DEBUG("Spawning new group server for view group ~s in database ~s.",
+ [GroupId, DbName]),
+ case (catch couch_view_group:start_link({Root, DbName, Group})) of
+ {ok, NewPid} ->
+ unlink(NewPid),
+ exit({DbName, Sig, {ok, NewPid}});
+ {error, invalid_view_seq} ->
+ ok = gen_server:call(couch_view, {reset_indexes, DbName}),
+ new_group(Root, DbName, Group);
+ Error ->
+ exit({DbName, Sig, Error})
+ end.
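
new_group/3 runs in a throwaway process and hands its result back through its exit reason; the server's 'DOWN' handler below fans the reply out to every caller parked in the wait list, which keeps the gen_server responsive while a view group (possibly a slow index open) boots. The idiom in isolation:

    % Sketch of the reply-via-exit-reason idiom used here.
    Work = fun() -> lists:seq(1, 3) end,            % stand-in workload
    {_Pid, Ref} = spawn_monitor(fun() -> exit({tag, Work()}) end),
    receive
        {'DOWN', Ref, process, _, {tag, Result}} -> Result
    end.

Note the {error, invalid_view_seq} branch: the helper resets the indexes through a synchronous call back into couch_view and retries, which is why reset_indexes now also exists as a handle_call clause above.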
+
+do_reset_indexes(DbName, Root) ->
% shut down all the updaters and clear the files; the db has changed
Names = ets:lookup(couch_groups_by_db, DbName),
lists:foreach(
fun({_DbName, Sig}) ->
?LOG_DEBUG("Killing update process for view group ~s in database ~s.", [Sig, DbName]),
[{_, Pid}] = ets:lookup(group_servers_by_sig, {DbName, Sig}),
- exit(Pid, kill),
- receive {'EXIT', Pid, _} ->
- delete_from_ets(Pid, DbName, Sig)
- end
+ couch_util:shutdown_sync(Pid),
+ delete_from_ets(Pid, DbName, Sig)
end, Names),
delete_index_dir(Root, DbName),
- file:delete(Root ++ "/." ++ binary_to_list(DbName) ++ "_temp"),
- {noreply, Server}.
+ RootDelDir = couch_config:get("couchdb", "view_index_dir"),
+ couch_file:delete(RootDelDir, Root ++ "/." ++ ?b2l(DbName) ++ "_temp").
handle_info({'EXIT', FromPid, Reason}, Server) ->
case ets:lookup(couch_groups_by_updater, FromPid) of
@@ -319,6 +346,15 @@ handle_info({'EXIT', FromPid, Reason}, Server) ->
[{_, {DbName, GroupId}}] ->
delete_from_ets(FromPid, DbName, GroupId)
end,
+ {noreply, Server};
+
+handle_info({'DOWN', _, _, _, {DbName, Sig, Reply}}, Server) ->
+ [{_, WaitList}] = ets:lookup(group_servers_by_sig, {DbName, Sig}),
+ [gen_server:reply(From, Reply) || From <- WaitList],
+ case Reply of {ok, NewPid} ->
+ link(NewPid),
+ add_to_ets(NewPid, DbName, Sig);
+ _ -> ok end,
{noreply, Server}.
add_to_ets(Pid, DbName, Sig) ->
@@ -336,19 +372,19 @@ code_change(_OldVsn, State, _Extra) ->
delete_index_dir(RootDir, DbName) ->
- nuke_dir(RootDir ++ "/." ++ ?b2l(DbName) ++ "_design").
+ nuke_dir(RootDir, RootDir ++ "/." ++ ?b2l(DbName) ++ "_design").
-nuke_dir(Dir) ->
+nuke_dir(RootDelDir, Dir) ->
case file:list_dir(Dir) of
{error, enoent} -> ok; % doesn't exist
{ok, Files} ->
lists:foreach(
fun(File)->
Full = Dir ++ "/" ++ File,
- case file:delete(Full) of
+ case couch_file:delete(RootDelDir, Full, false) of
ok -> ok;
{error, eperm} ->
- ok = nuke_dir(Full)
+ ok = nuke_dir(RootDelDir, Full)
end
end,
Files),
@@ -358,81 +394,67 @@ nuke_dir(Dir) ->
% keys come back in the language of btree - tuples.
less_json_ids({JsonA, IdA}, {JsonB, IdB}) ->
- case JsonA == JsonB of
- false ->
- less_json(JsonA, JsonB);
- true ->
- IdA < IdB
- end.
-
-
-less_json(A, B) ->
- TypeA = type_sort(A),
- TypeB = type_sort(B),
- if TypeA == TypeB ->
- less_same_type(A, B);
- true ->
- TypeA < TypeB
+ case less_json0(JsonA, JsonB) of
+ 0 ->
+ IdA < IdB;
+ Result ->
+ Result < 0
end.
-type_sort(V) when is_atom(V) -> 0;
-type_sort(V) when is_integer(V) -> 1;
-type_sort(V) when is_float(V) -> 1;
-type_sort(V) when is_binary(V) -> 2;
-type_sort(V) when is_list(V) -> 3;
-type_sort({V}) when is_list(V) -> 4;
-type_sort(V) when is_tuple(V) -> 5.
+less_json(A,B) ->
+ less_json0(A,B) < 0.
+
+less_json0(A,A) -> 0;
+
+less_json0(A,B) when is_atom(A), is_atom(B) -> atom_sort(A) - atom_sort(B);
+less_json0(A,_) when is_atom(A) -> -1;
+less_json0(_,B) when is_atom(B) -> 1;
+
+less_json0(A,B) when is_number(A), is_number(B) -> A - B;
+less_json0(A,_) when is_number(A) -> -1;
+less_json0(_,B) when is_number(B) -> 1;
+
+less_json0(A,B) when is_binary(A), is_binary(B) -> couch_util:collate(A,B);
+less_json0(A,_) when is_binary(A) -> -1;
+less_json0(_,B) when is_binary(B) -> 1;
+
+less_json0(A,B) when is_list(A), is_list(B) -> less_list(A,B);
+less_json0(A,_) when is_list(A) -> -1;
+less_json0(_,B) when is_list(B) -> 1;
+less_json0({A},{B}) when is_list(A), is_list(B) -> less_props(A,B);
+less_json0({A},_) when is_list(A) -> -1;
+less_json0(_,{B}) when is_list(B) -> 1.
atom_sort(null) -> 1;
atom_sort(false) -> 2;
atom_sort(true) -> 3.
-
-less_same_type(A,B) when is_atom(A) ->
- atom_sort(A) < atom_sort(B);
-less_same_type(A,B) when is_binary(A) ->
- couch_util:collate(A, B) < 0;
-less_same_type({AProps}, {BProps}) ->
- less_props(AProps, BProps);
-less_same_type(A, B) when is_list(A) ->
- less_list(A, B);
-less_same_type(A, B) ->
- A < B.
-
less_props([], [_|_]) ->
- true;
+ -1;
less_props(_, []) ->
- false;
+ 1;
less_props([{AKey, AValue}|RestA], [{BKey, BValue}|RestB]) ->
case couch_util:collate(AKey, BKey) of
- -1 -> true;
- 1 -> false;
0 ->
- case less_json(AValue, BValue) of
- true -> true;
- false ->
- case less_json(BValue, AValue) of
- true -> false;
- false ->
- less_props(RestA, RestB)
- end
- end
+ case less_json0(AValue, BValue) of
+ 0 ->
+ less_props(RestA, RestB);
+ Result ->
+ Result
+ end;
+ Result ->
+ Result
end.
less_list([], [_|_]) ->
- true;
+ -1;
less_list(_, []) ->
- false;
+ 1;
less_list([A|RestA], [B|RestB]) ->
- case less_json(A,B) of
- true -> true;
- false ->
- case less_json(B,A) of
- true -> false;
- false ->
- less_list(RestA, RestB)
- end
+ case less_json0(A,B) of
+ 0 ->
+ less_list(RestA, RestB);
+ Result ->
+ Result
end.
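
Returning a signed number instead of a boolean turns the comparator into a three-way compare, so equality no longer has to be probed with a second, reversed less_json call (the pattern the deleted code used in less_props and less_list). The clause order encodes CouchDB's view collation: null < false < true < numbers < strings < arrays < objects. Worked example: less_json(null, true) compares two atoms, so it evaluates atom_sort(null) - atom_sort(true) = 1 - 3 = -2, which is negative, hence true:

    1> couch_view:less_json(null, true).
    true
    2> couch_view:less_json([1, 2], 3).    % numbers sort before arrays
    false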
-
-
diff --git a/src/couchdb/couch_view_group.erl b/src/couchdb/couch_view_group.erl
index 6589bc6a..3cf829b6 100644
--- a/src/couchdb/couch_view_group.erl
+++ b/src/couchdb/couch_view_group.erl
@@ -32,7 +32,8 @@
compactor_pid=nil,
waiting_commit=false,
waiting_list=[],
- ref_counter=nil
+ ref_counter=nil,
+ db_update_notifier=nil
}).
% api methods
@@ -75,23 +76,37 @@ start_link(InitArgs) ->
end.
% init creates a closure which spawns the appropriate view_updater.
-init({InitArgs, ReturnPid, Ref}) ->
+init({{_, DbName, _} = InitArgs, ReturnPid, Ref}) ->
process_flag(trap_exit, true),
- case prepare_group(InitArgs, false) of
- {ok, #group{db=Db, fd=Fd}=Group} ->
- couch_db:monitor(Db),
- Owner = self(),
- Pid = spawn_link(fun()-> couch_view_updater:update(Owner, Group) end),
- {ok, RefCounter} = couch_ref_counter:start([Fd]),
- {ok, #group_state{
- db_name=couch_db:name(Db),
- init_args=InitArgs,
- updater_pid = Pid,
- group=Group,
- ref_counter=RefCounter}};
+ try prepare_group(InitArgs, false) of
+ {ok, #group{db=Db, fd=Fd, current_seq=Seq}=Group} ->
+ case Seq > couch_db:get_update_seq(Db) of
+ true ->
+ ReturnPid ! {Ref, self(), {error, invalid_view_seq}},
+ ignore;
+ _ ->
+ couch_db:monitor(Db),
+ {ok, RefCounter} = couch_ref_counter:start([Fd]),
+ Server = self(),
+ {ok, Notifier} = couch_db_update_notifier:start_link(
+ fun({compacted, DbName1}) when DbName1 =:= DbName ->
+ ok = gen_server:cast(Server, reopen_db);
+ (_) ->
+ ok
+ end),
+ {ok, #group_state{
+ db_update_notifier=Notifier,
+ db_name=couch_db:name(Db),
+ init_args=InitArgs,
+ group=Group,
+ ref_counter=RefCounter}}
+ end;
Error ->
ReturnPid ! {Ref, self(), Error},
ignore
+ catch exit:no_db_file ->
+ ReturnPid ! {Ref, self(), {error, no_db_file}},
+ ignore
end.
@@ -113,11 +128,11 @@ init({InitArgs, ReturnPid, Ref}) ->
handle_call({request_group, RequestSeq}, From,
#group_state{
db_name=DbName,
- group=#group{current_seq=Seq}=Group,
+ group=#group{current_seq=Seq, db=OldDb}=Group,
updater_pid=nil,
waiting_list=WaitList
}=State) when RequestSeq > Seq ->
- {ok, Db} = couch_db:open(DbName, []),
+ {ok, Db} = reopen_db(DbName, OldDb),
Group2 = Group#group{db=Db},
Owner = self(),
Pid = spawn_link(fun()-> couch_view_updater:update(Owner, Group2) end),
@@ -152,11 +167,11 @@ handle_call(request_group_info, _From, State) ->
handle_cast({start_compact, CompactFun}, #group_state{compactor_pid=nil}
= State) ->
#group_state{
- group = #group{name = GroupId, sig = GroupSig} = Group,
+ group = #group{name = GroupId, sig = GroupSig, db = OldDb} = Group,
init_args = {RootDir, DbName, _}
} = State,
?LOG_INFO("View index compaction starting for ~s ~s", [DbName, GroupId]),
- {ok, Db} = couch_db:open(DbName, []),
+ {ok, Db} = reopen_db(DbName, OldDb),
{ok, Fd} = open_index_file(compact, RootDir, DbName, GroupSig),
NewGroup = reset_file(Db, Fd, DbName, Group),
Pid = spawn_link(fun() -> CompactFun(Group, NewGroup) end),
@@ -170,15 +185,16 @@ handle_cast({compact_done, #group{current_seq=NewSeq} = NewGroup},
when NewSeq >= OldSeq ->
#group_state{
group = #group{name=GroupId, fd=OldFd, sig=GroupSig} = Group,
- init_args = {RootDir, DbName, _},
+ init_args = {RootDir, DbName, _},
updater_pid = UpdaterPid,
+ compactor_pid = CompactorPid,
ref_counter = RefCounter
} = State,
?LOG_INFO("View index compaction complete for ~s ~s", [DbName, GroupId]),
FileName = index_file_name(RootDir, DbName, GroupSig),
CompactName = index_file_name(compact, RootDir, DbName, GroupSig),
- file:delete(FileName),
+ ok = couch_file:delete(RootDir, FileName),
ok = file:rename(CompactName, FileName),
%% if an updater is running, kill it and start a new one
@@ -193,6 +209,8 @@ handle_cast({compact_done, #group{current_seq=NewSeq} = NewGroup},
end,
%% cleanup old group
+ unlink(CompactorPid),
+ receive {'EXIT', CompactorPid, normal} -> ok after 0 -> ok end,
unlink(OldFd),
couch_ref_counter:drop(RefCounter),
{ok, NewRefCounter} = couch_ref_counter:start([NewGroup#group.fd]),
@@ -216,13 +234,14 @@ handle_cast({compact_done, NewGroup}, State) ->
?LOG_INFO("View index compaction still behind for ~s ~s -- current: ~p " ++
"compact: ~p", [DbName, GroupId, CurrentSeq, NewGroup#group.current_seq]),
couch_db:close(NewGroup#group.db),
- {ok, Db} = couch_db:open(DbName, []),
Pid = spawn_link(fun() ->
+ {ok, Db} = couch_db:open_int(DbName, []),
{_,Ref} = erlang:spawn_monitor(fun() ->
couch_view_updater:update(nil, NewGroup#group{db = Db})
end),
receive
{'DOWN', Ref, _, _, {new_group, NewGroup2}} ->
+ couch_db:close(Db),
#group{name=GroupId} = NewGroup2,
Pid2 = couch_view:get_group_server(DbName, GroupId),
gen_server:cast(Pid2, {compact_done, NewGroup2})
@@ -246,10 +265,14 @@ handle_cast({partial_update, Pid, NewGroup}, #group_state{updater_pid=Pid}
{noreply, State#group_state{group=NewGroup, waiting_commit=true}};
handle_cast({partial_update, _, _}, State) ->
%% message from an old (probably pre-compaction) updater; ignore
- {noreply, State}.
+ {noreply, State};
+
+handle_cast(reopen_db, #group_state{group = Group, db_name = DbName} = State) ->
+ {ok, Db} = reopen_db(DbName, Group#group.db),
+ {noreply, State#group_state{group = Group#group{db = Db}}}.
handle_info(delayed_commit, #group_state{db_name=DbName,group=Group}=State) ->
- {ok, Db} = couch_db:open(DbName, []),
+ {ok, Db} = couch_db:open_int(DbName, []),
CommittedSeq = couch_db:get_committed_update_seq(Db),
couch_db:close(Db),
if CommittedSeq >= Group#group.current_seq ->
@@ -284,7 +307,7 @@ handle_info({'EXIT', FromPid, {new_group, #group{db=Db}=Group}},
group=Group#group{db=nil}, updater_pid=nil}};
StillWaiting ->
% we still have some waiters, reopen the database and reupdate the index
- {ok, Db2} = couch_db:open(DbName, []),
+ {ok, Db2} = couch_db:open_int(DbName, []),
Group2 = Group#group{db=Db2},
Owner = self(),
Pid = spawn_link(fun() -> couch_view_updater:update(Owner, Group2) end),
@@ -332,9 +355,10 @@ handle_info({'DOWN',_,_,_,_}, State) ->
terminate(Reason, #group_state{updater_pid=Update, compactor_pid=Compact}=S) ->
+ couch_db_update_notifier:stop(S#group_state.db_update_notifier),
reply_all(S, Reason),
- catch exit(Update, Reason),
- catch exit(Compact, Reason),
+ couch_util:shutdown_sync(Update),
+ couch_util:shutdown_sync(Compact),
ok.
code_change(_OldVsn, State, _Extra) ->
@@ -363,8 +387,8 @@ reply_all(#group_state{waiting_list=WaitList}=State, Reply) ->
[catch gen_server:reply(Pid, Reply) || {Pid, _} <- WaitList],
State#group_state{waiting_list=[]}.
-prepare_group({RootDir, DbName, #group{sig=Sig}=Group}, ForceReset)->
- case couch_db:open(DbName, []) of
+prepare_group({RootDir, DbName, #group{sig=Sig, db=OldDb}=Group}, ForceReset)->
+ case reopen_db(DbName, OldDb) of
{ok, Db} ->
case open_index_file(RootDir, DbName, Sig) of
{ok, Fd} ->
@@ -393,11 +417,15 @@ prepare_group({RootDir, DbName, #group{sig=Sig}=Group}, ForceReset)->
get_index_header_data(#group{current_seq=Seq, purge_seq=PurgeSeq,
id_btree=IdBtree,views=Views}) ->
- ViewStates = [couch_btree:get_state(Btree) || #view{btree=Btree} <- Views],
- #index_header{seq=Seq,
- purge_seq=PurgeSeq,
- id_btree_state=couch_btree:get_state(IdBtree),
- view_states=ViewStates}.
+ ViewStates = [
+ {couch_btree:get_state(V#view.btree), V#view.update_seq, V#view.purge_seq} || V <- Views
+ ],
+ #index_header{
+ seq=Seq,
+ purge_seq=PurgeSeq,
+ id_btree_state=couch_btree:get_state(IdBtree),
+ view_states=ViewStates
+ }.
hex_sig(GroupSig) ->
couch_util:to_hex(?b2l(GroupSig)).
@@ -429,7 +457,7 @@ open_index_file(compact, RootDir, DbName, GroupSig) ->
end.
open_temp_group(DbName, Language, DesignOptions, MapSrc, RedSrc) ->
- case couch_db:open(DbName, []) of
+ case couch_db:open_int(DbName, []) of
{ok, Db} ->
View = #view{map_names=[<<"_temp">>],
id_num=0,
@@ -437,8 +465,8 @@ open_temp_group(DbName, Language, DesignOptions, MapSrc, RedSrc) ->
def=MapSrc,
reduce_funs= if RedSrc==[] -> []; true -> [{<<"_temp">>, RedSrc}] end,
options=DesignOptions},
-
- {ok, Db, set_view_sig(#group{name = <<"_temp">>, db=Db, views=[View],
+ couch_db:close(Db),
+ {ok, set_view_sig(#group{name = <<"_temp">>, lib={[]}, views=[View],
def_lang=Language, design_options=DesignOptions})};
Error ->
Error
@@ -446,16 +474,48 @@ open_temp_group(DbName, Language, DesignOptions, MapSrc, RedSrc) ->
set_view_sig(#group{
views=Views,
+ lib={[]},
+ def_lang=Language,
+ design_options=DesignOptions}=G) ->
+ ViewInfo = [old_view_format(V) || V <- Views],
+ G#group{sig=couch_util:md5(term_to_binary({ViewInfo, Language, DesignOptions}))};
+set_view_sig(#group{
+ views=Views,
+ lib=Lib,
def_lang=Language,
design_options=DesignOptions}=G) ->
- G#group{sig=erlang:md5(term_to_binary({Views, Language, DesignOptions}))}.
+ ViewInfo = [old_view_format(V) || V <- Views],
+ G#group{sig=couch_util:md5(term_to_binary({ViewInfo, Language, DesignOptions, sort_lib(Lib)}))}.
+
+% Use the old view record format so group sigs don't change
+old_view_format(View) ->
+ {
+ view,
+ View#view.id_num,
+ View#view.map_names,
+ View#view.def,
+ View#view.btree,
+ View#view.reduce_funs,
+ View#view.options
+ }.
+
+sort_lib({Lib}) ->
+ sort_lib(Lib, []).
+sort_lib([], LAcc) ->
+ lists:keysort(1, LAcc);
+sort_lib([{LName, {LObj}}|Rest], LAcc) ->
+ LSorted = sort_lib(LObj, []), % descend into nested object
+ sort_lib(Rest, [{LName, LSorted}|LAcc]);
+sort_lib([{LName, LCode}|Rest], LAcc) ->
+ sort_lib(Rest, [{LName, LCode}|LAcc]).
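
sort_lib/1 canonicalizes the new lib object (code shared between map functions) before it is folded into the group signature, so reordering keys in the design document does not force a full reindex. Worked from the clauses:

    sort_lib({[{<<"b">>, <<"b_src">>},
               {<<"a">>, {[{<<"z">>, <<"z_src">>}]}}]})
    %% -> [{<<"a">>, [{<<"z">>, <<"z_src">>}]}, {<<"b">>, <<"b_src">>}]

Nested objects are sorted recursively; leaf code strings pass through untouched.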
open_db_group(DbName, GroupId) ->
- case couch_db:open(DbName, []) of
+ case couch_db:open_int(DbName, []) of
{ok, Db} ->
case couch_db:open_doc(Db, GroupId) of
{ok, Doc} ->
- {ok, Db, design_doc_to_view_group(Doc)};
+ couch_db:close(Db),
+ {ok, design_doc_to_view_group(Doc)};
Else ->
couch_db:close(Db),
Else
@@ -494,36 +554,39 @@ get_group_info(State) ->
% maybe move to another module
design_doc_to_view_group(#doc{id=Id,body={Fields}}) ->
- Language = proplists:get_value(<<"language">>, Fields, <<"javascript">>),
- {DesignOptions} = proplists:get_value(<<"options">>, Fields, {[]}),
- {RawViews} = proplists:get_value(<<"views">>, Fields, {[]}),
+ Language = couch_util:get_value(<<"language">>, Fields, <<"javascript">>),
+ {DesignOptions} = couch_util:get_value(<<"options">>, Fields, {[]}),
+ {RawViews} = couch_util:get_value(<<"views">>, Fields, {[]}),
+ Lib = couch_util:get_value(<<"lib">>, RawViews, {[]}),
% add the views to a dictionary object, with the map source as the key
DictBySrc =
lists:foldl(
fun({Name, {MRFuns}}, DictBySrcAcc) ->
- MapSrc = proplists:get_value(<<"map">>, MRFuns),
- RedSrc = proplists:get_value(<<"reduce">>, MRFuns, null),
- {ViewOptions} = proplists:get_value(<<"options">>, MRFuns, {[]}),
- View =
- case dict:find({MapSrc, ViewOptions}, DictBySrcAcc) of
- {ok, View0} -> View0;
- error -> #view{def=MapSrc, options=ViewOptions} % create new view object
- end,
- View2 =
- if RedSrc == null ->
- View#view{map_names=[Name|View#view.map_names]};
- true ->
- View#view{reduce_funs=[{Name,RedSrc}|View#view.reduce_funs]}
- end,
- dict:store({MapSrc, ViewOptions}, View2, DictBySrcAcc)
+ case couch_util:get_value(<<"map">>, MRFuns) of
+ undefined -> DictBySrcAcc;
+ MapSrc ->
+ RedSrc = couch_util:get_value(<<"reduce">>, MRFuns, null),
+ {ViewOptions} = couch_util:get_value(<<"options">>, MRFuns, {[]}),
+ View =
+ case dict:find({MapSrc, ViewOptions}, DictBySrcAcc) of
+ {ok, View0} -> View0;
+ error -> #view{def=MapSrc, options=ViewOptions} % create new view object
+ end,
+ View2 =
+ if RedSrc == null ->
+ View#view{map_names=[Name|View#view.map_names]};
+ true ->
+ View#view{reduce_funs=[{Name,RedSrc}|View#view.reduce_funs]}
+ end,
+ dict:store({MapSrc, ViewOptions}, View2, DictBySrcAcc)
+ end
end, dict:new(), RawViews),
% number the views
{Views, _N} = lists:mapfoldl(
fun({_Src, View}, N) ->
{View#view{id_num=N},N+1}
end, 0, lists:sort(dict:to_list(DictBySrc))),
-
- set_view_sig(#group{name=Id, views=Views, def_lang=Language, design_options=DesignOptions}).
+ set_view_sig(#group{name=Id, lib=Lib, views=Views, def_lang=Language, design_options=DesignOptions}).
reset_group(#group{views=Views}=Group) ->
Views2 = [View#view{btree=nil} || View <- Views],
@@ -537,19 +600,24 @@ reset_file(Db, Fd, DbName, #group{sig=Sig,name=Name} = Group) ->
init_group(Db, Fd, reset_group(Group), nil).
delete_index_file(RootDir, DbName, GroupSig) ->
- file:delete(index_file_name(RootDir, DbName, GroupSig)).
+ couch_file:delete(RootDir, index_file_name(RootDir, DbName, GroupSig)).
init_group(Db, Fd, #group{views=Views}=Group, nil) ->
init_group(Db, Fd, Group,
#index_header{seq=0, purge_seq=couch_db:get_purge_seq(Db),
- id_btree_state=nil, view_states=[nil || _ <- Views]});
+ id_btree_state=nil, view_states=[{nil, 0, 0} || _ <- Views]});
init_group(Db, Fd, #group{def_lang=Lang,views=Views}=
Group, IndexHeader) ->
#index_header{seq=Seq, purge_seq=PurgeSeq,
id_btree_state=IdBtreeState, view_states=ViewStates} = IndexHeader,
+ StateUpdate = fun
+ ({_, _, _}=State) -> State;
+ (State) -> {State, 0, 0}
+ end,
+ ViewStates2 = lists:map(StateUpdate, ViewStates),
{ok, IdBtree} = couch_btree:open(IdBtreeState, Fd),
Views2 = lists:zipwith(
- fun(BtreeState, #view{reduce_funs=RedFuns,options=Options}=View) ->
+ fun({BTState, USeq, PSeq}, #view{reduce_funs=RedFuns,options=Options}=View) ->
FunSrcs = [FunSrc || {_Name, FunSrc} <- RedFuns],
ReduceFun =
fun(reduce, KVs) ->
@@ -566,19 +634,22 @@ init_group(Db, Fd, #group{def_lang=Lang,views=Views}=
{Count, Reduced}
end,
- case proplists:get_value(<<"collation">>, Options, <<"default">>) of
+ case couch_util:get_value(<<"collation">>, Options, <<"default">>) of
<<"default">> ->
Less = fun couch_view:less_json_ids/2;
<<"raw">> ->
Less = fun(A,B) -> A < B end
end,
- {ok, Btree} = couch_btree:open(BtreeState, Fd,
- [{less, Less},
- {reduce, ReduceFun}]),
- View#view{btree=Btree}
+ {ok, Btree} = couch_btree:open(BTState, Fd,
+ [{less, Less}, {reduce, ReduceFun}]
+ ),
+ View#view{btree=Btree, update_seq=USeq, purge_seq=PSeq}
end,
- ViewStates, Views),
+ ViewStates2, Views),
Group#group{db=Db, fd=Fd, current_seq=Seq, purge_seq=PurgeSeq,
id_btree=IdBtree, views=Views2}.
-
+reopen_db(DbName, nil) ->
+ couch_db:open_int(DbName, []);
+reopen_db(_DbName, Db) ->
+ couch_db:reopen(Db).
diff --git a/src/couchdb/couch_view_updater.erl b/src/couchdb/couch_view_updater.erl
index aa7d98fc..8e089fa9 100644
--- a/src/couchdb/couch_view_updater.erl
+++ b/src/couchdb/couch_view_updater.erl
@@ -38,8 +38,10 @@ update(Owner, Group) ->
couch_task_status:update(<<"Resetting view index due to lost purge entries.">>),
exit(reset)
end,
- {ok, MapQueue} = couch_work_queue:new(100000, 500),
- {ok, WriteQueue} = couch_work_queue:new(100000, 500),
+ {ok, MapQueue} = couch_work_queue:new(
+ [{max_size, 100000}, {max_items, 500}]),
+ {ok, WriteQueue} = couch_work_queue:new(
+ [{max_size, 100000}, {max_items, 500}]),
Self = self(),
ViewEmptyKVs = [{View, []} || View <- Group2#group.views],
spawn_link(fun() -> do_maps(Group, MapQueue, WriteQueue, ViewEmptyKVs) end),
@@ -49,9 +51,9 @@ update(Owner, Group) ->
% update status every half second
couch_task_status:set_update_frequency(500),
#group{ design_options = DesignOptions } = Group,
- IncludeDesign = proplists:get_value(<<"include_design">>,
+ IncludeDesign = couch_util:get_value(<<"include_design">>,
DesignOptions, false),
- LocalSeq = proplists:get_value(<<"local_seq">>, DesignOptions, false),
+ LocalSeq = couch_util:get_value(<<"local_seq">>, DesignOptions, false),
DocOpts =
case LocalSeq of
true -> [conflicts, deleted_conflicts, local_seq];
@@ -94,19 +96,25 @@ purge_index(#group{db=Db, views=Views, id_btree=IdBtree}=Group) ->
end, dict:new(), Lookups),
% Now remove the values from the btrees
+ PurgeSeq = couch_db:get_purge_seq(Db),
Views2 = lists:map(
fun(#view{id_num=Num,btree=Btree}=View) ->
case dict:find(Num, ViewKeysToRemoveDict) of
{ok, RemoveKeys} ->
- {ok, Btree2} = couch_btree:add_remove(Btree, [], RemoveKeys),
- View#view{btree=Btree2};
+ {ok, ViewBtree2} = couch_btree:add_remove(Btree, [], RemoveKeys),
+ case ViewBtree2 =/= Btree of
+ true ->
+ View#view{btree=ViewBtree2, purge_seq=PurgeSeq};
+ _ ->
+ View#view{btree=ViewBtree2}
+ end;
error -> % no keys to remove in this view
View
end
end, Views),
Group#group{id_btree=IdBtree2,
views=Views2,
- purge_seq=couch_db:get_purge_seq(Db)}.
+ purge_seq=PurgeSeq}.
load_doc(Db, DocInfo, MapQueue, DocOpts, IncludeDesign) ->
@@ -201,12 +209,12 @@ view_insert_doc_query_results(#doc{id=DocId}=Doc, [ResultKVs|RestResults], [{Vie
view_compute(Group, []) ->
{Group, []};
-view_compute(#group{def_lang=DefLang, query_server=QueryServerIn}=Group, Docs) ->
+view_compute(#group{def_lang=DefLang, lib=Lib, query_server=QueryServerIn}=Group, Docs) ->
{ok, QueryServer} =
case QueryServerIn of
nil -> % doc map not started
Definitions = [View#view.def || View <- Group#group.views],
- couch_query_servers:start_doc_map(DefLang, Definitions);
+ couch_query_servers:start_doc_map(DefLang, Definitions, Lib);
_ ->
{ok, QueryServerIn}
end,
@@ -245,7 +253,12 @@ write_changes(Group, ViewKeyValuesToAdd, DocIdViewIdKeys, NewSeq, InitialBuild)
Views2 = lists:zipwith(fun(View, {_View, AddKeyValues}) ->
KeysToRemove = couch_util:dict_find(View#view.id_num, KeysToRemoveByView, []),
{ok, ViewBtree2} = couch_btree:add_remove(View#view.btree, AddKeyValues, KeysToRemove),
- View#view{btree = ViewBtree2}
+ case ViewBtree2 =/= View#view.btree of
+ true ->
+ View#view{btree=ViewBtree2, update_seq=NewSeq};
+ _ ->
+ View#view{btree=ViewBtree2}
+ end
end, Group#group.views, ViewKeyValuesToAdd),
Group#group{views=Views2, current_seq=NewSeq, id_btree=IdBtree2}.
diff --git a/src/couchdb/couch_work_queue.erl b/src/couchdb/couch_work_queue.erl
index ca9445d3..13ec7335 100644
--- a/src/couchdb/couch_work_queue.erl
+++ b/src/couchdb/couch_work_queue.erl
@@ -13,76 +13,139 @@
-module(couch_work_queue).
-behaviour(gen_server).
--export([new/2,queue/2,dequeue/1,close/1]).
--export([init/1, terminate/2, handle_call/3, handle_cast/2, code_change/3, handle_info/2]).
+% public API
+-export([new/1, queue/2, dequeue/1, dequeue/2, close/1]).
+
+% gen_server callbacks
+-export([init/1, terminate/2]).
+-export([handle_call/3, handle_cast/2, code_change/3, handle_info/2]).
-record(q, {
- buffer=[],
- blocked=[],
+ queue = queue:new(),
+ blocked = [],
max_size,
max_items,
- items=0,
- size=0,
- work_waiter=nil,
- close_on_dequeue=false
+ items = 0,
+ size = 0,
+ work_waiters = [],
+ close_on_dequeue = false,
+ multi_workers = false
}).
-new(MaxSize, MaxItems) ->
- gen_server:start_link(couch_work_queue, {MaxSize, MaxItems}, []).
+
+new(Options) ->
+ gen_server:start_link(couch_work_queue, Options, []).
+
queue(Wq, Item) ->
gen_server:call(Wq, {queue, Item}, infinity).
+
dequeue(Wq) ->
- try gen_server:call(Wq, dequeue, infinity)
+ dequeue(Wq, all).
+
+
+dequeue(Wq, MaxItems) ->
+ try
+ gen_server:call(Wq, {dequeue, MaxItems}, infinity)
catch
_:_ -> closed
end.
+
close(Wq) ->
gen_server:cast(Wq, close).
-init({MaxSize,MaxItems}) ->
- {ok, #q{max_size=MaxSize, max_items=MaxItems}}.
+init(Options) ->
+ Q = #q{
+ max_size = couch_util:get_value(max_size, Options),
+ max_items = couch_util:get_value(max_items, Options),
+ multi_workers = couch_util:get_value(multi_workers, Options, false)
+ },
+ {ok, Q}.
+
+
+terminate(_Reason, #q{work_waiters=Workers}) ->
+ lists:foreach(fun({W, _}) -> gen_server:reply(W, closed) end, Workers).
-terminate(_Reason, #q{work_waiter=nil}) ->
- ok;
-terminate(_Reason, #q{work_waiter=WW}) ->
- gen_server:reply(WW, closed).
-handle_call({queue, Item}, From, #q{work_waiter=nil}=Q0) ->
- Q = Q0#q{size=Q0#q.size + byte_size(term_to_binary(Item)),
- items=Q0#q.items + 1,
- buffer=[Item | Q0#q.buffer]},
+handle_call({queue, Item}, From, #q{work_waiters = []} = Q0) ->
+ Q = Q0#q{size = Q0#q.size + byte_size(term_to_binary(Item)),
+ items = Q0#q.items + 1,
+ queue = queue:in(Item, Q0#q.queue)},
case (Q#q.size >= Q#q.max_size) orelse
(Q#q.items >= Q#q.max_items) of
true ->
- {noreply, Q#q{blocked=[From | Q#q.blocked]}};
+ {noreply, Q#q{blocked = [From | Q#q.blocked]}};
false ->
{reply, ok, Q}
end;
-handle_call({queue, Item}, _From, #q{work_waiter=WW}=Q) ->
- gen_server:reply(WW, {ok, [Item]}),
- {reply, ok, Q#q{work_waiter=nil}};
-handle_call(dequeue, _From, #q{work_waiter=WW}) when WW /= nil ->
- exit("Only one caller allowed to wait for work at a time");
-handle_call(dequeue, From, #q{items=0}=Q) ->
- {noreply, Q#q{work_waiter=From}};
-handle_call(dequeue, _From, #q{buffer=Buff, max_size=MaxSize,
- max_items=MaxItems, close_on_dequeue=Close}=Q) ->
- [gen_server:reply(From, ok) || From <- Q#q.blocked],
- Q2 = #q{max_size=MaxSize, max_items=MaxItems},
- if Close ->
- {stop, normal, {ok, Buff}, Q2};
+
+handle_call({queue, Item}, _From, #q{work_waiters = [{W, _Max} | Rest]} = Q) ->
+ gen_server:reply(W, {ok, [Item]}),
+ {reply, ok, Q#q{work_waiters = Rest}};
+
+handle_call({dequeue, Max}, From, Q) ->
+ #q{work_waiters = Workers, multi_workers = Multi, items = Count} = Q,
+ case {Workers, Multi} of
+ {[_ | _], false} ->
+ exit("Only one caller allowed to wait for this work at a time");
+ {[_ | _], true} ->
+ {noreply, Q#q{work_waiters=Workers ++ [{From, Max}]}};
+ _ ->
+ case Count of
+ 0 ->
+ {noreply, Q#q{work_waiters=Workers ++ [{From, Max}]}};
+ C when C > 0 ->
+ deliver_queue_items(Max, Q)
+ end
+ end.
+
+
+deliver_queue_items(Max, Q) ->
+ #q{
+ queue = Queue,
+ items = Count,
+ close_on_dequeue = Close,
+ blocked = Blocked
+ } = Q,
+ case (Max =:= all) orelse (Max >= Count) of
+ false ->
+ {Items, Queue2, Blocked2} = dequeue_items(Max, Queue, Blocked, []),
+ Q2 = Q#q{items = Count - Max, blocked = Blocked2, queue = Queue2},
+ {reply, {ok, Items}, Q2};
true ->
- {reply, {ok, Buff}, #q{max_size=MaxSize, max_items=MaxItems}}
+ lists:foreach(fun(F) -> gen_server:reply(F, ok) end, Blocked),
+ Q2 = Q#q{items = 0, size = 0, blocked = [], queue = queue:new()},
+ case Close of
+ false ->
+ {reply, {ok, queue:to_list(Queue)}, Q2};
+ true ->
+ {stop, normal, {ok, queue:to_list(Queue)}, Q2}
+ end
end.
-handle_cast(close, #q{buffer=[]}=Q) ->
+
+dequeue_items(0, Queue, Blocked, DequeuedAcc) ->
+ {lists:reverse(DequeuedAcc), Queue, Blocked};
+
+dequeue_items(NumItems, Queue, Blocked, DequeuedAcc) ->
+ {{value, Item}, Queue2} = queue:out(Queue),
+ case Blocked of
+ [] ->
+ Blocked2 = Blocked;
+ [From | Blocked2] ->
+ gen_server:reply(From, ok)
+ end,
+ dequeue_items(NumItems - 1, Queue2, Blocked2, [Item | DequeuedAcc]).
+
+
+handle_cast(close, #q{items = 0} = Q) ->
{stop, normal, Q};
+
handle_cast(close, Q) ->
- {noreply, Q#q{close_on_dequeue=true}}.
+ {noreply, Q#q{close_on_dequeue = true}}.
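
The queue is now a real queue:queue() with an option-list constructor, capped batch dequeues, and optional support for several concurrent consumers ({multi_workers, true}). A hedged end-to-end sketch (consume/1 and demo/0 are illustrative helpers, not part of this patch):

    % Drain in batches of at most 10 until the producer closes the queue.
    consume(Q) ->
        case couch_work_queue:dequeue(Q, 10) of
            closed ->
                ok;
            {ok, Items} ->
                io:format("batch of ~p items~n", [length(Items)]),
                consume(Q)
        end.

    demo() ->
        {ok, Q} = couch_work_queue:new([{max_size, 1024}, {max_items, 10}]),
        spawn_link(fun() ->
            lists:foreach(fun(N) ->
                ok = couch_work_queue:queue(Q, {item, N})
            end, lists:seq(1, 25)),
            couch_work_queue:close(Q)
        end),
        consume(Q).

close/1 is deliberately lazy: if items are still buffered it only sets close_on_dequeue, and the consumer receives the remaining items before a subsequent dequeue returns closed. This is how the view updater's map and write queues (see the couch_view_updater hunk above) shut down cleanly.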
code_change(_OldVsn, State, _Extra) ->
diff --git a/src/couchdb/priv/Makefile.am b/src/couchdb/priv/Makefile.am
index 5b76f8cd..b36d828d 100644
--- a/src/couchdb/priv/Makefile.am
+++ b/src/couchdb/priv/Makefile.am
@@ -47,7 +47,7 @@ COUCHJS_SRCS = \
locallibbin_PROGRAMS = couchjs
couchjs_SOURCES = $(COUCHJS_SRCS)
couchjs_LDFLAGS = $(CURL_LDFLAGS)
-couchjs_CFLAGS = $(CURL_CFLAGS)
+couchjs_CFLAGS = -D_BSD_SOURCE $(CURL_CFLAGS)
couchjs_LDADD = $(CURL_LDFLAGS) @JSLIB@
couchpriv_DATA = stat_descriptions.cfg
diff --git a/src/couchdb/priv/couch_js/http.c b/src/couchdb/priv/couch_js/http.c
index 998c2439..6c2a8a82 100644
--- a/src/couchdb/priv/couch_js/http.c
+++ b/src/couchdb/priv/couch_js/http.c
@@ -10,6 +10,7 @@
// License for the specific language governing permissions and limitations under
// the License.
+#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <jsapi.h>
@@ -403,7 +404,8 @@ go(JSContext* cx, JSObject* obj, HTTPData* http, char* body, size_t bodylen)
{
HTTP_HANDLE = curl_easy_init();
curl_easy_setopt(HTTP_HANDLE, CURLOPT_READFUNCTION, send_body);
- curl_easy_setopt(HTTP_HANDLE, CURLOPT_SEEKFUNCTION, seek_body);
+ curl_easy_setopt(HTTP_HANDLE, CURLOPT_SEEKFUNCTION,
+ (curl_seek_callback) seek_body);
curl_easy_setopt(HTTP_HANDLE, CURLOPT_HEADERFUNCTION, recv_header);
curl_easy_setopt(HTTP_HANDLE, CURLOPT_WRITEFUNCTION, recv_body);
curl_easy_setopt(HTTP_HANDLE, CURLOPT_NOPROGRESS, 1);
diff --git a/src/couchdb/priv/stat_descriptions.cfg.in b/src/couchdb/priv/stat_descriptions.cfg.in
index 554124cf..b80d7684 100644
--- a/src/couchdb/priv/stat_descriptions.cfg.in
+++ b/src/couchdb/priv/stat_descriptions.cfg.in
@@ -1,3 +1,15 @@
+%% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+%% use this file except in compliance with the License. You may obtain a copy of
+%% the License at
+%%
+%% http://www.apache.org/licenses/LICENSE-2.0
+%%
+%% Unless required by applicable law or agreed to in writing, software
+%% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+%% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+%% License for the specific language governing permissions and limitations under
+%% the License.
+
% Style guide for descriptions: Start with a lowercase letter & do not add
% a trailing full-stop / period
% Please keep this in alphabetical order
@@ -7,6 +19,8 @@
{couchdb, open_databases, "number of open databases"}.
{couchdb, open_os_files, "number of file descriptors CouchDB has open"}.
{couchdb, request_time, "length of a request inside CouchDB without MochiWeb"}.
+{couchdb, auth_cache_hits, "number of authentication cache hits"}.
+{couchdb, auth_cache_misses, "number of authentication cache misses"}.
{httpd, bulk_requests, "number of bulk requests"}.
{httpd, requests, "number of HTTP requests"}.
@@ -18,7 +32,6 @@
{httpd_request_methods, 'DELETE', "number of HTTP DELETE requests"}.
{httpd_request_methods, 'GET', "number of HTTP GET requests"}.
{httpd_request_methods, 'HEAD', "number of HTTP HEAD requests"}.
-{httpd_request_methods, 'MOVE', "number of HTTP MOVE requests"}.
{httpd_request_methods, 'POST', "number of HTTP POST requests"}.
{httpd_request_methods, 'PUT', "number of HTTP PUT requests"}.
diff --git a/src/erlang-oauth/Makefile.am b/src/erlang-oauth/Makefile.am
index 1d123396..48b76482 100644
--- a/src/erlang-oauth/Makefile.am
+++ b/src/erlang-oauth/Makefile.am
@@ -12,18 +12,19 @@
oauthebindir = $(localerlanglibdir)/erlang-oauth/ebin
-# Removed oauth_rsa_sha1.erl until we require R12B5 or
-# we add a ./configure option to enable it.
-
oauth_file_collection = \
oauth.app.in \
oauth.erl \
oauth_hmac_sha1.erl \
oauth_http.erl \
oauth_plaintext.erl \
+ oauth_rsa_sha1.erl \
oauth_unix.erl \
oauth_uri.erl
+# Removed oauth_rsa_sha1.beam until we require R12B5 or
+# we add a ./configure option to enable it.
+
oauthebin_make_generated_file_list = \
oauth.app \
oauth.beam \
diff --git a/src/erlang-oauth/oauth_hmac_sha1.erl b/src/erlang-oauth/oauth_hmac_sha1.erl
index 69064edd..79d59f37 100644
--- a/src/erlang-oauth/oauth_hmac_sha1.erl
+++ b/src/erlang-oauth/oauth_hmac_sha1.erl
@@ -8,4 +8,4 @@ signature(BaseString, CS, TS) ->
base64:encode_to_string(crypto:sha_mac(Key, BaseString)).
verify(Signature, BaseString, CS, TS) ->
- Signature =:= signature(BaseString, CS, TS).
+ couch_util:verify(signature(BaseString, CS, TS), Signature).
diff --git a/src/erlang-oauth/oauth_plaintext.erl b/src/erlang-oauth/oauth_plaintext.erl
index d8085e02..41a1e9b2 100644
--- a/src/erlang-oauth/oauth_plaintext.erl
+++ b/src/erlang-oauth/oauth_plaintext.erl
@@ -7,4 +7,4 @@ signature(CS, TS) ->
oauth_uri:calate("&", [CS, TS]).
verify(Signature, CS, TS) ->
- Signature =:= signature(CS, TS).
+ couch_util:verify(signature(CS, TS), Signature).
diff --git a/src/ibrowse/Makefile.am b/src/ibrowse/Makefile.am
index 510f36a9..8c5d3f8e 100644
--- a/src/ibrowse/Makefile.am
+++ b/src/ibrowse/Makefile.am
@@ -10,7 +10,7 @@
## License for the specific language governing permissions and limitations under
## the License.
-ibrowseebindir = $(localerlanglibdir)/ibrowse-1.5.2/ebin
+ibrowseebindir = $(localerlanglibdir)/ibrowse-2.1.0/ebin
ibrowse_file_collection = \
ibrowse.app.in \
diff --git a/src/ibrowse/ibrowse.app.in b/src/ibrowse/ibrowse.app.in
index 4f43dd92..e8580d10 100644
--- a/src/ibrowse/ibrowse.app.in
+++ b/src/ibrowse/ibrowse.app.in
@@ -1,10 +1,10 @@
{application, ibrowse,
[{description, "HTTP client application"},
- {vsn, "1.5.1"},
- {modules, [ ibrowse,
- ibrowse_http_client,
- ibrowse_app,
- ibrowse_sup,
+ {vsn, "2.1.0"},
+ {modules, [ ibrowse,
+ ibrowse_http_client,
+ ibrowse_app,
+ ibrowse_sup,
ibrowse_lib,
ibrowse_lb ]},
{registered, []},
diff --git a/src/ibrowse/ibrowse.erl b/src/ibrowse/ibrowse.erl
index 1913ef59..1a42f4bc 100644
--- a/src/ibrowse/ibrowse.erl
+++ b/src/ibrowse/ibrowse.erl
@@ -6,9 +6,9 @@
%%% Created : 11 Oct 2003 by Chandrashekhar Mullaparthi <chandrashekhar.mullaparthi@t-mobile.co.uk>
%%%-------------------------------------------------------------------
%% @author Chandrashekhar Mullaparthi <chandrashekhar dot mullaparthi at gmail dot com>
-%% @copyright 2005-2009 Chandrashekhar Mullaparthi
-%% @version 1.5.2
-%% @doc The ibrowse application implements an HTTP 1.1 client. This
+%% @copyright 2005-2010 Chandrashekhar Mullaparthi
+%% @version 2.1.0
+%% @doc The ibrowse application implements an HTTP 1.1 client in Erlang. This
%% module implements the API of the HTTP client. There is one named
%% process called 'ibrowse' which assists in load balancing and maintaining configuration. There is one load balancing process per unique webserver. There is
%% one process to handle one TCP connection to a webserver
@@ -21,22 +21,22 @@
%% <p>Here are a few sample invocations.</p>
%%
%% <code>
-%% ibrowse:send_req("http://intranet/messenger/", [], get).
+%% ibrowse:send_req("http://intranet/messenger/", [], get).
%% <br/><br/>
-%%
-%% ibrowse:send_req("http://www.google.com/", [], get, [],
-%% [{proxy_user, "XXXXX"},
-%% {proxy_password, "XXXXX"},
-%% {proxy_host, "proxy"},
-%% {proxy_port, 8080}], 1000).
+%%
+%% ibrowse:send_req("http://www.google.com/", [], get, [],
+%% [{proxy_user, "XXXXX"},
+%% {proxy_password, "XXXXX"},
+%% {proxy_host, "proxy"},
+%% {proxy_port, 8080}], 1000).
%% <br/><br/>
%%
%%ibrowse:send_req("http://www.erlang.org/download/otp_src_R10B-3.tar.gz", [], get, [],
-%% [{proxy_user, "XXXXX"},
-%% {proxy_password, "XXXXX"},
-%% {proxy_host, "proxy"},
-%% {proxy_port, 8080},
-%% {save_response_to_file, true}], 1000).
+%% [{proxy_user, "XXXXX"},
+%% {proxy_password, "XXXXX"},
+%% {proxy_host, "proxy"},
+%% {proxy_port, 8080},
+%% {save_response_to_file, true}], 1000).
%% <br/><br/>
%%
%% ibrowse:send_req("http://www.erlang.org", [], head).
@@ -48,17 +48,12 @@
%% ibrowse:send_req("http://www.bbc.co.uk", [], trace).
%%
%% <br/><br/>
-%% ibrowse:send_req("http://www.google.com", [], get, [],
+%% ibrowse:send_req("http://www.google.com", [], get, [],
%% [{stream_to, self()}]).
%% </code>
%%
-%% <p>A driver exists which implements URL encoding in C, but the
-%% speed achieved using only erlang has been good enough, so the
-%% driver isn't actually used.</p>
-module(ibrowse).
--vsn('$Id: ibrowse.erl,v 1.8 2009/07/01 22:43:19 chandrusf Exp $ ').
-
-behaviour(gen_server).
%%--------------------------------------------------------------------
%% Include files
@@ -70,48 +65,51 @@
%% gen_server callbacks
-export([init/1, handle_call/3, handle_cast/2, handle_info/2,
- terminate/2, code_change/3]).
+ terminate/2, code_change/3]).
%% API interface
-export([
- rescan_config/0,
- rescan_config/1,
- get_config_value/1,
- get_config_value/2,
- spawn_worker_process/2,
- spawn_link_worker_process/2,
- stop_worker_process/1,
- send_req/3,
- send_req/4,
- send_req/5,
- send_req/6,
- send_req_direct/4,
- send_req_direct/5,
- send_req_direct/6,
- send_req_direct/7,
- stream_next/1,
- set_max_sessions/3,
- set_max_pipeline_size/3,
- set_dest/3,
- trace_on/0,
- trace_off/0,
- trace_on/2,
- trace_off/2,
- all_trace_off/0,
- show_dest_status/0,
- show_dest_status/2
- ]).
+ rescan_config/0,
+ rescan_config/1,
+ get_config_value/1,
+ get_config_value/2,
+ spawn_worker_process/1,
+ spawn_worker_process/2,
+ spawn_link_worker_process/1,
+ spawn_link_worker_process/2,
+ stop_worker_process/1,
+ send_req/3,
+ send_req/4,
+ send_req/5,
+ send_req/6,
+ send_req_direct/4,
+ send_req_direct/5,
+ send_req_direct/6,
+ send_req_direct/7,
+ stream_next/1,
+ stream_close/1,
+ set_max_sessions/3,
+ set_max_pipeline_size/3,
+ set_dest/3,
+ trace_on/0,
+ trace_off/0,
+ trace_on/2,
+ trace_off/2,
+ all_trace_off/0,
+ show_dest_status/0,
+ show_dest_status/2
+ ]).
-ifdef(debug).
-compile(export_all).
-endif.
-import(ibrowse_lib, [
- parse_url/1,
- get_value/3,
- do_trace/2
- ]).
-
+ parse_url/1,
+ get_value/3,
+ do_trace/2
+ ]).
+
-record(state, {trace = false}).
-include("ibrowse.hrl").
@@ -159,7 +157,7 @@ stop() ->
send_req(Url, Headers, Method) ->
send_req(Url, Headers, Method, [], []).
-%% @doc Same as send_req/3.
+%% @doc Same as send_req/3.
%% If a list is specified for the body, it has to be a flat list. The body can also be a fun/0 or a fun/1. <br/>
%% If fun/0, the connection handling process will repeatedly call the fun until it returns an error or eof. <pre>Fun() = {ok, Data} | eof</pre><br/>
%% If fun/1, the connection handling process will repeatedly call the fun with the supplied state until it returns an error or eof. <pre>Fun(State) = {ok, Data} | {ok, Data, NewState} | eof</pre>
@@ -169,19 +167,19 @@ send_req(Url, Headers, Method) ->
send_req(Url, Headers, Method, Body) ->
send_req(Url, Headers, Method, Body, []).
-%% @doc Same as send_req/4.
+%% @doc Same as send_req/4.
%% For a description of SSL Options, look in the <a href="http://www.erlang.org/doc/apps/ssl/index.html">ssl</a> manpage. If the
%% HTTP Version to use is not specified, the default is 1.1.
%% <br/>
-%% <p>The <code>host_header</code> option is useful in the case where ibrowse is
+%% <ul>
+%% <li>The <code>host_header</code> option is useful in the case where ibrowse is
%% connecting to a component such as <a
%% href="http://www.stunnel.org">stunnel</a> which then sets up a
%% secure connection to a webserver. In this case, the URL supplied to
%% ibrowse must have the stunnel host/port details, but that won't
%% make sense to the destination webserver. This option can then be
%% used to specify what should go in the <code>Host</code> header in
-%% the request.</p>
-%% <ul>
+%% the request.</li>
%% <li>The <code>stream_to</code> option can be used to have the HTTP
%% response streamed to a process as messages as data arrives on the
%% socket. If the calling process wishes to control the rate at which
@@ -204,7 +202,11 @@ send_req(Url, Headers, Method, Body) ->
%% dealing with large response bodies and/or slow links. In these
%% cases, it might be hard to estimate how long a request will take to
%% complete. In such cases, the client might want to timeout if no
-%% data has been received on the link for a certain time interval.</li>
+%% data has been received on the link for a certain time interval.
+%%
+%% This value is also used to close connections which are not in use for
+%% the specified timeout value.
+%% </li>
%%
%% <li>
%% The <code>connect_timeout</code> option is to specify how long the
@@ -220,12 +222,30 @@ send_req(Url, Headers, Method, Body) ->
%% ibrowse:send_req("http://www.example.com/cgi-bin/request", [], get, [], [{connect_timeout, 100}], 1000).
%% </code>
%% In the above invocation, if the connection isn't established within
-%% 100 milliseconds, the request will fail with
+%% 100 milliseconds, the request will fail with
%% <code>{error, conn_failed}</code>.<br/>
%% If connection setup succeeds, the total time allowed for the
%% request to complete will be 1000 milliseconds minus the time taken
%% for connection setup.
%% </li>
+%%
+%% <li> The <code>socket_options</code> option can be used to set
+%% specific options on the socket. The <code>{active, true | false | once}</code>
+%% and <code>{packet_type, Packet_type}</code> will be filtered out by ibrowse. </li>
+%%
+%% <li> The <code>headers_as_is</code> option is to enable the caller
+%% to send headers exactly as specified in the request without ibrowse
+%% adding some of its own. Required for some picky servers apparently. </li>
+%%
+%% <li>The <code>give_raw_headers</code> option is to enable the
+%% caller to get access to the raw status line and raw unparsed
+%% headers, exactly as the server sent them. This is useful when the
+%% caller needs to inspect or relay the response verbatim. </li>
+%%
+%% <li> The <code>preserve_chunked_encoding</code> option enables the caller
+%% to receive the raw data stream when the Transfer-Encoding of the server
+%% response is Chunked.
+%% </li>
%% </ul>
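%%
%% An illustrative call combining some of the options above (the URL, port
%% and option values are assumptions, not defaults):
%% <code>
%% ibrowse:send_req("http://localhost:8080/stream", [], get, [],
%%                  [{stream_to, {self(), once}},
%%                   {socket_options, [{recbuf, 65536}]},
%%                   {give_raw_headers, true}]).
%% </code>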
%%
%% @spec send_req(Url::string(), Headers::headerList(), Method::method(), Body::body(), Options::optionList()) -> response()
@@ -234,7 +254,7 @@ send_req(Url, Headers, Method, Body) ->
%% {response_format,response_format()}|
%% {stream_chunk_size, integer()} |
%% {max_pipeline_size, integer()} |
-%% {trace, boolean()} |
+%% {trace, boolean()} |
%% {is_ssl, boolean()} |
%% {ssl_options, [SSLOpt]} |
%% {pool_name, atom()} |
@@ -253,13 +273,19 @@ send_req(Url, Headers, Method, Body) ->
%% {host_header, string()} |
%% {inactivity_timeout, integer()} |
%% {connect_timeout, integer()} |
-%% {transfer_encoding, {chunked, ChunkSize}}
+%% {socket_options, Sock_opts} |
+%% {transfer_encoding, {chunked, ChunkSize}} |
+%% {headers_as_is, boolean()} |
+%% {give_raw_headers, boolean()} |
+%% {preserve_chunked_encoding,boolean()}
%%
%% stream_to() = process() | {process(), once}
%% process() = pid() | atom()
%% username() = string()
%% password() = string()
%% SSLOpt = term()
+%% Sock_opts = [Sock_opt]
+%% Sock_opt = term()
%% ChunkSize = integer()
%% srtf() = boolean() | filename()
%% filename() = string()
@@ -267,54 +293,76 @@ send_req(Url, Headers, Method, Body) ->
send_req(Url, Headers, Method, Body, Options) ->
send_req(Url, Headers, Method, Body, Options, 30000).
-%% @doc Same as send_req/5.
+%% @doc Same as send_req/5.
%% All timeout values are in milliseconds.
%% @spec send_req(Url, Headers::headerList(), Method::method(), Body::body(), Options::optionList(), Timeout) -> response()
%% Timeout = integer() | infinity
send_req(Url, Headers, Method, Body, Options, Timeout) ->
case catch parse_url(Url) of
- #url{host = Host,
- port = Port,
- protocol = Protocol} = Parsed_url ->
- Lb_pid = case ets:lookup(ibrowse_lb, {Host, Port}) of
- [] ->
- get_lb_pid(Parsed_url);
- [#lb_pid{pid = Lb_pid_1}] ->
- Lb_pid_1
- end,
- Max_sessions = get_max_sessions(Host, Port, Options),
- Max_pipeline_size = get_max_pipeline_size(Host, Port, Options),
- Options_1 = merge_options(Host, Port, Options),
- {SSLOptions, IsSSL} =
- case (Protocol == https) orelse
- get_value(is_ssl, Options_1, false) of
- false -> {[], false};
- true -> {get_value(ssl_options, Options_1, []), true}
- end,
- case ibrowse_lb:spawn_connection(Lb_pid, Parsed_url,
- Max_sessions,
- Max_pipeline_size,
- {SSLOptions, IsSSL}) of
- {ok, Conn_Pid} ->
- do_send_req(Conn_Pid, Parsed_url, Headers,
- Method, Body, Options_1, Timeout);
- Err ->
- Err
- end;
- Err ->
- {error, {url_parsing_failed, Err}}
+ #url{host = Host,
+ port = Port,
+ protocol = Protocol} = Parsed_url ->
+ Lb_pid = case ets:lookup(ibrowse_lb, {Host, Port}) of
+ [] ->
+ get_lb_pid(Parsed_url);
+ [#lb_pid{pid = Lb_pid_1}] ->
+ Lb_pid_1
+ end,
+ Max_sessions = get_max_sessions(Host, Port, Options),
+ Max_pipeline_size = get_max_pipeline_size(Host, Port, Options),
+ Options_1 = merge_options(Host, Port, Options),
+ {SSLOptions, IsSSL} =
+ case (Protocol == https) orelse
+ get_value(is_ssl, Options_1, false) of
+ false -> {[], false};
+ true -> {get_value(ssl_options, Options_1, []), true}
+ end,
+ try_routing_request(Lb_pid, Parsed_url,
+ Max_sessions,
+ Max_pipeline_size,
+ {SSLOptions, IsSSL},
+ Headers, Method, Body, Options_1, Timeout, 0);
+ Err ->
+ {error, {url_parsing_failed, Err}}
end.
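+%% A connection picked from the load balancer may have been closed (e.g. by
+%% its inactivity timer) between selection and use. In that case do_send_req
+%% returns {error, sel_conn_closed} and the request is retried on a fresh
+%% connection, up to 3 attempts, before giving up with {error, retry_later}.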
+try_routing_request(Lb_pid, Parsed_url,
+ Max_sessions,
+ Max_pipeline_size,
+ {SSLOptions, IsSSL},
+ Headers, Method, Body, Options_1, Timeout, Try_count) when Try_count < 3 ->
+ case ibrowse_lb:spawn_connection(Lb_pid, Parsed_url,
+ Max_sessions,
+ Max_pipeline_size,
+ {SSLOptions, IsSSL}) of
+ {ok, Conn_Pid} ->
+ case do_send_req(Conn_Pid, Parsed_url, Headers,
+ Method, Body, Options_1, Timeout) of
+ {error, sel_conn_closed} ->
+ try_routing_request(Lb_pid, Parsed_url,
+ Max_sessions,
+ Max_pipeline_size,
+ {SSLOptions, IsSSL},
+ Headers, Method, Body, Options_1, Timeout, Try_count + 1);
+ Res ->
+ Res
+ end;
+ Err ->
+ Err
+ end;
+try_routing_request(_, _, _, _, _, _, _, _, _, _, _) ->
+ {error, retry_later}.
+
merge_options(Host, Port, Options) ->
Config_options = get_config_value({options, Host, Port}, []),
lists:foldl(
fun({Key, Val}, Acc) ->
- case lists:keysearch(Key, 1, Options) of
- false ->
- [{Key, Val} | Acc];
- _ ->
- Acc
- end
+ case lists:keysearch(Key, 1, Options) of
+ false ->
+ [{Key, Val} | Acc];
+ _ ->
+ Acc
+ end
end, Options, Config_options).
get_lb_pid(Url) ->
@@ -322,11 +370,27 @@ get_lb_pid(Url) ->
get_max_sessions(Host, Port, Options) ->
get_value(max_sessions, Options,
- get_config_value({max_sessions, Host, Port}, ?DEF_MAX_SESSIONS)).
+ get_config_value({max_sessions, Host, Port},
+ default_max_sessions())).
get_max_pipeline_size(Host, Port, Options) ->
get_value(max_pipeline_size, Options,
- get_config_value({max_pipeline_size, Host, Port}, ?DEF_MAX_PIPELINE_SIZE)).
+ get_config_value({max_pipeline_size, Host, Port},
+ default_max_pipeline_size())).
+
+default_max_sessions() ->
+ safe_get_env(ibrowse, default_max_sessions, ?DEF_MAX_SESSIONS).
+
+default_max_pipeline_size() ->
+ safe_get_env(ibrowse, default_max_pipeline_size, ?DEF_MAX_PIPELINE_SIZE).
+
+safe_get_env(App, Key, Def_val) ->
+ case application:get_env(App, Key) of
+ undefined ->
+ Def_val;
+ {ok, Val} ->
+ Val
+ end.
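+
+%% The defaults above can also be overridden per node through the
+%% application environment (the values here are illustrative):
+%% <code>
+%% application:set_env(ibrowse, default_max_sessions, 20),
+%% application:set_env(ibrowse, default_max_pipeline_size, 50).
+%% </code>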
%% @doc Deprecated. Use set_max_sessions/3 and set_max_pipeline_size/3
%% for achieving the same effect.
@@ -343,7 +407,7 @@ set_dest(_Host, _Port, [H | _]) ->
exit({invalid_option, H});
set_dest(_, _, []) ->
ok.
-
+
%% @doc Set the maximum number of connections allowed to a specific Host:Port.
%% @spec set_max_sessions(Host::string(), Port::integer(), Max::integer()) -> ok
set_max_sessions(Host, Port, Max) when is_integer(Max), Max > 0 ->
@@ -356,21 +420,25 @@ set_max_pipeline_size(Host, Port, Max) when is_integer(Max), Max > 0 ->
do_send_req(Conn_Pid, Parsed_url, Headers, Method, Body, Options, Timeout) ->
case catch ibrowse_http_client:send_req(Conn_Pid, Parsed_url,
- Headers, Method, ensure_bin(Body),
- Options, Timeout) of
- {'EXIT', {timeout, _}} ->
- {error, req_timedout};
- {'EXIT', Reason} ->
- {error, {'EXIT', Reason}};
- {ok, St_code, Headers, Body} = Ret when is_binary(Body) ->
- case get_value(response_format, Options, list) of
- list ->
- {ok, St_code, Headers, binary_to_list(Body)};
- binary ->
- Ret
- end;
- Ret ->
- Ret
+ Headers, Method, ensure_bin(Body),
+ Options, Timeout) of
+ {'EXIT', {timeout, _}} ->
+ {error, req_timedout};
+ {'EXIT', {noproc, {gen_server, call, [Conn_Pid, _, _]}}} ->
+ {error, sel_conn_closed};
+ {error, connection_closed} ->
+ {error, sel_conn_closed};
+ {'EXIT', Reason} ->
+ {error, {'EXIT', Reason}};
+        {ok, St_code, Resp_headers, Resp_body} = Ret when is_binary(Resp_body) ->
+            %% fresh variable names: reusing the bound Headers/Body here
+            %% would turn this pattern into an equality check on the request
+            case get_value(response_format, Options, list) of
+                list ->
+                    {ok, St_code, Resp_headers, binary_to_list(Resp_body)};
+                binary ->
+                    Ret
+            end;
+ Ret ->
+ Ret
end.
ensure_bin(L) when is_list(L) -> list_to_binary(L);
@@ -391,12 +459,25 @@ ensure_bin({Fun, _} = Body) when is_function(Fun) -> Body.
%% <b>Note:</b> It is the responsibility of the calling process to control
%% pipeline size on such connections.
%%
+%% @spec spawn_worker_process(Url::string()) -> {ok, pid()}
+spawn_worker_process(Url) ->
+ ibrowse_http_client:start(Url).
+
+%% @doc Same as spawn_worker_process/1 but takes as input a Host and Port
+%% instead of a URL.
%% @spec spawn_worker_process(Host::string(), Port::integer()) -> {ok, pid()}
spawn_worker_process(Host, Port) ->
ibrowse_http_client:start({Host, Port}).
+%% @doc Same as spawn_worker_process/1 except that the calling process
+%% is linked to the worker process which is spawned.
+%% @spec spawn_link_worker_process(Url::string()) -> {ok, pid()}
+spawn_link_worker_process(Url) ->
+ ibrowse_http_client:start_link(Url).
+
%% @doc Same as spawn_worker_process/2 except that the calling process
%% is linked to the worker process which is spawned.
+%% @spec spawn_link_worker_process(Host::string(), Port::integer()) -> {ok, pid()}
spawn_link_worker_process(Host, Port) ->
ibrowse_http_client:start_link({Host, Port}).
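
%% A sketch of driving a dedicated worker outside the load balancer (the
%% URL is illustrative):
%% <code>
%% {ok, Worker} = ibrowse:spawn_worker_process("http://localhost:5984/"),
%% Res = ibrowse:send_req_direct(Worker, "http://localhost:5984/_all_dbs",
%%                               [], get, [], []),
%% ibrowse_http_client:stop(Worker).
%% </code>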
@@ -426,30 +507,45 @@ send_req_direct(Conn_pid, Url, Headers, Method, Body, Options) ->
%% returned by spawn_worker_process/2 or spawn_link_worker_process/2
send_req_direct(Conn_pid, Url, Headers, Method, Body, Options, Timeout) ->
case catch parse_url(Url) of
- #url{host = Host,
- port = Port} = Parsed_url ->
- Options_1 = merge_options(Host, Port, Options),
- case do_send_req(Conn_pid, Parsed_url, Headers, Method, Body, Options_1, Timeout) of
- {error, {'EXIT', {noproc, _}}} ->
- {error, worker_is_dead};
- Ret ->
- Ret
- end;
- Err ->
- {error, {url_parsing_failed, Err}}
+ #url{host = Host,
+ port = Port} = Parsed_url ->
+ Options_1 = merge_options(Host, Port, Options),
+ case do_send_req(Conn_pid, Parsed_url, Headers, Method, Body, Options_1, Timeout) of
+ {error, {'EXIT', {noproc, _}}} ->
+ {error, worker_is_dead};
+ Ret ->
+ Ret
+ end;
+ Err ->
+ {error, {url_parsing_failed, Err}}
end.
%% @doc Tell ibrowse to stream the next chunk of data to the
%% caller. Should be used in conjunction with the
%% <code>stream_to</code> option
%% @spec stream_next(Req_id :: req_id()) -> ok | {error, unknown_req_id}
-stream_next(Req_id) ->
+stream_next(Req_id) ->
case ets:lookup(ibrowse_stream, {req_id_pid, Req_id}) of
- [] ->
- {error, unknown_req_id};
- [{_, Pid}] ->
- catch Pid ! {stream_next, Req_id},
- ok
+ [] ->
+ {error, unknown_req_id};
+ [{_, Pid}] ->
+ catch Pid ! {stream_next, Req_id},
+ ok
+ end.
+
+%% @doc Tell ibrowse to close the connection associated with the
+%% specified stream. Should be used in conjunction with the
+%% <code>stream_to</code> option. Note that all requests in progress on
+%% the connection which is serving this Req_id will be aborted, and an
+%% error returned.
+%% @spec stream_close(Req_id :: req_id()) -> ok | {error, unknown_req_id}
+stream_close(Req_id) ->
+ case ets:lookup(ibrowse_stream, {req_id_pid, Req_id}) of
+ [] ->
+ {error, unknown_req_id};
+ [{_, Pid}] ->
+ catch Pid ! {stream_close, Req_id},
+ ok
end.
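+
+%% A sketch of caller-controlled streaming using the two calls above (the
+%% URL is illustrative; the message shapes follow ibrowse's async reply
+%% format):
+%% <code>
+%% {ibrowse_req_id, Req_id} =
+%%     ibrowse:send_req("http://localhost:8080/big", [], get, [],
+%%                      [{stream_to, {self(), once}}]),
+%% receive {ibrowse_async_headers, Req_id, _Code, _Hdrs} -> ok end,
+%% ibrowse:stream_next(Req_id),
+%% receive {ibrowse_async_response, Req_id, _Part} -> ok end,
+%% ibrowse:stream_close(Req_id).
+%% </code>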
%% @doc Turn tracing on for the ibrowse process
@@ -462,7 +558,7 @@ trace_off() ->
%% @doc Turn tracing on for all connections to the specified HTTP
%% server. Host is whatever is specified as the domain name in the URL
%% @spec trace_on(Host, Port) -> ok
-%% Host = string()
+%% Host = string()
%% Port = integer()
trace_on(Host, Port) ->
ibrowse ! {trace, true, Host, Port},
@@ -481,77 +577,80 @@ all_trace_off() ->
ibrowse ! all_trace_off,
ok.
+%% @doc Shows some internal information about load balancing. Info
+%% about workers spawned using spawn_worker_process/2 or
+%% spawn_link_worker_process/2 is not included.
show_dest_status() ->
Dests = lists:filter(fun({lb_pid, {Host, Port}, _}) when is_list(Host),
- is_integer(Port) ->
- true;
- (_) ->
- false
- end, ets:tab2list(ibrowse_lb)),
+ is_integer(Port) ->
+ true;
+ (_) ->
+ false
+ end, ets:tab2list(ibrowse_lb)),
All_ets = ets:all(),
io:format("~-40.40s | ~-5.5s | ~-10.10s | ~s~n",
- ["Server:port", "ETS", "Num conns", "LB Pid"]),
+ ["Server:port", "ETS", "Num conns", "LB Pid"]),
io:format("~80.80.=s~n", [""]),
lists:foreach(fun({lb_pid, {Host, Port}, Lb_pid}) ->
- case lists:dropwhile(
- fun(Tid) ->
- ets:info(Tid, owner) /= Lb_pid
- end, All_ets) of
- [] ->
- io:format("~40.40s | ~-5.5s | ~-5.5s | ~s~n",
- [Host ++ ":" ++ integer_to_list(Port),
- "",
- "",
- io_lib:format("~p", [Lb_pid])]
- );
- [Tid | _] ->
- catch (
- begin
- Size = ets:info(Tid, size),
- io:format("~40.40s | ~-5.5s | ~-5.5s | ~s~n",
- [Host ++ ":" ++ integer_to_list(Port),
- integer_to_list(Tid),
- integer_to_list(Size),
- io_lib:format("~p", [Lb_pid])]
- )
- end
- )
- end
- end, Dests).
-
+ case lists:dropwhile(
+ fun(Tid) ->
+ ets:info(Tid, owner) /= Lb_pid
+ end, All_ets) of
+ [] ->
+ io:format("~40.40s | ~-5.5s | ~-5.5s | ~s~n",
+ [Host ++ ":" ++ integer_to_list(Port),
+ "",
+ "",
+ io_lib:format("~p", [Lb_pid])]
+ );
+ [Tid | _] ->
+ catch (
+ begin
+ Size = ets:info(Tid, size),
+ io:format("~40.40s | ~-5.5s | ~-5.5s | ~s~n",
+ [Host ++ ":" ++ integer_to_list(Port),
+ io_lib:format("~p", [Tid]),
+ integer_to_list(Size),
+ io_lib:format("~p", [Lb_pid])]
+ )
+ end
+ )
+ end
+ end, Dests).
+
%% @doc Shows some internal information about load balancing to a
%% specified Host:Port. Info about workers spawned using
%% spawn_worker_process/2 or spawn_link_worker_process/2 is not
%% included.
show_dest_status(Host, Port) ->
case ets:lookup(ibrowse_lb, {Host, Port}) of
- [] ->
- no_active_processes;
- [#lb_pid{pid = Lb_pid}] ->
- io:format("Load Balancer Pid : ~p~n", [Lb_pid]),
- io:format("LB process msg q size : ~p~n", [(catch process_info(Lb_pid, message_queue_len))]),
- case lists:dropwhile(
- fun(Tid) ->
- ets:info(Tid, owner) /= Lb_pid
- end, ets:all()) of
- [] ->
- io:format("Couldn't locate ETS table for ~p~n", [Lb_pid]);
- [Tid | _] ->
- First = ets:first(Tid),
- Last = ets:last(Tid),
- Size = ets:info(Tid, size),
- io:format("LB ETS table id : ~p~n", [Tid]),
- io:format("Num Connections : ~p~n", [Size]),
- case Size of
- 0 ->
- ok;
- _ ->
- {First_p_sz, _} = First,
- {Last_p_sz, _} = Last,
- io:format("Smallest pipeline : ~1000.p~n", [First_p_sz]),
- io:format("Largest pipeline : ~1000.p~n", [Last_p_sz])
- end
- end
+ [] ->
+ no_active_processes;
+ [#lb_pid{pid = Lb_pid}] ->
+ io:format("Load Balancer Pid : ~p~n", [Lb_pid]),
+ io:format("LB process msg q size : ~p~n", [(catch process_info(Lb_pid, message_queue_len))]),
+ case lists:dropwhile(
+ fun(Tid) ->
+ ets:info(Tid, owner) /= Lb_pid
+ end, ets:all()) of
+ [] ->
+ io:format("Couldn't locate ETS table for ~p~n", [Lb_pid]);
+ [Tid | _] ->
+ First = ets:first(Tid),
+ Last = ets:last(Tid),
+ Size = ets:info(Tid, size),
+ io:format("LB ETS table id : ~p~n", [Tid]),
+ io:format("Num Connections : ~p~n", [Size]),
+ case Size of
+ 0 ->
+ ok;
+ _ ->
+ {First_p_sz, _} = First,
+ {Last_p_sz, _} = Last,
+ io:format("Smallest pipeline : ~1000.p~n", [First_p_sz]),
+ io:format("Largest pipeline : ~1000.p~n", [Last_p_sz])
+ end
+ end
end.
%% @doc Clear current configuration for ibrowse and load from the file
@@ -592,40 +691,40 @@ init(_) ->
import_config() ->
case code:priv_dir(ibrowse) of
- {error, _} = Err ->
- Err;
- PrivDir ->
- Filename = filename:join(PrivDir, "ibrowse.conf"),
- import_config(Filename)
+ {error, _} = Err ->
+ Err;
+ PrivDir ->
+ Filename = filename:join(PrivDir, "ibrowse.conf"),
+ import_config(Filename)
end.
import_config(Filename) ->
case file:consult(Filename) of
- {ok, Terms} ->
- ets:delete_all_objects(ibrowse_conf),
- Fun = fun({dest, Host, Port, MaxSess, MaxPipe, Options})
- when is_list(Host), is_integer(Port),
- is_integer(MaxSess), MaxSess > 0,
- is_integer(MaxPipe), MaxPipe > 0, is_list(Options) ->
- I = [{{max_sessions, Host, Port}, MaxSess},
- {{max_pipeline_size, Host, Port}, MaxPipe},
- {{options, Host, Port}, Options}],
- lists:foreach(
- fun({X, Y}) ->
- ets:insert(ibrowse_conf,
- #ibrowse_conf{key = X,
- value = Y})
- end, I);
- ({K, V}) ->
- ets:insert(ibrowse_conf,
- #ibrowse_conf{key = K,
- value = V});
- (X) ->
- io:format("Skipping unrecognised term: ~p~n", [X])
- end,
- lists:foreach(Fun, Terms);
- Err ->
- Err
+ {ok, Terms} ->
+ ets:delete_all_objects(ibrowse_conf),
+ Fun = fun({dest, Host, Port, MaxSess, MaxPipe, Options})
+ when is_list(Host), is_integer(Port),
+ is_integer(MaxSess), MaxSess > 0,
+ is_integer(MaxPipe), MaxPipe > 0, is_list(Options) ->
+ I = [{{max_sessions, Host, Port}, MaxSess},
+ {{max_pipeline_size, Host, Port}, MaxPipe},
+ {{options, Host, Port}, Options}],
+ lists:foreach(
+ fun({X, Y}) ->
+ ets:insert(ibrowse_conf,
+ #ibrowse_conf{key = X,
+ value = Y})
+ end, I);
+ ({K, V}) ->
+ ets:insert(ibrowse_conf,
+ #ibrowse_conf{key = K,
+ value = V});
+ (X) ->
+ io:format("Skipping unrecognised term: ~p~n", [X])
+ end,
+ lists:foreach(Fun, Terms);
+ Err ->
+ Err
end.
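
%% An ibrowse.conf consistent with the terms parsed above might contain
%% (the host, limits and path here are illustrative):
%% <code>
%% {dest, "localhost", 5984, 10, 100, [{is_ssl, false}]}.
%% {download_dir, "/tmp/ibrowse"}.
%% </code>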
%% @doc Internal export
@@ -636,10 +735,10 @@ get_config_value(Key) ->
%% @doc Internal export
get_config_value(Key, DefVal) ->
case ets:lookup(ibrowse_conf, Key) of
- [] ->
- DefVal;
- [#ibrowse_conf{value = V}] ->
- V
+ [] ->
+ DefVal;
+ [#ibrowse_conf{value = V}] ->
+ V
end.
set_config_value(Key, Val) ->
@@ -660,6 +759,10 @@ handle_call({get_lb_pid, #url{host = Host, port = Port} = Url}, _From, State) ->
handle_call(stop, _From, State) ->
do_trace("IBROWSE shutting down~n", []),
+ ets:foldl(fun(#lb_pid{pid = Pid}, Acc) ->
+ ibrowse_lb:stop(Pid),
+ Acc
+ end, [], ibrowse_lb),
{stop, normal, ok, State};
handle_call({set_config_value, Key, Val}, _From, State) ->
@@ -700,36 +803,36 @@ handle_info(all_trace_off, State) ->
Mspec = [{{ibrowse_conf,{trace,'$1','$2'},true},[],[{{'$1','$2'}}]}],
Trace_on_dests = ets:select(ibrowse_conf, Mspec),
Fun = fun(#lb_pid{host_port = {H, P}, pid = Pid}, _) ->
- case lists:member({H, P}, Trace_on_dests) of
- false ->
- ok;
- true ->
- catch Pid ! {trace, false}
- end;
- (_, Acc) ->
- Acc
- end,
+ case lists:member({H, P}, Trace_on_dests) of
+ false ->
+ ok;
+ true ->
+ catch Pid ! {trace, false}
+ end;
+ (_, Acc) ->
+ Acc
+ end,
ets:foldl(Fun, undefined, ibrowse_lb),
ets:select_delete(ibrowse_conf, [{{ibrowse_conf,{trace,'$1','$2'},true},[],['true']}]),
{noreply, State};
-
+
handle_info({trace, Bool}, State) ->
put(my_trace_flag, Bool),
{noreply, State};
handle_info({trace, Bool, Host, Port}, State) ->
Fun = fun(#lb_pid{host_port = {H, P}, pid = Pid}, _)
- when H == Host,
- P == Port ->
- catch Pid ! {trace, Bool};
- (_, Acc) ->
- Acc
- end,
+ when H == Host,
+ P == Port ->
+ catch Pid ! {trace, Bool};
+ (_, Acc) ->
+ Acc
+ end,
ets:foldl(Fun, undefined, ibrowse_lb),
ets:insert(ibrowse_conf, #ibrowse_conf{key = {trace, Host, Port},
- value = Bool}),
+ value = Bool}),
{noreply, State};
-
+
handle_info(_Info, State) ->
{noreply, State}.
diff --git a/src/ibrowse/ibrowse_app.erl b/src/ibrowse/ibrowse_app.erl
index 8c83e8f1..d3a0f7bb 100644
--- a/src/ibrowse/ibrowse_app.erl
+++ b/src/ibrowse/ibrowse_app.erl
@@ -1,12 +1,11 @@
%%%-------------------------------------------------------------------
%%% File : ibrowse_app.erl
%%% Author : Chandrashekhar Mullaparthi <chandrashekhar.mullaparthi@t-mobile.co.uk>
-%%% Description :
+%%% Description :
%%%
%%% Created : 15 Oct 2003 by Chandrashekhar Mullaparthi <chandrashekhar.mullaparthi@t-mobile.co.uk>
%%%-------------------------------------------------------------------
-module(ibrowse_app).
--vsn('$Id: ibrowse_app.erl,v 1.1 2005/05/05 22:28:28 chandrusf Exp $ ').
-behaviour(application).
%%--------------------------------------------------------------------
@@ -42,11 +41,11 @@
%% Func: start/2
%% Returns: {ok, Pid} |
%% {ok, Pid, State} |
-%% {error, Reason}
+%% {error, Reason}
%%--------------------------------------------------------------------
start(_Type, _StartArgs) ->
case ibrowse_sup:start_link() of
- {ok, Pid} ->
+ {ok, Pid} ->
{ok, Pid};
Error ->
Error
@@ -54,7 +53,7 @@ start(_Type, _StartArgs) ->
%%--------------------------------------------------------------------
%% Func: stop/1
-%% Returns: any
+%% Returns: any
%%--------------------------------------------------------------------
stop(_State) ->
ok.
diff --git a/src/ibrowse/ibrowse_http_client.erl b/src/ibrowse/ibrowse_http_client.erl
index 5f62f705..5ff323cd 100644
--- a/src/ibrowse/ibrowse_http_client.erl
+++ b/src/ibrowse/ibrowse_http_client.erl
@@ -6,8 +6,6 @@
%%% Created : 11 Oct 2003 by Chandrashekhar Mullaparthi <chandrashekhar.mullaparthi@t-mobile.co.uk>
%%%-------------------------------------------------------------------
-module(ibrowse_http_client).
--vsn('$Id: ibrowse_http_client.erl,v 1.19 2009/07/01 22:43:19 chandrusf Exp $ ').
-
-behaviour(gen_server).
%%--------------------------------------------------------------------
%% Include files
@@ -16,11 +14,11 @@
%%--------------------------------------------------------------------
%% External exports
-export([
- start_link/1,
- start/1,
- stop/1,
- send_req/7
- ]).
+ start_link/1,
+ start/1,
+ stop/1,
+ send_req/7
+ ]).
-ifdef(debug).
-compile(export_all).
@@ -28,41 +26,47 @@
%% gen_server callbacks
-export([
- init/1,
- handle_call/3,
- handle_cast/2,
- handle_info/2,
- terminate/2,
- code_change/3
- ]).
+ init/1,
+ handle_call/3,
+ handle_cast/2,
+ handle_info/2,
+ terminate/2,
+ code_change/3
+ ]).
-include("ibrowse.hrl").
--record(state, {host, port,
- use_proxy = false, proxy_auth_digest,
- ssl_options = [], is_ssl = false, socket,
- reqs=queue:new(), cur_req, status=idle, http_status_code,
- reply_buffer = <<>>, rep_buf_size=0, streamed_size = 0,
- recvd_headers=[],
- is_closing, send_timer, content_length,
- deleted_crlf = false, transfer_encoding,
- chunk_size, chunk_size_buffer = <<>>, recvd_chunk_size,
- lb_ets_tid, cur_pipeline_size = 0, prev_req_id
- }).
+-record(state, {host, port, connect_timeout,
+ inactivity_timer_ref,
+ use_proxy = false, proxy_auth_digest,
+ ssl_options = [], is_ssl = false, socket,
+ proxy_tunnel_setup = false,
+ tunnel_setup_queue = [],
+ reqs=queue:new(), cur_req, status=idle, http_status_code,
+ reply_buffer = <<>>, rep_buf_size=0, streamed_size = 0,
+ recvd_headers=[],
+ status_line, raw_headers,
+ is_closing, send_timer, content_length,
+ deleted_crlf = false, transfer_encoding,
+ chunk_size, chunk_size_buffer = <<>>,
+ recvd_chunk_size, interim_reply_sent = false,
+ lb_ets_tid, cur_pipeline_size = 0, prev_req_id
+ }).
-record(request, {url, method, options, from,
- stream_to, caller_controls_socket = false,
- req_id,
- stream_chunk_size,
- save_response_to_file = false,
- tmp_file_name, tmp_file_fd,
- response_format}).
+ stream_to, caller_controls_socket = false,
+ caller_socket_options = [],
+ req_id,
+ stream_chunk_size,
+ save_response_to_file = false,
+ tmp_file_name, tmp_file_fd, preserve_chunked_encoding,
+ response_format}).
-import(ibrowse_lib, [
- get_value/2,
- get_value/3,
- do_trace/2
- ]).
+ get_value/2,
+ get_value/3,
+ do_trace/2
+ ]).
-define(DEFAULT_STREAM_CHUNK_SIZE, 1024*1024).
@@ -80,7 +84,13 @@ start_link(Args) ->
gen_server:start_link(?MODULE, Args, []).
stop(Conn_pid) ->
- gen_server:call(Conn_pid, stop).
+ case catch gen_server:call(Conn_pid, stop) of
+ {'EXIT', {timeout, _}} ->
+ exit(Conn_pid, kill),
+ ok;
+ _ ->
+ ok
+ end.
send_req(Conn_Pid, Url, Headers, Method, Body, Options, Timeout) ->
gen_server:call(
@@ -101,26 +111,23 @@ send_req(Conn_Pid, Url, Headers, Method, Body, Options, Timeout) ->
%%--------------------------------------------------------------------
init({Lb_Tid, #url{host = Host, port = Port}, {SSLOptions, Is_ssl}}) ->
State = #state{host = Host,
- port = Port,
- ssl_options = SSLOptions,
- is_ssl = Is_ssl,
- lb_ets_tid = Lb_Tid},
+ port = Port,
+ ssl_options = SSLOptions,
+ is_ssl = Is_ssl,
+ lb_ets_tid = Lb_Tid},
put(ibrowse_trace_token, [Host, $:, integer_to_list(Port)]),
put(my_trace_flag, ibrowse_lib:get_trace_status(Host, Port)),
{ok, State};
+init(Url) when is_list(Url) ->
+ case catch ibrowse_lib:parse_url(Url) of
+ #url{protocol = Protocol} = Url_rec ->
+ init({undefined, Url_rec, {[], Protocol == https}});
+ {'EXIT', _} ->
+ {error, invalid_url}
+ end;
init({Host, Port}) ->
State = #state{host = Host,
- port = Port},
- put(ibrowse_trace_token, [Host, $:, integer_to_list(Port)]),
- put(my_trace_flag, ibrowse_lib:get_trace_status(Host, Port)),
- {ok, State};
-init(#url{host=Host, port=Port, protocol=Protocol}) ->
- State = #state{
- host = Host,
- port = Port,
- is_ssl = (Protocol == https),
- ssl_options = [{ssl_imp, new}]
- },
+ port = Port},
put(ibrowse_trace_token, [Host, $:, integer_to_list(Port)]),
put(my_trace_flag, ibrowse_lib:get_trace_status(Host, Port)),
{ok, State}.
@@ -141,13 +148,13 @@ handle_call({send_req, _}, _From, #state{is_closing = true} = State) ->
{reply, {error, connection_closing}, State};
handle_call({send_req, {Url, Headers, Method, Body, Options, Timeout}},
- From, State) ->
+ From, State) ->
send_req_1(From, Url, Headers, Method, Body, Options, Timeout, State);
handle_call(stop, _From, State) ->
do_close(State),
do_error_reply(State, closing_on_request),
- {stop, normal, ok, State#state{socket=undefined}};
+ {stop, normal, ok, State};
handle_call(Request, _From, State) ->
Reply = {unknown_request, Request},
@@ -171,21 +178,28 @@ handle_cast(_Msg, State) ->
%% {stop, Reason, State} (terminate/2 is called)
%%--------------------------------------------------------------------
handle_info({tcp, _Sock, Data}, #state{status = Status} = State) ->
+%% io:format("Recvd data: ~p~n", [Data]),
do_trace("Data recvd in state: ~p. Size: ~p. ~p~n~n", [Status, size(Data), Data]),
handle_sock_data(Data, State);
handle_info({ssl, _Sock, Data}, State) ->
handle_sock_data(Data, State);
handle_info({stream_next, Req_id}, #state{socket = Socket,
- is_ssl = Is_ssl,
- cur_req = #request{req_id = Req_id}} = State) ->
- do_setopts(Socket, [{active, once}], Is_ssl),
+ cur_req = #request{req_id = Req_id}} = State) ->
+ %% io:format("Client process set {active, once}~n", []),
+ do_setopts(Socket, [{active, once}], State),
{noreply, State};
handle_info({stream_next, _Req_id}, State) ->
{noreply, State};
-handle_info({tcp_closed, _Sock}, State) ->
+handle_info({stream_close, _Req_id}, State) ->
+ shutting_down(State),
+ do_close(State),
+ do_error_reply(State, closing_on_request),
+ {stop, normal, ok, State};
+
+handle_info({tcp_closed, _Sock}, State) ->
do_trace("TCP connection closed by peer!~n", []),
handle_sock_closed(State),
{stop, normal, State};
@@ -195,25 +209,26 @@ handle_info({ssl_closed, _Sock}, State) ->
{stop, normal, State};
handle_info({tcp_error, _Sock}, State) ->
- io:format("Error on connection to ~1000.p:~1000.p~n", [State#state.host, State#state.port]),
+ do_trace("Error on connection to ~1000.p:~1000.p~n", [State#state.host, State#state.port]),
handle_sock_closed(State),
{stop, normal, State};
handle_info({ssl_error, _Sock}, State) ->
- io:format("Error on SSL connection to ~1000.p:~1000.p~n", [State#state.host, State#state.port]),
+ do_trace("Error on SSL connection to ~1000.p:~1000.p~n", [State#state.host, State#state.port]),
handle_sock_closed(State),
{stop, normal, State};
handle_info({req_timedout, From}, State) ->
- case lists:keysearch(From, #request.from, queue:to_list(State#state.reqs)) of
- false ->
- {noreply, State};
- {value, _} ->
- shutting_down(State),
- do_error_reply(State, req_timedout),
- {stop, normal, State}
+ case lists:keymember(From, #request.from, queue:to_list(State#state.reqs)) of
+ false ->
+ {noreply, State};
+ true ->
+ shutting_down(State),
+ do_error_reply(State, req_timedout),
+ {stop, normal, State}
end;
handle_info(timeout, State) ->
+ do_trace("Inactivity timeout triggered. Shutting down connection~n", []),
shutting_down(State),
do_error_reply(State, req_timedout),
{stop, normal, State};
@@ -224,7 +239,7 @@ handle_info({trace, Bool}, State) ->
handle_info(Info, State) ->
io:format("Unknown message recvd for ~1000.p:~1000.p -> ~p~n",
- [State#state.host, State#state.port, Info]),
+ [State#state.host, State#state.port, Info]),
io:format("Recvd unknown message ~p when in state: ~p~n", [Info, State]),
{noreply, State}.
@@ -234,7 +249,8 @@ handle_info(Info, State) ->
%% Returns: any (ignored by gen_server)
%%--------------------------------------------------------------------
terminate(_Reason, State) ->
- do_close(State).
+ do_close(State),
+ ok.
%%--------------------------------------------------------------------
%% Func: code_change/3
@@ -260,133 +276,160 @@ handle_sock_data(Data, #state{status=idle}=State) ->
handle_sock_data(Data, #state{status = get_header}=State) ->
case parse_response(Data, State) of
- {error, _Reason} ->
- shutting_down(State),
- {stop, normal, State};
- stop ->
- shutting_down(State),
- {stop, normal, State};
- State_1 ->
- active_once(State_1),
- {noreply, State_1, get_inac_timeout(State_1)}
+ {error, _Reason} ->
+ shutting_down(State),
+ {stop, normal, State};
+ State_1 ->
+ active_once(State_1),
+ State_2 = set_inac_timer(State_1),
+ {noreply, State_2}
end;
handle_sock_data(Data, #state{status = get_body,
- content_length = CL,
- http_status_code = StatCode,
- recvd_headers = Headers,
- chunk_size = CSz} = State) ->
+ socket = Socket,
+ content_length = CL,
+ http_status_code = StatCode,
+ recvd_headers = Headers,
+ chunk_size = CSz} = State) ->
case (CL == undefined) and (CSz == undefined) of
- true ->
- case accumulate_response(Data, State) of
- {error, Reason} ->
- shutting_down(State),
- fail_pipelined_requests(State,
- {error, {Reason, {stat_code, StatCode}, Headers}}),
- {stop, normal, State};
- State_1 ->
- active_once(State_1),
- {noreply, State_1, get_inac_timeout(State_1)}
- end;
- _ ->
- case parse_11_response(Data, State) of
- {error, Reason} ->
- shutting_down(State),
- fail_pipelined_requests(State,
- {error, {Reason, {stat_code, StatCode}, Headers}}),
- {stop, normal, State};
- stop ->
- shutting_down(State),
- {stop, normal, State};
- State_1 ->
- active_once(State_1),
- {noreply, State_1, get_inac_timeout(State_1)}
- end
+ true ->
+ case accumulate_response(Data, State) of
+ {error, Reason} ->
+ shutting_down(State),
+ fail_pipelined_requests(State,
+ {error, {Reason, {stat_code, StatCode}, Headers}}),
+ {stop, normal, State};
+ State_1 ->
+ active_once(State_1),
+ State_2 = set_inac_timer(State_1),
+ {noreply, State_2}
+ end;
+ _ ->
+ case parse_11_response(Data, State) of
+ {error, Reason} ->
+ shutting_down(State),
+ fail_pipelined_requests(State,
+ {error, {Reason, {stat_code, StatCode}, Headers}}),
+ {stop, normal, State};
+ #state{cur_req = #request{caller_controls_socket = Ccs},
+ interim_reply_sent = Irs} = State_1 ->
+ case Irs of
+ true ->
+ active_once(State_1);
+ false when Ccs == true ->
+ do_setopts(Socket, [{active, once}], State);
+ false ->
+ active_once(State_1)
+ end,
+ State_2 = State_1#state{interim_reply_sent = false},
+ State_3 = set_inac_timer(State_2),
+ {noreply, State_3};
+ State_1 ->
+ active_once(State_1),
+ State_2 = set_inac_timer(State_1),
+ {noreply, State_2}
+ end
end.
accumulate_response(Data,
- #state{
- cur_req = #request{save_response_to_file = true,
- tmp_file_fd = undefined} = CurReq,
- http_status_code=[$2 | _]}=State) ->
- TmpFilename = make_tmp_filename(),
+ #state{
+ cur_req = #request{save_response_to_file = Srtf,
+ tmp_file_fd = undefined} = CurReq,
+ http_status_code=[$2 | _]}=State) when Srtf /= false ->
+ TmpFilename = make_tmp_filename(Srtf),
case file:open(TmpFilename, [write, delayed_write, raw]) of
- {ok, Fd} ->
- accumulate_response(Data, State#state{
- cur_req = CurReq#request{
- tmp_file_fd = Fd,
- tmp_file_name = TmpFilename}});
- {error, Reason} ->
- {error, {file_open_error, Reason}}
+ {ok, Fd} ->
+ accumulate_response(Data, State#state{
+ cur_req = CurReq#request{
+ tmp_file_fd = Fd,
+ tmp_file_name = TmpFilename}});
+ {error, Reason} ->
+ {error, {file_open_error, Reason}}
end;
-accumulate_response(Data, #state{cur_req = #request{save_response_to_file = true,
- tmp_file_fd = Fd},
- transfer_encoding=chunked,
- reply_buffer = Reply_buf,
- http_status_code=[$2 | _]
- } = State) ->
+accumulate_response(Data, #state{cur_req = #request{save_response_to_file = Srtf,
+ tmp_file_fd = Fd},
+ transfer_encoding=chunked,
+ reply_buffer = Reply_buf,
+ http_status_code=[$2 | _]
+ } = State) when Srtf /= false ->
case file:write(Fd, [Reply_buf, Data]) of
- ok ->
- State#state{reply_buffer = <<>>};
- {error, Reason} ->
- {error, {file_write_error, Reason}}
+ ok ->
+ State#state{reply_buffer = <<>>};
+ {error, Reason} ->
+ {error, {file_write_error, Reason}}
end;
-accumulate_response(Data, #state{cur_req = #request{save_response_to_file = true,
- tmp_file_fd = Fd},
- reply_buffer = RepBuf,
- http_status_code=[$2 | _]
- } = State) ->
+accumulate_response(Data, #state{cur_req = #request{save_response_to_file = Srtf,
+ tmp_file_fd = Fd},
+ reply_buffer = RepBuf,
+ http_status_code=[$2 | _]
+ } = State) when Srtf /= false ->
case file:write(Fd, [RepBuf, Data]) of
- ok ->
- State#state{reply_buffer = <<>>};
- {error, Reason} ->
- {error, {file_write_error, Reason}}
+ ok ->
+ State#state{reply_buffer = <<>>};
+ {error, Reason} ->
+ {error, {file_write_error, Reason}}
end;
-accumulate_response(<<>>, State) ->
- State;
-accumulate_response(Data, #state{reply_buffer = RepBuf,
- rep_buf_size = RepBufSize,
- streamed_size = Streamed_size,
- cur_req = CurReq}=State) ->
- #request{stream_to=StreamTo, req_id=ReqId,
- stream_chunk_size = Stream_chunk_size,
- response_format = Response_format,
- caller_controls_socket = Caller_controls_socket} = CurReq,
- RepBuf_1 = concat_binary([RepBuf, Data]),
+%% accumulate_response(<<>>, #state{cur_req = #request{caller_controls_socket = Ccs},
+%% socket = Socket} = State) ->
+%% case Ccs of
+%% true ->
+%% do_setopts(Socket, [{active, once}], State);
+%% false ->
+%% ok
+%% end,
+%% State;
+accumulate_response(Data, #state{reply_buffer = RepBuf,
+ rep_buf_size = RepBufSize,
+ streamed_size = Streamed_size,
+ cur_req = CurReq}=State) ->
+ #request{stream_to = StreamTo,
+ req_id = ReqId,
+ stream_chunk_size = Stream_chunk_size,
+ response_format = Response_format,
+ caller_controls_socket = Caller_controls_socket} = CurReq,
+ RepBuf_1 = <<RepBuf/binary, Data/binary>>,
New_data_size = RepBufSize - Streamed_size,
case StreamTo of
- undefined ->
- State#state{reply_buffer = RepBuf_1};
- _ when Caller_controls_socket == true ->
- do_interim_reply(StreamTo, Response_format, ReqId, RepBuf_1),
- State#state{reply_buffer = <<>>,
- streamed_size = Streamed_size + size(RepBuf_1)};
- _ when New_data_size >= Stream_chunk_size ->
- {Stream_chunk, Rem_data} = split_binary(RepBuf_1, Stream_chunk_size),
- do_interim_reply(StreamTo, Response_format, ReqId, Stream_chunk),
- accumulate_response(
- Rem_data,
- State#state{
- reply_buffer = <<>>,
- streamed_size = Streamed_size + Stream_chunk_size});
- _ ->
- State#state{reply_buffer = RepBuf_1}
+ undefined ->
+ State#state{reply_buffer = RepBuf_1};
+ _ when Caller_controls_socket == true ->
+ do_interim_reply(StreamTo, Response_format, ReqId, RepBuf_1),
+ State#state{reply_buffer = <<>>,
+ interim_reply_sent = true,
+ streamed_size = Streamed_size + size(RepBuf_1)};
+ _ when New_data_size >= Stream_chunk_size ->
+ {Stream_chunk, Rem_data} = split_binary(RepBuf_1, Stream_chunk_size),
+ do_interim_reply(StreamTo, Response_format, ReqId, Stream_chunk),
+ State_1 = State#state{
+ reply_buffer = <<>>,
+ interim_reply_sent = true,
+ streamed_size = Streamed_size + Stream_chunk_size},
+ case Rem_data of
+ <<>> ->
+ State_1;
+ _ ->
+ accumulate_response(Rem_data, State_1)
+ end;
+ _ ->
+ State#state{reply_buffer = RepBuf_1}
end.
-make_tmp_filename() ->
+make_tmp_filename(true) ->
DownloadDir = ibrowse:get_config_value(download_dir, filename:absname("./")),
{A,B,C} = now(),
filename:join([DownloadDir,
- "ibrowse_tmp_file_"++
- integer_to_list(A) ++
- integer_to_list(B) ++
- integer_to_list(C)]).
+ "ibrowse_tmp_file_"++
+ integer_to_list(A) ++
+ integer_to_list(B) ++
+ integer_to_list(C)]);
+make_tmp_filename(File) when is_list(File) ->
+ File.
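+
+%% With save_response_to_file now accepting a filename, a download can be
+%% directed to a specific path (the URL and path are illustrative):
+%% <code>
+%% ibrowse:send_req("http://localhost:8080/big.tar.gz", [], get, [],
+%%                  [{save_response_to_file, "/tmp/big.tar.gz"}]).
+%% </code>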
%%--------------------------------------------------------------------
%% Handles the case when the server closes the socket
%%--------------------------------------------------------------------
-handle_sock_closed(#state{status=get_header}=State) ->
+handle_sock_closed(#state{status=get_header} = State) ->
shutting_down(State),
do_error_reply(State, connection_closed);
@@ -397,40 +440,73 @@ handle_sock_closed(#state{cur_req=undefined} = State) ->
%% Connection-Close header and has closed the socket to indicate end
%% of response. There maybe requests pipelined which need a response.
handle_sock_closed(#state{reply_buffer = Buf, reqs = Reqs, http_status_code = SC,
- is_closing = IsClosing,
- cur_req = #request{tmp_file_name=TmpFilename,
- tmp_file_fd=Fd} = CurReq,
- status = get_body, recvd_headers = Headers}=State) ->
+ is_closing = IsClosing,
+ cur_req = #request{tmp_file_name=TmpFilename,
+ tmp_file_fd=Fd} = CurReq,
+ status = get_body,
+ recvd_headers = Headers,
+ status_line = Status_line,
+ raw_headers = Raw_headers
+ }=State) ->
#request{from=From, stream_to=StreamTo, req_id=ReqId,
- response_format = Resp_format} = CurReq,
+ response_format = Resp_format,
+ options = Options} = CurReq,
case IsClosing of
- true ->
- {_, Reqs_1} = queue:out(Reqs),
- case TmpFilename of
- undefined ->
- do_reply(State, From, StreamTo, ReqId, Resp_format,
- {ok, SC, Headers, Buf});
- _ ->
- file:close(Fd),
- do_reply(State, From, StreamTo, ReqId, Resp_format,
- {ok, SC, Headers, {file, TmpFilename}})
- end,
- do_error_reply(State#state{reqs = Reqs_1}, connection_closed),
- State;
- _ ->
- do_error_reply(State, connection_closed),
- State
+ true ->
+ {_, Reqs_1} = queue:out(Reqs),
+ Body = case TmpFilename of
+ undefined ->
+ Buf;
+ _ ->
+ file:close(Fd),
+ {file, TmpFilename}
+ end,
+ Reply = case get_value(give_raw_headers, Options, false) of
+ true ->
+ {ok, Status_line, Raw_headers, Body};
+ false ->
+                            {ok, SC, Headers, Body}
+ end,
+ do_reply(State, From, StreamTo, ReqId, Resp_format, Reply),
+ do_error_reply(State#state{reqs = Reqs_1}, connection_closed),
+ State;
+ _ ->
+ do_error_reply(State, connection_closed),
+ State
end.
-do_connect(Host, Port, _Options, #state{is_ssl=true, ssl_options=SSLOptions}, Timeout) ->
+do_connect(Host, Port, Options, #state{is_ssl = true,
+ use_proxy = false,
+ ssl_options = SSLOptions},
+ Timeout) ->
+ Caller_socket_options = get_value(socket_options, Options, []),
+ Other_sock_options = filter_sock_options(SSLOptions ++ Caller_socket_options),
ssl:connect(Host, Port,
- [binary, {nodelay, true}, {active, false} | SSLOptions],
- Timeout);
-do_connect(Host, Port, _Options, _State, Timeout) ->
- gen_tcp:connect(Host, Port,
- [binary, {nodelay, true}, {active, false}],
- Timeout).
-
+ [binary, {nodelay, true}, {active, false} | Other_sock_options],
+ Timeout);
+do_connect(Host, Port, Options, _State, Timeout) ->
+ Caller_socket_options = get_value(socket_options, Options, []),
+ Other_sock_options = filter_sock_options(Caller_socket_options),
+ gen_tcp:connect(Host, to_integer(Port),
+ [binary, {nodelay, true}, {active, false} | Other_sock_options],
+ Timeout).
+
+%% We don't want the caller to specify certain options
+filter_sock_options(Opts) ->
+ lists:filter(fun({active, _}) ->
+ false;
+ ({packet, _}) ->
+ false;
+ (list) ->
+ false;
+ (_) ->
+ true
+ end, Opts).
+
+do_send(Req, #state{socket = Sock,
+ is_ssl = true,
+ use_proxy = true,
+ proxy_tunnel_setup = Pts}) when Pts /= done -> gen_tcp:send(Sock, Req);
do_send(Req, #state{socket = Sock, is_ssl = true}) -> ssl:send(Sock, Req);
do_send(Req, #state{socket = Sock, is_ssl = false}) -> gen_tcp:send(Sock, Req).
@@ -439,274 +515,375 @@ do_send(Req, #state{socket = Sock, is_ssl = false}) -> gen_tcp:send(Sock, Req).
%% {fun_arity_0} |
%% {fun_arity_1, term()}
%% error() = term()
-do_send_body(Source, State) when is_function(Source) ->
- do_send_body({Source}, State);
-do_send_body({Source}, State) when is_function(Source) ->
- do_send_body1(Source, Source(), State);
-do_send_body({Source, Source_state}, State) when is_function(Source) ->
- do_send_body1(Source, Source(Source_state), State);
-do_send_body(Body, State) ->
+do_send_body(Source, State, TE) when is_function(Source) ->
+ do_send_body({Source}, State, TE);
+do_send_body({Source}, State, TE) when is_function(Source) ->
+ do_send_body1(Source, Source(), State, TE);
+do_send_body({Source, Source_state}, State, TE) when is_function(Source) ->
+ do_send_body1(Source, Source(Source_state), State, TE);
+do_send_body(Body, State, _TE) ->
do_send(Body, State).
-do_send_body1(Source, Resp, State) ->
+do_send_body1(Source, Resp, State, TE) ->
case Resp of
- {ok, Data} ->
- do_send(Data, State),
- do_send_body({Source}, State);
- {ok, Data, New_source_state} ->
- do_send(Data, State),
- do_send_body({Source, New_source_state}, State);
- eof ->
- ok;
- Err ->
- Err
+ {ok, Data} ->
+ do_send(maybe_chunked_encode(Data, TE), State),
+ do_send_body({Source}, State, TE);
+ {ok, Data, New_source_state} ->
+ do_send(maybe_chunked_encode(Data, TE), State),
+ do_send_body({Source, New_source_state}, State, TE);
+ eof when TE == true ->
+ do_send(<<"0\r\n\r\n">>, State),
+ ok;
+ eof ->
+ ok;
+ Err ->
+ Err
end.
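+%% Each chunk is framed as in RFC 2616 section 3.6.1: the chunk size in
+%% hex, CRLF, the data, CRLF. do_send_body1 sends the terminating
+%% zero-length chunk when the body source returns eof.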
+maybe_chunked_encode(Data, false) ->
+ Data;
+maybe_chunked_encode(Data, true) ->
+ [ibrowse_lib:dec2hex(byte_size(to_binary(Data))), "\r\n", Data, "\r\n"].
+
do_close(#state{socket = undefined}) -> ok;
-do_close(#state{socket = Sock, is_ssl = true}) -> ssl:close(Sock);
-do_close(#state{socket = Sock, is_ssl = false}) -> gen_tcp:close(Sock).
+do_close(#state{socket = Sock,
+ is_ssl = true,
+ use_proxy = true,
+ proxy_tunnel_setup = Pts
+ }) when Pts /= done -> catch gen_tcp:close(Sock);
+do_close(#state{socket = Sock, is_ssl = true}) -> catch ssl:close(Sock);
+do_close(#state{socket = Sock, is_ssl = false}) -> catch gen_tcp:close(Sock).
active_once(#state{cur_req = #request{caller_controls_socket = true}}) ->
ok;
-active_once(#state{socket = Socket, is_ssl = Is_ssl}) ->
- do_setopts(Socket, [{active, once}], Is_ssl).
+active_once(#state{socket = Socket} = State) ->
+ do_setopts(Socket, [{active, once}], State).
-do_setopts(Sock, Opts, true) -> ssl:setopts(Sock, Opts);
-do_setopts(Sock, Opts, false) -> inet:setopts(Sock, Opts).
+do_setopts(_Sock, [], _) -> ok;
+do_setopts(Sock, Opts, #state{is_ssl = true,
+ use_proxy = true,
+ proxy_tunnel_setup = Pts}
+ ) when Pts /= done -> inet:setopts(Sock, Opts);
+do_setopts(Sock, Opts, #state{is_ssl = true}) -> ssl:setopts(Sock, Opts);
+do_setopts(Sock, Opts, _) -> inet:setopts(Sock, Opts).
check_ssl_options(Options, State) ->
case get_value(is_ssl, Options, false) of
- false ->
- State;
- true ->
- State#state{is_ssl=true, ssl_options=get_value(ssl_options, Options)}
+ false ->
+ State;
+ true ->
+ State#state{is_ssl=true, ssl_options=get_value(ssl_options, Options)}
end.
send_req_1(From,
- #url{host = Host,
- port = Port} = Url,
- Headers, Method, Body, Options, Timeout,
- #state{socket = undefined} = State) ->
+ #url{host = Host,
+ port = Port} = Url,
+ Headers, Method, Body, Options, Timeout,
+ #state{socket = undefined} = State) ->
{Host_1, Port_1, State_1} =
- case get_value(proxy_host, Options, false) of
- false ->
- {Host, Port, State};
- PHost ->
- ProxyUser = get_value(proxy_user, Options, []),
- ProxyPassword = get_value(proxy_password, Options, []),
- Digest = http_auth_digest(ProxyUser, ProxyPassword),
- {PHost, get_value(proxy_port, Options, 80),
- State#state{use_proxy = true,
- proxy_auth_digest = Digest}}
- end,
+ case get_value(proxy_host, Options, false) of
+ false ->
+ {Host, Port, State};
+ PHost ->
+ ProxyUser = get_value(proxy_user, Options, []),
+ ProxyPassword = get_value(proxy_password, Options, []),
+ Digest = http_auth_digest(ProxyUser, ProxyPassword),
+ {PHost, get_value(proxy_port, Options, 80),
+ State#state{use_proxy = true,
+ proxy_auth_digest = Digest}}
+ end,
State_2 = check_ssl_options(Options, State_1),
do_trace("Connecting...~n", []),
- Start_ts = now(),
Conn_timeout = get_value(connect_timeout, Options, Timeout),
case do_connect(Host_1, Port_1, Options, State_2, Conn_timeout) of
- {ok, Sock} ->
- do_trace("Connected!~n", []),
- End_ts = now(),
- Timeout_1 = case Timeout of
- infinity ->
- infinity;
- _ ->
- Timeout - trunc(round(timer:now_diff(End_ts, Start_ts) / 1000))
- end,
- State_3 = State_2#state{socket = Sock},
- send_req_1(From, Url, Headers, Method, Body, Options, Timeout_1, State_3);
- Err ->
- shutting_down(State_2),
- do_trace("Error connecting. Reason: ~1000.p~n", [Err]),
- gen_server:reply(From, {error, conn_failed}),
- {stop, normal, State_2}
+ {ok, Sock} ->
+ do_trace("Connected! Socket: ~1000.p~n", [Sock]),
+ State_3 = State_2#state{socket = Sock,
+ connect_timeout = Conn_timeout},
+ send_req_1(From, Url, Headers, Method, Body, Options, Timeout, State_3);
+ Err ->
+ shutting_down(State_2),
+ do_trace("Error connecting. Reason: ~1000.p~n", [Err]),
+ gen_server:reply(From, {error, {conn_failed, Err}}),
+ {stop, normal, State_2}
end;
+
+%% Send a CONNECT request.
+%% Wait for 200 OK
+%% Upgrade to SSL connection
+%% Then send request
+
+send_req_1(From,
+ #url{
+ host = Server_host,
+ port = Server_port
+ } = Url,
+ Headers, Method, Body, Options, Timeout,
+ #state{
+ proxy_tunnel_setup = false,
+ use_proxy = true,
+ is_ssl = true} = State) ->
+ NewReq = #request{
+ method = connect,
+ preserve_chunked_encoding = get_value(preserve_chunked_encoding, Options, false),
+ options = Options
+ },
+ State_1 = State#state{reqs=queue:in(NewReq, State#state.reqs)},
+ Pxy_auth_headers = maybe_modify_headers(Url, Method, Options, [], State_1),
+ Path = [Server_host, $:, integer_to_list(Server_port)],
+ {Req, Body_1} = make_request(connect, Pxy_auth_headers,
+ Path, Path,
+ [], Options, State_1),
+ TE = is_chunked_encoding_specified(Options),
+ trace_request(Req),
+ case do_send(Req, State) of
+ ok ->
+ case do_send_body(Body_1, State_1, TE) of
+ ok ->
+ trace_request_body(Body_1),
+ active_once(State_1),
+ Ref = case Timeout of
+ infinity ->
+ undefined;
+ _ ->
+ erlang:send_after(Timeout, self(), {req_timedout, From})
+ end,
+ State_2 = State_1#state{status = get_header,
+ cur_req = NewReq,
+ send_timer = Ref,
+ proxy_tunnel_setup = in_progress,
+ tunnel_setup_queue = [{From, Url, Headers, Method, Body, Options, Timeout}]},
+ State_3 = set_inac_timer(State_2),
+ {noreply, State_3};
+ Err ->
+ shutting_down(State_1),
+ do_trace("Send failed... Reason: ~p~n", [Err]),
+ gen_server:reply(From, {error, {send_failed, Err}}),
+ {stop, normal, State_1}
+ end;
+ Err ->
+ shutting_down(State_1),
+ do_trace("Send failed... Reason: ~p~n", [Err]),
+ gen_server:reply(From, {error, {send_failed, Err}}),
+ {stop, normal, State_1}
+ end;
+
+send_req_1(From, Url, Headers, Method, Body, Options, Timeout,
+ #state{proxy_tunnel_setup = in_progress,
+ tunnel_setup_queue = Q} = State) ->
+ do_trace("Queued SSL request awaiting tunnel setup: ~n"
+ "URL : ~s~n"
+ "Method : ~p~n"
+ "Headers : ~p~n", [Url, Method, Headers]),
+ {noreply, State#state{tunnel_setup_queue = [{From, Url, Headers, Method, Body, Options, Timeout} | Q]}};
+
send_req_1(From,
- #url{abspath = AbsPath,
- host = Host,
- port = Port,
- path = RelPath} = Url,
- Headers, Method, Body, Options, Timeout,
- #state{status = Status} = State) ->
+ #url{abspath = AbsPath,
+ path = RelPath} = Url,
+ Headers, Method, Body, Options, Timeout,
+           #state{status = Status,
+                  socket = Socket} = State) ->
ReqId = make_req_id(),
Resp_format = get_value(response_format, Options, list),
+ Caller_socket_options = get_value(socket_options, Options, []),
{StreamTo, Caller_controls_socket} =
- case get_value(stream_to, Options, undefined) of
- {Caller, once} when is_pid(Caller) or
- is_atom(Caller) ->
- Async_pid_rec = {{req_id_pid, ReqId}, self()},
- true = ets:insert(ibrowse_stream, Async_pid_rec),
- {Caller, true};
- undefined ->
- {undefined, false};
- Caller when is_pid(Caller) or
- is_atom(Caller) ->
- {Caller, false};
- Stream_to_inv ->
- exit({invalid_option, {stream_to, Stream_to_inv}})
- end,
+ case get_value(stream_to, Options, undefined) of
+ {Caller, once} when is_pid(Caller) or
+ is_atom(Caller) ->
+ Async_pid_rec = {{req_id_pid, ReqId}, self()},
+ true = ets:insert(ibrowse_stream, Async_pid_rec),
+ {Caller, true};
+ undefined ->
+ {undefined, false};
+ Caller when is_pid(Caller) or
+ is_atom(Caller) ->
+ {Caller, false};
+ Stream_to_inv ->
+ exit({invalid_option, {stream_to, Stream_to_inv}})
+ end,
SaveResponseToFile = get_value(save_response_to_file, Options, false),
NewReq = #request{url = Url,
- method = Method,
- stream_to = StreamTo,
- caller_controls_socket = Caller_controls_socket,
- options = Options,
- req_id = ReqId,
- save_response_to_file = SaveResponseToFile,
- stream_chunk_size = get_stream_chunk_size(Options),
- response_format = Resp_format,
- from = From},
+ method = Method,
+ stream_to = StreamTo,
+ caller_controls_socket = Caller_controls_socket,
+ caller_socket_options = Caller_socket_options,
+ options = Options,
+ req_id = ReqId,
+ save_response_to_file = SaveResponseToFile,
+ stream_chunk_size = get_stream_chunk_size(Options),
+ response_format = Resp_format,
+ from = From,
+ preserve_chunked_encoding = get_value(preserve_chunked_encoding, Options, false)
+ },
State_1 = State#state{reqs=queue:in(NewReq, State#state.reqs)},
- Headers_1 = add_auth_headers(Url, Options, Headers, State),
- HostHeaderValue = case lists:keysearch(host_header, 1, Options) of
- false ->
- case Port of
- 80 -> Host;
- _ -> [Host, ":", integer_to_list(Port)]
- end;
- {value, {_, Host_h_val}} ->
- Host_h_val
- end,
+ Headers_1 = maybe_modify_headers(Url, Method, Options, Headers, State_1),
{Req, Body_1} = make_request(Method,
- [{"Host", HostHeaderValue} | Headers_1],
- AbsPath, RelPath, Body, Options, State#state.use_proxy),
- case get(my_trace_flag) of
- true ->
- %%Avoid the binary operations if trace is not on...
- NReq = binary_to_list(list_to_binary(Req)),
- do_trace("Sending request: ~n"
- "--- Request Begin ---~n~s~n"
- "--- Request End ---~n", [NReq]);
- _ -> ok
- end,
- case do_send(Req, State) of
- ok ->
- case do_send_body(Body_1, State) of
- ok ->
- State_2 = inc_pipeline_counter(State_1),
- active_once(State_1),
- Ref = case Timeout of
- infinity ->
- undefined;
- _ ->
- erlang:send_after(Timeout, self(), {req_timedout, From})
- end,
- State_3 = case Status of
- idle ->
- State_2#state{status = get_header,
- cur_req = NewReq,
- send_timer = Ref};
- _ ->
- State_2#state{send_timer = Ref}
- end,
- case StreamTo of
- undefined ->
- ok;
- _ ->
- gen_server:reply(From, {ibrowse_req_id, ReqId})
- end,
- {noreply, State_3, get_inac_timeout(State_3)};
- Err ->
- shutting_down(State_1),
- do_trace("Send failed... Reason: ~p~n", [Err]),
- gen_server:reply(From, {error, send_failed}),
- {stop, normal, State_1}
- end;
- Err ->
- shutting_down(State_1),
- do_trace("Send failed... Reason: ~p~n", [Err]),
- gen_server:reply(From, {error, send_failed}),
- {stop, normal, State_1}
+ Headers_1,
+ AbsPath, RelPath, Body, Options, State_1),
+ trace_request(Req),
+    %% pass the state so that do_setopts/3 uses ssl:setopts/2 on SSL sockets
+    do_setopts(Socket, Caller_socket_options, State_1),
+ TE = is_chunked_encoding_specified(Options),
+ case do_send(Req, State_1) of
+ ok ->
+ case do_send_body(Body_1, State_1, TE) of
+ ok ->
+ trace_request_body(Body_1),
+ State_2 = inc_pipeline_counter(State_1),
+ active_once(State_2),
+ Ref = case Timeout of
+ infinity ->
+ undefined;
+ _ ->
+ erlang:send_after(Timeout, self(), {req_timedout, From})
+ end,
+ State_3 = case Status of
+ idle ->
+ State_2#state{status = get_header,
+ cur_req = NewReq,
+ send_timer = Ref};
+ _ ->
+ State_2#state{send_timer = Ref}
+ end,
+ case StreamTo of
+ undefined ->
+ ok;
+ _ ->
+ gen_server:reply(From, {ibrowse_req_id, ReqId})
+ end,
+ State_4 = set_inac_timer(State_3),
+ {noreply, State_4};
+ Err ->
+ shutting_down(State_1),
+ do_trace("Send failed... Reason: ~p~n", [Err]),
+ gen_server:reply(From, {error, {send_failed, Err}}),
+ {stop, normal, State_1}
+ end;
+ Err ->
+ shutting_down(State_1),
+ do_trace("Send failed... Reason: ~p~n", [Err]),
+ gen_server:reply(From, {error, {send_failed, Err}}),
+ {stop, normal, State_1}
+ end.
+
+maybe_modify_headers(#url{}, connect, _, Headers, State) ->
+ add_proxy_auth_headers(State, Headers);
+maybe_modify_headers(#url{host = Host, port = Port} = Url,
+ _Method,
+ Options, Headers, State) ->
+ case get_value(headers_as_is, Options, false) of
+ false ->
+ Headers_1 = add_auth_headers(Url, Options, Headers, State),
+ HostHeaderValue = case lists:keysearch(host_header, 1, Options) of
+ false ->
+ case Port of
+ 80 -> Host;
+ 443 -> Host;
+ _ -> [Host, ":", integer_to_list(Port)]
+ end;
+ {value, {_, Host_h_val}} ->
+ Host_h_val
+ end,
+ [{"Host", HostHeaderValue} | Headers_1];
+ true ->
+ Headers
end.
add_auth_headers(#url{username = User,
- password = UPw},
- Options,
- Headers,
- #state{use_proxy = UseProxy,
- proxy_auth_digest = ProxyAuthDigest}) ->
+ password = UPw},
+ Options,
+ Headers,
+ State) ->
Headers_1 = case User of
- undefined ->
- case get_value(basic_auth, Options, undefined) of
- undefined ->
- Headers;
- {U,P} ->
- [{"Authorization", ["Basic ", http_auth_digest(U, P)]} | Headers]
- end;
- _ ->
- [{"Authorization", ["Basic ", http_auth_digest(User, UPw)]} | Headers]
- end,
- case UseProxy of
- false ->
- Headers_1;
- true when ProxyAuthDigest == [] ->
- Headers_1;
- true ->
- [{"Proxy-Authorization", ["Basic ", ProxyAuthDigest]} | Headers_1]
- end.
+ undefined ->
+ case get_value(basic_auth, Options, undefined) of
+ undefined ->
+ Headers;
+ {U,P} ->
+ [{"Authorization", ["Basic ", http_auth_digest(U, P)]} | Headers]
+ end;
+ _ ->
+ [{"Authorization", ["Basic ", http_auth_digest(User, UPw)]} | Headers]
+ end,
+ add_proxy_auth_headers(State, Headers_1).
+
+add_proxy_auth_headers(#state{use_proxy = false}, Headers) ->
+ Headers;
+add_proxy_auth_headers(#state{proxy_auth_digest = []}, Headers) ->
+ Headers;
+add_proxy_auth_headers(#state{proxy_auth_digest = Auth_digest}, Headers) ->
+ [{"Proxy-Authorization", ["Basic ", Auth_digest]} | Headers].
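+
+%% A sketch of a proxied HTTPS request, which exercises the CONNECT tunnel
+%% path above (the hosts, port and credentials are illustrative):
+%% <code>
+%% ibrowse:send_req("https://localhost:8443/", [], get, [],
+%%                  [{proxy_host, "proxy.local"}, {proxy_port, 3128},
+%%                   {proxy_user, "scott"}, {proxy_password, "tiger"}]).
+%% </code>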
http_auth_digest([], []) ->
[];
http_auth_digest(Username, Password) ->
- encode_base64(Username ++ [$: | Password]).
+ ibrowse_lib:encode_base64(Username ++ [$: | Password]).
-encode_base64([]) ->
- [];
-encode_base64([A]) ->
- [e(A bsr 2), e((A band 3) bsl 4), $=, $=];
-encode_base64([A,B]) ->
- [e(A bsr 2), e(((A band 3) bsl 4) bor (B bsr 4)), e((B band 15) bsl 2), $=];
-encode_base64([A,B,C|Ls]) ->
- encode_base64_do(A,B,C, Ls).
-encode_base64_do(A,B,C, Rest) ->
- BB = (A bsl 16) bor (B bsl 8) bor C,
- [e(BB bsr 18), e((BB bsr 12) band 63),
- e((BB bsr 6) band 63), e(BB band 63)|encode_base64(Rest)].
-
-e(X) when X >= 0, X < 26 -> X+65;
-e(X) when X>25, X<52 -> X+71;
-e(X) when X>51, X<62 -> X-4;
-e(62) -> $+;
-e(63) -> $/;
-e(X) -> exit({bad_encode_base64_token, X}).
-
-make_request(Method, Headers, AbsPath, RelPath, Body, Options, UseProxy) ->
+make_request(Method, Headers, AbsPath, RelPath, Body, Options,
+ #state{use_proxy = UseProxy, is_ssl = Is_ssl}) ->
HttpVsn = http_vsn_string(get_value(http_vsn, Options, {1,1})),
+ Fun1 = fun({X, Y}) when is_atom(X) ->
+ {to_lower(atom_to_list(X)), X, Y};
+ ({X, Y}) when is_list(X) ->
+ {to_lower(X), X, Y}
+ end,
+ Headers_0 = [Fun1(X) || X <- Headers],
Headers_1 =
- case get_value(content_length, Headers, false) of
- false when (Body == []) or
- (Body == <<>>) or
- is_tuple(Body) or
- is_function(Body) ->
- Headers;
- false when is_binary(Body) ->
- [{"content-length", integer_to_list(size(Body))} | Headers];
- false ->
- [{"content-length", integer_to_list(length(Body))} | Headers];
- _ ->
- Headers
- end,
+ case lists:keysearch("content-length", 1, Headers_0) of
+ false when (Body == []) orelse
+ (Body == <<>>) orelse
+ is_tuple(Body) orelse
+ is_function(Body) ->
+ Headers_0;
+ false when is_binary(Body) ->
+ [{"content-length", "content-length", integer_to_list(size(Body))} | Headers_0];
+ false when is_list(Body) ->
+ [{"content-length", "content-length", integer_to_list(length(Body))} | Headers_0];
+ _ ->
+ %% Content-Length is already specified
+ Headers_0
+ end,
{Headers_2, Body_1} =
- case get_value(transfer_encoding, Options, false) of
- false ->
- {Headers_1, Body};
- {chunked, ChunkSize} ->
- {[{X, Y} || {X, Y} <- Headers_1,
- X /= "Content-Length",
- X /= "content-length",
- X /= content_length] ++
- [{"Transfer-Encoding", "chunked"}],
- chunk_request_body(Body, ChunkSize)}
- end,
+ case is_chunked_encoding_specified(Options) of
+ false ->
+ {[{Y, Z} || {_, Y, Z} <- Headers_1], Body};
+ true ->
+ Chunk_size_1 = case get_value(transfer_encoding, Options) of
+ chunked ->
+ 5120;
+ {chunked, Chunk_size} ->
+ Chunk_size
+ end,
+ {[{Y, Z} || {X, Y, Z} <- Headers_1,
+ X /= "content-length"] ++
+ [{"Transfer-Encoding", "chunked"}],
+ chunk_request_body(Body, Chunk_size_1)}
+ end,
Headers_3 = cons_headers(Headers_2),
Uri = case get_value(use_absolute_uri, Options, false) or UseProxy of
- true ->
- AbsPath;
- false ->
- RelPath
- end,
+ true ->
+ case Is_ssl of
+ true ->
+ RelPath;
+ false ->
+ AbsPath
+ end;
+ false ->
+ RelPath
+ end,
{[method(Method), " ", Uri, " ", HttpVsn, crnl(), Headers_3, crnl()], Body_1}.
+is_chunked_encoding_specified(Options) ->
+ case get_value(transfer_encoding, Options, false) of
+ false ->
+ false;
+ {chunked, _} ->
+ true;
+ chunked ->
+ true
+ end.
+
http_vsn_string({0,9}) -> "HTTP/0.9";
http_vsn_string({1,0}) -> "HTTP/1.0";
http_vsn_string({1,1}) -> "HTTP/1.1".
@@ -717,7 +894,7 @@ cons_headers([], Acc) ->
encode_headers(Acc);
cons_headers([{basic_auth, {U,P}} | T], Acc) ->
cons_headers(T, [{"Authorization",
- ["Basic ", ibrowse_lib:encode_base64(U++":"++P)]} | Acc]);
+ ["Basic ", ibrowse_lib:encode_base64(U++":"++P)]} | Acc]);
cons_headers([{cookie, Cookie} | T], Acc) ->
cons_headers(T, [{"Cookie", Cookie} | Acc]);
cons_headers([{content_length, L} | T], Acc) ->
@@ -738,6 +915,9 @@ encode_headers([{Name,Val} | T], Acc) when is_atom(Name) ->
encode_headers([], Acc) ->
lists:reverse(Acc).
+chunk_request_body(Body, _ChunkSize) when is_tuple(Body) orelse
+ is_function(Body) ->
+ Body;
chunk_request_body(Body, ChunkSize) ->
chunk_request_body(Body, ChunkSize, []).
@@ -747,25 +927,24 @@ chunk_request_body(Body, _ChunkSize, Acc) when Body == <<>>; Body == [] ->
chunk_request_body(Body, ChunkSize, Acc) when is_binary(Body),
size(Body) >= ChunkSize ->
<<ChunkBody:ChunkSize/binary, Rest/binary>> = Body,
- Chunk = [ibrowse_lib:dec2hex(4, ChunkSize),"\r\n",
- ChunkBody, "\r\n"],
+ Chunk = [ibrowse_lib:dec2hex(ChunkSize),"\r\n",
+ ChunkBody, "\r\n"],
chunk_request_body(Rest, ChunkSize, [Chunk | Acc]);
chunk_request_body(Body, _ChunkSize, Acc) when is_binary(Body) ->
BodySize = size(Body),
- Chunk = [ibrowse_lib:dec2hex(4, BodySize),"\r\n",
- Body, "\r\n"],
+ Chunk = [ibrowse_lib:dec2hex(BodySize),"\r\n",
+ Body, "\r\n"],
LastChunk = "0\r\n",
lists:reverse(["\r\n", LastChunk, Chunk | Acc]);
-chunk_request_body(Body, ChunkSize, Acc) when is_list(Body),
- length(Body) >= ChunkSize ->
+chunk_request_body(Body, ChunkSize, Acc) when length(Body) >= ChunkSize ->
{ChunkBody, Rest} = split_list_at(Body, ChunkSize),
- Chunk = [ibrowse_lib:dec2hex(4, ChunkSize),"\r\n",
- ChunkBody, "\r\n"],
+ Chunk = [ibrowse_lib:dec2hex(ChunkSize),"\r\n",
+ ChunkBody, "\r\n"],
chunk_request_body(Rest, ChunkSize, [Chunk | Acc]);
chunk_request_body(Body, _ChunkSize, Acc) when is_list(Body) ->
BodySize = length(Body),
- Chunk = [ibrowse_lib:dec2hex(4, BodySize),"\r\n",
- Body, "\r\n"],
+ Chunk = [ibrowse_lib:dec2hex(BodySize),"\r\n",
+ Body, "\r\n"],
LastChunk = "0\r\n",
lists:reverse(["\r\n", LastChunk, Chunk | Acc]).
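%% A minimal sketch of the wire framing produced above: each chunk is
%% "<size-in-hex>\r\n<data>\r\n", and the body ends with the zero chunk:
chunk_framing_sketch() ->
    IoList = chunk_request_body(<<"hello world">>, 5),
    %% yields <<"5\r\nhello\r\n5\r\n worl\r\n1\r\nd\r\n0\r\n\r\n">>
    iolist_to_binary(IoList).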
@@ -773,114 +952,172 @@ chunk_request_body(Body, _ChunkSize, Acc) when is_list(Body) ->
parse_response(_Data, #state{cur_req = undefined}=State) ->
State#state{status = idle};
parse_response(Data, #state{reply_buffer = Acc, reqs = Reqs,
- cur_req = CurReq} = State) ->
+ cur_req = CurReq} = State) ->
#request{from=From, stream_to=StreamTo, req_id=ReqId,
- method=Method, response_format = Resp_format} = CurReq,
+ method=Method, response_format = Resp_format,
+ options = Options
+ } = CurReq,
MaxHeaderSize = ibrowse:get_config_value(max_headers_size, infinity),
case scan_header(Acc, Data) of
- {yes, Headers, Data_1} ->
- do_trace("Recvd Header Data -> ~s~n----~n", [Headers]),
- do_trace("Recvd headers~n--- Headers Begin ---~n~s~n--- Headers End ---~n~n", [Headers]),
- {HttpVsn, StatCode, Headers_1} = parse_headers(Headers),
- do_trace("HttpVsn: ~p StatusCode: ~p Headers_1 -> ~1000.p~n", [HttpVsn, StatCode, Headers_1]),
- LCHeaders = [{to_lower(X), Y} || {X,Y} <- Headers_1],
- ConnClose = to_lower(get_value("connection", LCHeaders, "false")),
- IsClosing = is_connection_closing(HttpVsn, ConnClose),
- case IsClosing of
- true ->
+ {yes, Headers, Data_1} ->
+ do_trace("Recvd Header Data -> ~s~n----~n", [Headers]),
+ do_trace("Recvd headers~n--- Headers Begin ---~n~s~n--- Headers End ---~n~n", [Headers]),
+ {HttpVsn, StatCode, Headers_1, Status_line, Raw_headers} = parse_headers(Headers),
+ do_trace("HttpVsn: ~p StatusCode: ~p Headers_1 -> ~1000.p~n", [HttpVsn, StatCode, Headers_1]),
+ LCHeaders = [{to_lower(X), Y} || {X,Y} <- Headers_1],
+ ConnClose = to_lower(get_value("connection", LCHeaders, "false")),
+ IsClosing = is_connection_closing(HttpVsn, ConnClose),
+ case IsClosing of
+ true ->
shutting_down(State);
- false ->
- ok
- end,
- State_1 = State#state{recvd_headers=Headers_1, status=get_body,
- reply_buffer = <<>>,
- http_status_code=StatCode, is_closing=IsClosing},
- put(conn_close, ConnClose),
- TransferEncoding = to_lower(get_value("transfer-encoding", LCHeaders, "false")),
- case get_value("content-length", LCHeaders, undefined) of
- _ when Method == head ->
- {_, Reqs_1} = queue:out(Reqs),
- send_async_headers(ReqId, StreamTo, StatCode, Headers_1),
- State_1_1 = do_reply(State_1, From, StreamTo, ReqId, Resp_format,
- {ok, StatCode, Headers_1, []}),
- cancel_timer(State_1_1#state.send_timer, {eat_message, {req_timedout, From}}),
- State_2 = reset_state(State_1_1),
- State_3 = set_cur_request(State_2#state{reqs = Reqs_1}),
- parse_response(Data_1, State_3);
- _ when hd(StatCode) == $1 ->
- %% No message body is expected. Server may send
- %% one or more 1XX responses before a proper
- %% response.
- send_async_headers(ReqId, StreamTo, StatCode, Headers_1),
- do_trace("Recvd a status code of ~p. Ignoring and waiting for a proper response~n", [StatCode]),
- parse_response(Data_1, State_1#state{recvd_headers = [],
- status = get_header});
- _ when StatCode == "204";
- StatCode == "304" ->
- %% No message body is expected for these Status Codes.
- %% RFC2616 - Sec 4.4
- {_, Reqs_1} = queue:out(Reqs),
- send_async_headers(ReqId, StreamTo, StatCode, Headers_1),
- State_1_1 = do_reply(State_1, From, StreamTo, ReqId, Resp_format,
- {ok, StatCode, Headers_1, []}),
- cancel_timer(State_1_1#state.send_timer, {eat_message, {req_timedout, From}}),
- State_2 = reset_state(State_1_1),
- State_3 = set_cur_request(State_2#state{reqs = Reqs_1}),
- parse_response(Data_1, State_3);
- _ when TransferEncoding == "chunked" ->
- do_trace("Chunked encoding detected...~n",[]),
- send_async_headers(ReqId, StreamTo, StatCode, Headers_1),
- case parse_11_response(Data_1, State_1#state{transfer_encoding=chunked,
- chunk_size=chunk_start,
- reply_buffer = <<>>}) of
- {error, Reason} ->
- fail_pipelined_requests(State_1,
- {error, {Reason,
- {stat_code, StatCode}, Headers_1}}),
- {error, Reason};
- State_2 ->
- State_2
- end;
- undefined when HttpVsn == "HTTP/1.0";
- ConnClose == "close" ->
- send_async_headers(ReqId, StreamTo, StatCode, Headers_1),
- State_1#state{reply_buffer = Data_1};
- undefined ->
- fail_pipelined_requests(State_1,
- {error, {content_length_undefined,
- {stat_code, StatCode}, Headers}}),
- {error, content_length_undefined};
- V ->
- case catch list_to_integer(V) of
- V_1 when is_integer(V_1), V_1 >= 0 ->
- send_async_headers(ReqId, StreamTo, StatCode, Headers_1),
- do_trace("Recvd Content-Length of ~p~n", [V_1]),
- State_2 = State_1#state{rep_buf_size=0,
- reply_buffer = <<>>,
- content_length=V_1},
- case parse_11_response(Data_1, State_2) of
- {error, Reason} ->
- fail_pipelined_requests(State_1,
- {error, {Reason,
- {stat_code, StatCode}, Headers_1}}),
- {error, Reason};
- State_3 ->
- State_3
- end;
- _ ->
- fail_pipelined_requests(State_1,
- {error, {content_length_undefined,
- {stat_code, StatCode}, Headers}}),
- {error, content_length_undefined}
- end
- end;
- {no, Acc_1} when MaxHeaderSize == infinity ->
- State#state{reply_buffer = Acc_1};
- {no, Acc_1} when size(Acc_1) < MaxHeaderSize ->
- State#state{reply_buffer = Acc_1};
- {no, _Acc_1} ->
- fail_pipelined_requests(State, {error, max_headers_size_exceeded}),
- {error, max_headers_size_exceeded}
+ false ->
+ ok
+ end,
+ Give_raw_headers = get_value(give_raw_headers, Options, false),
+ State_1 = case Give_raw_headers of
+ true ->
+ State#state{recvd_headers=Headers_1, status=get_body,
+ reply_buffer = <<>>,
+ status_line = Status_line,
+ raw_headers = Raw_headers,
+ http_status_code=StatCode, is_closing=IsClosing};
+ false ->
+ State#state{recvd_headers=Headers_1, status=get_body,
+ reply_buffer = <<>>,
+ http_status_code=StatCode, is_closing=IsClosing}
+ end,
+ put(conn_close, ConnClose),
+ TransferEncoding = to_lower(get_value("transfer-encoding", LCHeaders, "false")),
+ case get_value("content-length", LCHeaders, undefined) of
+ _ when Method == connect,
+ hd(StatCode) == $2 ->
+ cancel_timer(State#state.send_timer),
+ {_, Reqs_1} = queue:out(Reqs),
+ upgrade_to_ssl(set_cur_request(State#state{reqs = Reqs_1,
+ recvd_headers = [],
+ status = idle
+ }));
+ _ when Method == connect ->
+ {_, Reqs_1} = queue:out(Reqs),
+ do_error_reply(State#state{reqs = Reqs_1},
+ {error, proxy_tunnel_failed}),
+ {error, proxy_tunnel_failed};
+ _ when Method == head ->
+ {_, Reqs_1} = queue:out(Reqs),
+ send_async_headers(ReqId, StreamTo, Give_raw_headers, State_1),
+ State_1_1 = do_reply(State_1, From, StreamTo, ReqId, Resp_format,
+ {ok, StatCode, Headers_1, []}),
+ cancel_timer(State_1_1#state.send_timer, {eat_message, {req_timedout, From}}),
+ State_2 = reset_state(State_1_1),
+ State_3 = set_cur_request(State_2#state{reqs = Reqs_1}),
+ parse_response(Data_1, State_3);
+ _ when hd(StatCode) =:= $1 ->
+ %% No message body is expected. Server may send
+ %% one or more 1XX responses before a proper
+ %% response.
+ send_async_headers(ReqId, StreamTo, Give_raw_headers, State_1),
+ do_trace("Recvd a status code of ~p. Ignoring and waiting for a proper response~n", [StatCode]),
+ parse_response(Data_1, State_1#state{recvd_headers = [],
+ status = get_header});
+ _ when StatCode =:= "204";
+ StatCode =:= "304" ->
+ %% No message body is expected for these Status Codes.
+ %% RFC2616 - Sec 4.4
+ {_, Reqs_1} = queue:out(Reqs),
+ send_async_headers(ReqId, StreamTo, Give_raw_headers, State_1),
+ State_1_1 = do_reply(State_1, From, StreamTo, ReqId, Resp_format,
+ {ok, StatCode, Headers_1, []}),
+ cancel_timer(State_1_1#state.send_timer, {eat_message, {req_timedout, From}}),
+ State_2 = reset_state(State_1_1),
+ State_3 = set_cur_request(State_2#state{reqs = Reqs_1}),
+ parse_response(Data_1, State_3);
+ _ when TransferEncoding =:= "chunked" ->
+ do_trace("Chunked encoding detected...~n",[]),
+ send_async_headers(ReqId, StreamTo, Give_raw_headers, State_1),
+ case parse_11_response(Data_1, State_1#state{transfer_encoding=chunked,
+ chunk_size=chunk_start,
+ reply_buffer = <<>>}) of
+ {error, Reason} ->
+ fail_pipelined_requests(State_1,
+ {error, {Reason,
+ {stat_code, StatCode}, Headers_1}}),
+ {error, Reason};
+ State_2 ->
+ State_2
+ end;
+ undefined when HttpVsn =:= "HTTP/1.0";
+ ConnClose =:= "close" ->
+ send_async_headers(ReqId, StreamTo, Give_raw_headers, State_1),
+ State_1#state{reply_buffer = Data_1};
+ undefined ->
+ fail_pipelined_requests(State_1,
+ {error, {content_length_undefined,
+ {stat_code, StatCode}, Headers}}),
+ {error, content_length_undefined};
+ V ->
+ case catch list_to_integer(V) of
+ V_1 when is_integer(V_1), V_1 >= 0 ->
+ send_async_headers(ReqId, StreamTo, Give_raw_headers, State_1),
+ do_trace("Recvd Content-Length of ~p~n", [V_1]),
+ State_2 = State_1#state{rep_buf_size=0,
+ reply_buffer = <<>>,
+ content_length=V_1},
+ case parse_11_response(Data_1, State_2) of
+ {error, Reason} ->
+ fail_pipelined_requests(State_1,
+ {error, {Reason,
+ {stat_code, StatCode}, Headers_1}}),
+ {error, Reason};
+ State_3 ->
+ State_3
+ end;
+ _ ->
+ fail_pipelined_requests(State_1,
+ {error, {content_length_undefined,
+ {stat_code, StatCode}, Headers}}),
+ {error, content_length_undefined}
+ end
+ end;
+ {no, Acc_1} when MaxHeaderSize == infinity ->
+ State#state{reply_buffer = Acc_1};
+ {no, Acc_1} when size(Acc_1) < MaxHeaderSize ->
+ State#state{reply_buffer = Acc_1};
+ {no, _Acc_1} ->
+ fail_pipelined_requests(State, {error, max_headers_size_exceeded}),
+ {error, max_headers_size_exceeded}
+ end.
+
+upgrade_to_ssl(#state{socket = Socket,
+ connect_timeout = Conn_timeout,
+ ssl_options = Ssl_options,
+ tunnel_setup_queue = Q} = State) ->
+ case ssl:connect(Socket, Ssl_options, Conn_timeout) of
+ {ok, Ssl_socket} ->
+ do_trace("Upgraded to SSL socket!!~n", []),
+ State_1 = State#state{socket = Ssl_socket,
+ proxy_tunnel_setup = done},
+ send_queued_requests(lists:reverse(Q), State_1);
+ Err ->
+ do_trace("Upgrade to SSL socket failed. Reson: ~p~n", [Err]),
+ do_error_reply(State, {error, {send_failed, Err}}),
+ {error, send_failed}
+ end.
+
+send_queued_requests([], State) ->
+ do_trace("Sent all queued requests via SSL connection~n", []),
+ State#state{tunnel_setup_queue = []};
+send_queued_requests([{From, Url, Headers, Method, Body, Options, Timeout} | Q],
+ State) ->
+ case send_req_1(From, Url, Headers, Method, Body, Options, Timeout, State) of
+ {noreply, State_1} ->
+ send_queued_requests(Q, State_1);
+ Err ->
+ do_trace("Error sending queued SSL request: ~n"
+ "URL : ~s~n"
+ "Method : ~p~n"
+ "Headers : ~p~n", [Url, Method, Headers]),
+ do_error_reply(State, {error, {send_failed, Err}}),
+ {error, send_failed}
end.
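%% Hedged usage sketch: the CONNECT/upgrade path above is exercised when
%% an https URL is fetched through an HTTP proxy. Host names below are
%% placeholders:
tunnel_sketch() ->
    ibrowse:send_req("https://secure.example.com/", [], get, [],
                     [{proxy_host, "proxy.example.com"},
                      {proxy_port, 3128}]).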
is_connection_closing("HTTP/0.9", _) -> true;
@@ -890,200 +1127,220 @@ is_connection_closing(_, _) -> false.
%% This clause determines the chunk size when given data from the beginning of the chunk
parse_11_response(DataRecvd,
- #state{transfer_encoding = chunked,
- chunk_size = chunk_start,
- chunk_size_buffer = Chunk_sz_buf
- } = State) ->
+ #state{transfer_encoding = chunked,
+ chunk_size = chunk_start,
+ chunk_size_buffer = Chunk_sz_buf
+ } = State) ->
case scan_crlf(Chunk_sz_buf, DataRecvd) of
- {yes, ChunkHeader, Data_1} ->
- case parse_chunk_header(ChunkHeader) of
- {error, Reason} ->
- {error, Reason};
- ChunkSize ->
- %%
- %% Do we have to preserve the chunk encoding when
- %% streaming? NO. This should be transparent to the client
- %% process. Chunked encoding was only introduced to make
- %% it efficient for the server.
- %%
- RemLen = size(Data_1),
- do_trace("Determined chunk size: ~p. Already recvd: ~p~n", [ChunkSize, RemLen]),
- parse_11_response(Data_1, State#state{chunk_size_buffer = <<>>,
- deleted_crlf = true,
- recvd_chunk_size = 0,
- chunk_size = ChunkSize})
- end;
- {no, Data_1} ->
- State#state{chunk_size_buffer = Data_1}
+ {yes, ChunkHeader, Data_1} ->
+ State_1 = maybe_accumulate_ce_data(State, <<ChunkHeader/binary, $\r, $\n>>),
+ ChunkSize = parse_chunk_header(ChunkHeader),
+ %%
+ %% Do we have to preserve the chunk encoding when
+ %% streaming? NO. This should be transparent to the client
+ %% process. Chunked encoding was only introduced to make
+ %% it efficient for the server.
+ %%
+ RemLen = size(Data_1),
+ do_trace("Determined chunk size: ~p. Already recvd: ~p~n",
+ [ChunkSize, RemLen]),
+ parse_11_response(Data_1, State_1#state{chunk_size_buffer = <<>>,
+ deleted_crlf = true,
+ recvd_chunk_size = 0,
+ chunk_size = ChunkSize});
+ {no, Data_1} ->
+ State#state{chunk_size_buffer = Data_1}
end;
%% This clause is to remove the CRLF between two chunks
%%
parse_11_response(DataRecvd,
- #state{transfer_encoding = chunked,
- chunk_size = tbd,
- chunk_size_buffer = Buf}=State) ->
+ #state{transfer_encoding = chunked,
+ chunk_size = tbd,
+ chunk_size_buffer = Buf
+ } = State) ->
case scan_crlf(Buf, DataRecvd) of
- {yes, _, NextChunk} ->
- State_1 = State#state{chunk_size = chunk_start,
- chunk_size_buffer = <<>>,
- deleted_crlf = true},
- parse_11_response(NextChunk, State_1);
- {no, Data_1} ->
- State#state{chunk_size_buffer = Data_1}
+ {yes, _, NextChunk} ->
+ State_1 = maybe_accumulate_ce_data(State, <<$\r, $\n>>),
+ State_2 = State_1#state{chunk_size = chunk_start,
+ chunk_size_buffer = <<>>,
+ deleted_crlf = true},
+ parse_11_response(NextChunk, State_2);
+ {no, Data_1} ->
+ State#state{chunk_size_buffer = Data_1}
end;
%% This clause deals with the end of a chunked transfer. ibrowse does
%% not support Trailers in the Chunked Transfer encoding. Any trailer
%% received is silently discarded.
parse_11_response(DataRecvd,
- #state{transfer_encoding = chunked, chunk_size = 0,
- cur_req = CurReq,
- deleted_crlf = DelCrlf,
- chunk_size_buffer = Trailer, reqs = Reqs}=State) ->
+ #state{transfer_encoding = chunked, chunk_size = 0,
+ cur_req = CurReq,
+ deleted_crlf = DelCrlf,
+ chunk_size_buffer = Trailer,
+ reqs = Reqs} = State) ->
do_trace("Detected end of chunked transfer...~n", []),
DataRecvd_1 = case DelCrlf of
- false ->
- DataRecvd;
- true ->
- <<$\r, $\n, DataRecvd/binary>>
+ false ->
+ DataRecvd;
+ true ->
+ <<$\r, $\n, DataRecvd/binary>>
end,
case scan_header(Trailer, DataRecvd_1) of
- {yes, _TEHeaders, Rem} ->
- {_, Reqs_1} = queue:out(Reqs),
- State_1 = handle_response(CurReq, State#state{reqs = Reqs_1}),
- parse_response(Rem, reset_state(State_1));
- {no, Rem} ->
- State#state{chunk_size_buffer = Rem, deleted_crlf = false}
+ {yes, TEHeaders, Rem} ->
+ {_, Reqs_1} = queue:out(Reqs),
+ State_1 = maybe_accumulate_ce_data(State, <<TEHeaders/binary, $\r, $\n>>),
+ State_2 = handle_response(CurReq,
+ State_1#state{reqs = Reqs_1}),
+ parse_response(Rem, reset_state(State_2));
+ {no, Rem} ->
+ accumulate_response(<<>>, State#state{chunk_size_buffer = Rem, deleted_crlf = false})
end;
%% This clause extracts a chunk, given the size.
parse_11_response(DataRecvd,
- #state{transfer_encoding = chunked,
- chunk_size = CSz,
- recvd_chunk_size = Recvd_csz,
- rep_buf_size = RepBufSz} = State) ->
+ #state{transfer_encoding = chunked,
+ chunk_size = CSz,
+ recvd_chunk_size = Recvd_csz,
+ rep_buf_size = RepBufSz} = State) ->
NeedBytes = CSz - Recvd_csz,
DataLen = size(DataRecvd),
do_trace("Recvd more data: size: ~p. NeedBytes: ~p~n", [DataLen, NeedBytes]),
case DataLen >= NeedBytes of
- true ->
- {RemChunk, RemData} = split_binary(DataRecvd, NeedBytes),
- do_trace("Recvd another chunk...~n", []),
- do_trace("RemData -> ~p~n", [RemData]),
- case accumulate_response(RemChunk, State) of
- {error, Reason} ->
- do_trace("Error accumulating response --> ~p~n", [Reason]),
- {error, Reason};
- #state{} = State_1 ->
- State_2 = State_1#state{chunk_size=tbd},
- parse_11_response(RemData, State_2)
- end;
- false ->
- accumulate_response(DataRecvd,
- State#state{rep_buf_size = RepBufSz + DataLen,
- recvd_chunk_size = Recvd_csz + DataLen})
+ true ->
+ {RemChunk, RemData} = split_binary(DataRecvd, NeedBytes),
+ do_trace("Recvd another chunk...~p~n", [RemChunk]),
+ do_trace("RemData -> ~p~n", [RemData]),
+ case accumulate_response(RemChunk, State) of
+ {error, Reason} ->
+ do_trace("Error accumulating response --> ~p~n", [Reason]),
+ {error, Reason};
+ #state{} = State_1 ->
+ State_2 = State_1#state{chunk_size=tbd},
+ parse_11_response(RemData, State_2)
+ end;
+ false ->
+ accumulate_response(DataRecvd,
+ State#state{rep_buf_size = RepBufSz + DataLen,
+ recvd_chunk_size = Recvd_csz + DataLen})
end;
%% This clause to extract the body when Content-Length is specified
parse_11_response(DataRecvd,
- #state{content_length=CL, rep_buf_size=RepBufSz,
- reqs=Reqs}=State) ->
+ #state{content_length=CL, rep_buf_size=RepBufSz,
+ reqs=Reqs}=State) ->
NeedBytes = CL - RepBufSz,
DataLen = size(DataRecvd),
case DataLen >= NeedBytes of
- true ->
- {RemBody, Rem} = split_binary(DataRecvd, NeedBytes),
- {_, Reqs_1} = queue:out(Reqs),
- State_1 = accumulate_response(RemBody, State),
- State_2 = handle_response(State_1#state.cur_req, State_1#state{reqs=Reqs_1}),
- State_3 = reset_state(State_2),
- parse_response(Rem, State_3);
- false ->
- accumulate_response(DataRecvd, State#state{rep_buf_size = (RepBufSz+DataLen)})
+ true ->
+ {RemBody, Rem} = split_binary(DataRecvd, NeedBytes),
+ {_, Reqs_1} = queue:out(Reqs),
+ State_1 = accumulate_response(RemBody, State),
+ State_2 = handle_response(State_1#state.cur_req, State_1#state{reqs=Reqs_1}),
+ State_3 = reset_state(State_2),
+ parse_response(Rem, State_3);
+ false ->
+ accumulate_response(DataRecvd, State#state{rep_buf_size = (RepBufSz+DataLen)})
end.
+maybe_accumulate_ce_data(#state{cur_req = #request{preserve_chunked_encoding = false}} = State, _) ->
+ State;
+maybe_accumulate_ce_data(State, Data) ->
+ accumulate_response(Data, State).
+
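%% Sketch of the behaviour switched by the clauses above. The option name
%% is inferred from the record field and is illustrative only: when set,
%% chunk sizes and trailers are handed to the caller instead of stripped.
preserve_ce_sketch(Url) ->
    ibrowse:send_req(Url, [], get, [],
                     [{preserve_chunked_encoding, true},
                      {response_format, binary}]).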
handle_response(#request{from=From, stream_to=StreamTo, req_id=ReqId,
- response_format = Resp_format,
- save_response_to_file = SaveResponseToFile,
- tmp_file_name = TmpFilename,
- tmp_file_fd = Fd
- },
- #state{http_status_code = SCode,
- send_timer = ReqTimer,
- reply_buffer = RepBuf,
- recvd_headers = RespHeaders}=State) when SaveResponseToFile /= false ->
+ response_format = Resp_format,
+ save_response_to_file = SaveResponseToFile,
+ tmp_file_name = TmpFilename,
+ tmp_file_fd = Fd,
+ options = Options
+ },
+ #state{http_status_code = SCode,
+ status_line = Status_line,
+ raw_headers = Raw_headers,
+ send_timer = ReqTimer,
+ reply_buffer = RepBuf,
+ recvd_headers = RespHeaders}=State) when SaveResponseToFile /= false ->
Body = RepBuf,
- State_1 = set_cur_request(State),
file:close(Fd),
ResponseBody = case TmpFilename of
- undefined ->
- Body;
- _ ->
- {file, TmpFilename}
- end,
- State_2 = do_reply(State_1, From, StreamTo, ReqId, Resp_format,
- {ok, SCode, RespHeaders, ResponseBody}),
+ undefined ->
+ Body;
+ _ ->
+ {file, TmpFilename}
+ end,
+ {Resp_headers_1, Raw_headers_1} = maybe_add_custom_headers(RespHeaders, Raw_headers, Options),
+ Reply = case get_value(give_raw_headers, Options, false) of
+ true ->
+ {ok, Status_line, Raw_headers_1, ResponseBody};
+ false ->
+ {ok, SCode, Resp_headers_1, ResponseBody}
+ end,
+ State_1 = do_reply(State, From, StreamTo, ReqId, Resp_format, Reply),
cancel_timer(ReqTimer, {eat_message, {req_timedout, From}}),
- State_2;
+ set_cur_request(State_1);
handle_response(#request{from=From, stream_to=StreamTo, req_id=ReqId,
- response_format = Resp_format},
- #state{http_status_code=SCode, recvd_headers=RespHeaders,
- reply_buffer = RepBuf,
- send_timer=ReqTimer}=State) ->
+ response_format = Resp_format,
+ options = Options},
+ #state{http_status_code = SCode,
+ status_line = Status_line,
+ raw_headers = Raw_headers,
+ recvd_headers = Resp_headers,
+ reply_buffer = RepBuf,
+ send_timer = ReqTimer} = State) ->
Body = RepBuf,
-%% State_1 = set_cur_request(State),
- State_1 = case get(conn_close) of
- "close" ->
- do_reply(State, From, StreamTo, ReqId, Resp_format,
- {ok, SCode, RespHeaders, Body}),
- exit(normal);
- _ ->
- State_1_1 = do_reply(State, From, StreamTo, ReqId, Resp_format,
- {ok, SCode, RespHeaders, Body}),
- cancel_timer(ReqTimer, {eat_message, {req_timedout, From}}),
- State_1_1
- end,
+ {Resp_headers_1, Raw_headers_1} = maybe_add_custom_headers(Resp_headers, Raw_headers, Options),
+ Reply = case get_value(give_raw_headers, Options, false) of
+ true ->
+ {ok, Status_line, Raw_headers_1, Body};
+ false ->
+ {ok, SCode, Resp_headers_1, Body}
+ end,
+ State_1 = do_reply(State, From, StreamTo, ReqId, Resp_format, Reply),
+ cancel_timer(ReqTimer, {eat_message, {req_timedout, From}}),
set_cur_request(State_1).
reset_state(State) ->
State#state{status = get_header,
- rep_buf_size = 0,
- streamed_size = 0,
- content_length = undefined,
- reply_buffer = <<>>,
- chunk_size_buffer = <<>>,
- recvd_headers = [],
- deleted_crlf = false,
- http_status_code = undefined,
- chunk_size = undefined,
- transfer_encoding = undefined}.
+ rep_buf_size = 0,
+ streamed_size = 0,
+ content_length = undefined,
+ reply_buffer = <<>>,
+ chunk_size_buffer = <<>>,
+ recvd_headers = [],
+ status_line = undefined,
+ raw_headers = undefined,
+ deleted_crlf = false,
+ http_status_code = undefined,
+ chunk_size = undefined,
+ transfer_encoding = undefined
+ }.
set_cur_request(#state{reqs = Reqs} = State) ->
case queue:to_list(Reqs) of
- [] ->
- State#state{cur_req = undefined};
- [NextReq | _] ->
- State#state{cur_req = NextReq}
+ [] ->
+ State#state{cur_req = undefined};
+ [NextReq | _] ->
+ State#state{cur_req = NextReq}
end.
parse_headers(Headers) ->
case scan_crlf(Headers) of
- {yes, StatusLine, T} ->
- parse_headers(StatusLine, T);
- {no, StatusLine} ->
- parse_headers(StatusLine, <<>>)
+ {yes, StatusLine, T} ->
+ parse_headers(StatusLine, T);
+ {no, StatusLine} ->
+ parse_headers(StatusLine, <<>>)
end.
parse_headers(StatusLine, Headers) ->
Headers_1 = parse_headers_1(Headers),
case parse_status_line(StatusLine) of
- {ok, HttpVsn, StatCode, _Msg} ->
- put(http_prot_vsn, HttpVsn),
- {HttpVsn, StatCode, Headers_1};
- _ -> %% A HTTP 0.9 response?
- put(http_prot_vsn, "HTTP/0.9"),
- {"HTTP/0.9", undefined, Headers}
+ {ok, HttpVsn, StatCode, _Msg} ->
+ put(http_prot_vsn, HttpVsn),
+ {HttpVsn, StatCode, Headers_1, StatusLine, Headers};
+ _ -> %% An HTTP 0.9 response?
+ put(http_prot_vsn, "HTTP/0.9"),
+ {"HTTP/0.9", undefined, Headers, StatusLine, Headers}
end.
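%% Illustrative sketch: the two extra tuple elements returned above feed
%% the give_raw_headers option, so a caller can receive the untouched
%% status line and header block instead of the parsed forms:
raw_headers_sketch(Url) ->
    %% with the option: {ok, Status_line, Raw_headers, Body}
    %% without it:      {ok, "200", [{"Server", _} | _], Body}
    ibrowse:send_req(Url, [], get, [], [{give_raw_headers, true}]).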
% From RFC 2616
@@ -1094,22 +1351,22 @@ parse_headers(StatusLine, Headers) ->
% SP. A recipient MAY replace any linear white space with a single
% SP before interpreting the field value or forwarding the message
% downstream.
- parse_headers_1(B) when is_binary(B) ->
- parse_headers_1(binary_to_list(B));
- parse_headers_1(String) ->
- parse_headers_1(String, [], []).
+parse_headers_1(B) when is_binary(B) ->
+ parse_headers_1(binary_to_list(B));
+parse_headers_1(String) ->
+ parse_headers_1(String, [], []).
-parse_headers_1([$\n, H |T], [$\r | L], Acc) when H == 32;
- H == $\t ->
+parse_headers_1([$\n, H |T], [$\r | L], Acc) when H =:= 32;
+ H =:= $\t ->
parse_headers_1(lists:dropwhile(fun(X) ->
- is_whitespace(X)
- end, T), [32 | L], Acc);
+ is_whitespace(X)
+ end, T), [32 | L], Acc);
parse_headers_1([$\n|T], [$\r | L], Acc) ->
case parse_header(lists:reverse(L)) of
- invalid ->
- parse_headers_1(T, [], Acc);
- NewHeader ->
- parse_headers_1(T, [], [NewHeader | Acc])
+ invalid ->
+ parse_headers_1(T, [], Acc);
+ NewHeader ->
+ parse_headers_1(T, [], [NewHeader | Acc])
end;
parse_headers_1([H|T], L, Acc) ->
parse_headers_1(T, [H|L], Acc);
@@ -1117,11 +1374,11 @@ parse_headers_1([], [], Acc) ->
lists:reverse(Acc);
parse_headers_1([], L, Acc) ->
Acc_1 = case parse_header(lists:reverse(L)) of
- invalid ->
- Acc;
- NewHeader ->
- [NewHeader | Acc]
- end,
+ invalid ->
+ Acc;
+ NewHeader ->
+ [NewHeader | Acc]
+ end,
lists:reverse(Acc_1).
parse_status_line(Line) when is_binary(Line) ->
@@ -1132,6 +1389,8 @@ parse_status_line([32 | T], get_prot_vsn, ProtVsn, StatCode) ->
parse_status_line(T, get_status_code, ProtVsn, StatCode);
parse_status_line([32 | T], get_status_code, ProtVsn, StatCode) ->
{ok, lists:reverse(ProtVsn), lists:reverse(StatCode), T};
+parse_status_line([], get_status_code, ProtVsn, StatCode) ->
+ {ok, lists:reverse(ProtVsn), lists:reverse(StatCode), []};
parse_status_line([H | T], get_prot_vsn, ProtVsn, StatCode) ->
parse_status_line(T, get_prot_vsn, [H|ProtVsn], StatCode);
parse_status_line([H | T], get_status_code, ProtVsn, StatCode) ->
@@ -1139,10 +1398,9 @@ parse_status_line([H | T], get_status_code, ProtVsn, StatCode) ->
parse_status_line([], _, _, _) ->
http_09.
-parse_header(B) when is_binary(B) ->
- parse_header(binary_to_list(B));
parse_header(L) ->
parse_header(L, []).
+
parse_header([$: | V], Acc) ->
{lists:reverse(Acc), string:strip(V)};
parse_header([H | T], Acc) ->
@@ -1152,11 +1410,11 @@ parse_header([], _) ->
scan_header(Bin) ->
case get_crlf_crlf_pos(Bin, 0) of
- {yes, Pos} ->
- {Headers, <<_:4/binary, Body/binary>>} = split_binary(Bin, Pos),
- {yes, Headers, Body};
- no ->
- {no, Bin}
+ {yes, Pos} ->
+ {Headers, <<_:4/binary, Body/binary>>} = split_binary(Bin, Pos),
+ {yes, Headers, Body};
+ no ->
+ {no, Bin}
end.
scan_header(Bin1, Bin2) when size(Bin1) < 4 ->
@@ -1168,11 +1426,11 @@ scan_header(Bin1, Bin2) ->
<<Headers_prefix:Bin1_already_scanned_size/binary, Rest/binary>> = Bin1,
Bin_to_scan = <<Rest/binary, Bin2/binary>>,
case get_crlf_crlf_pos(Bin_to_scan, 0) of
- {yes, Pos} ->
- {Headers_suffix, <<_:4/binary, Body/binary>>} = split_binary(Bin_to_scan, Pos),
- {yes, <<Headers_prefix/binary, Headers_suffix/binary>>, Body};
- no ->
- {no, <<Bin1/binary, Bin2/binary>>}
+ {yes, Pos} ->
+ {Headers_suffix, <<_:4/binary, Body/binary>>} = split_binary(Bin_to_scan, Pos),
+ {yes, <<Headers_prefix/binary, Headers_suffix/binary>>, Body};
+ no ->
+ {no, <<Bin1/binary, Bin2/binary>>}
end.
get_crlf_crlf_pos(<<$\r, $\n, $\r, $\n, _/binary>>, Pos) -> {yes, Pos};
@@ -1181,11 +1439,11 @@ get_crlf_crlf_pos(<<>>, _) -> no.
scan_crlf(Bin) ->
case get_crlf_pos(Bin) of
- {yes, Pos} ->
- {Prefix, <<_, _, Suffix/binary>>} = split_binary(Bin, Pos),
- {yes, Prefix, Suffix};
- no ->
- {no, Bin}
+ {yes, Pos} ->
+ {Prefix, <<_, _, Suffix/binary>>} = split_binary(Bin, Pos),
+ {yes, Prefix, Suffix};
+ no ->
+ {no, Bin}
end.
scan_crlf(<<>>, Bin2) ->
@@ -1199,11 +1457,11 @@ scan_crlf_1(Bin1_head_size, Bin1, Bin2) ->
<<Bin1_head:Bin1_head_size/binary, Bin1_tail/binary>> = Bin1,
Bin3 = <<Bin1_tail/binary, Bin2/binary>>,
case get_crlf_pos(Bin3) of
- {yes, Pos} ->
- {Prefix, <<_, _, Suffix/binary>>} = split_binary(Bin3, Pos),
- {yes, concat_binary([Bin1_head, Prefix]), Suffix};
- no ->
- {no, concat_binary([Bin1, Bin2])}
+ {yes, Pos} ->
+ {Prefix, <<_, _, Suffix/binary>>} = split_binary(Bin3, Pos),
+ {yes, list_to_binary([Bin1_head, Prefix]), Suffix};
+ no ->
+ {no, list_to_binary([Bin1, Bin2])}
end.
get_crlf_pos(Bin) ->
@@ -1213,13 +1471,6 @@ get_crlf_pos(<<$\r, $\n, _/binary>>, Pos) -> {yes, Pos};
get_crlf_pos(<<_, Rest/binary>>, Pos) -> get_crlf_pos(Rest, Pos + 1);
get_crlf_pos(<<>>, _) -> no.
-%% scan_crlf(<<$\n, T/binary>>, [$\r | L]) -> {yes, lists:reverse(L), T};
-%% scan_crlf(<<H, T/binary>>, L) -> scan_crlf(T, [H|L]);
-%% scan_crlf(<<>>, L) -> {no, L};
-%% scan_crlf([$\n|T], [$\r | L]) -> {yes, lists:reverse(L), T};
-%% scan_crlf([H|T], L) -> scan_crlf(T, [H|L]);
-%% scan_crlf([], L) -> {no, L}.
-
fmt_val(L) when is_list(L) -> L;
fmt_val(I) when is_integer(I) -> integer_to_list(I);
fmt_val(A) when is_atom(A) -> atom_to_list(A);
@@ -1240,7 +1491,8 @@ method(proppatch) -> "PROPPATCH";
method(lock) -> "LOCK";
method(unlock) -> "UNLOCK";
method(move) -> "MOVE";
-method(copy) -> "COPY".
+method(copy) -> "COPY";
+method(connect) -> "CONNECT".
%% From RFC 2616
%%
@@ -1250,19 +1502,19 @@ method(copy) -> "COPY".
% fields. This allows dynamically produced content to be transferred
% along with the information necessary for the recipient to verify
% that it has received the full message.
-% Chunked-Body = *chunk
-% last-chunk
-% trailer
-% CRLF
-% chunk = chunk-size [ chunk-extension ] CRLF
-% chunk-data CRLF
-% chunk-size = 1*HEX
-% last-chunk = 1*("0") [ chunk-extension ] CRLF
-% chunk-extension= *( ";" chunk-ext-name [ "=" chunk-ext-val ] )
-% chunk-ext-name = token
-% chunk-ext-val = token | quoted-string
-% chunk-data = chunk-size(OCTET)
-% trailer = *(entity-header CRLF)
+% Chunked-Body = *chunk
+% last-chunk
+% trailer
+% CRLF
+% chunk = chunk-size [ chunk-extension ] CRLF
+% chunk-data CRLF
+% chunk-size = 1*HEX
+% last-chunk = 1*("0") [ chunk-extension ] CRLF
+% chunk-extension= *( ";" chunk-ext-name [ "=" chunk-ext-val ] )
+% chunk-ext-name = token
+% chunk-ext-val = token | quoted-string
+% chunk-data = chunk-size(OCTET)
+% trailer = *(entity-header CRLF)
% The chunk-size field is a string of hex digits indicating the size
% of the chunk. The chunked encoding is ended by any chunk whose size
% is zero, followed by the trailer, which is terminated by an empty
@@ -1271,8 +1523,6 @@ method(copy) -> "COPY".
%% The parsing implemented here discards all chunk extensions. It also
%% strips trailing spaces from the chunk size fields as Apache 1.3.27 was
%% sending them.
-parse_chunk_header([]) ->
- throw({error, invalid_chunk_size});
parse_chunk_header(ChunkHeader) ->
parse_chunk_header(ChunkHeader, []).
@@ -1280,10 +1530,10 @@ parse_chunk_header(<<$;, _/binary>>, Acc) ->
hexlist_to_integer(lists:reverse(Acc));
parse_chunk_header(<<H, T/binary>>, Acc) ->
case is_whitespace(H) of
- true ->
- parse_chunk_header(T, Acc);
- false ->
- parse_chunk_header(T, [H | Acc])
+ true ->
+ parse_chunk_header(T, Acc);
+ false ->
+ parse_chunk_header(T, [H | Acc])
end;
parse_chunk_header(<<>>, Acc) ->
hexlist_to_integer(lists:reverse(Acc)).
@@ -1294,24 +1544,45 @@ is_whitespace($\n) -> true;
is_whitespace($\t) -> true;
is_whitespace(_) -> false.
-
-send_async_headers(_ReqId, undefined, _StatCode, _Headers) ->
+send_async_headers(_ReqId, undefined, _, _State) ->
ok;
-send_async_headers(ReqId, StreamTo, StatCode, Headers) ->
- catch StreamTo ! {ibrowse_async_headers, ReqId, StatCode, Headers}.
+send_async_headers(ReqId, StreamTo, Give_raw_headers,
+ #state{status_line = Status_line, raw_headers = Raw_headers,
+ recvd_headers = Headers, http_status_code = StatCode,
+ cur_req = #request{options = Opts}
+ }) ->
+ {Headers_1, Raw_headers_1} = maybe_add_custom_headers(Headers, Raw_headers, Opts),
+ case Give_raw_headers of
+ false ->
+ catch StreamTo ! {ibrowse_async_headers, ReqId, StatCode, Headers_1};
+ true ->
+ catch StreamTo ! {ibrowse_async_headers, ReqId, Status_line, Raw_headers_1}
+ end.
+
+maybe_add_custom_headers(Headers, Raw_headers, Opts) ->
+ Custom_headers = get_value(add_custom_headers, Opts, []),
+ Headers_1 = Headers ++ Custom_headers,
+ Raw_headers_1 = case Custom_headers of
+ [_ | _] when is_binary(Raw_headers) ->
+ Custom_headers_bin = list_to_binary(string:join([[X, $:, Y] || {X, Y} <- Custom_headers], "\r\n")),
+ <<Raw_headers/binary, "\r\n", Custom_headers_bin/binary>>;
+ _ ->
+ Raw_headers
+ end,
+ {Headers_1, Raw_headers_1}.
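%% Hedged sketch of the add_custom_headers request option consumed above:
%% extra {Name, Value} pairs are appended to the response headers before
%% the reply is delivered. The header name is a made-up example:
custom_headers_sketch(Url) ->
    ibrowse:send_req(Url, [], get, [],
                     [{add_custom_headers, [{"X-Fetched-By", "ibrowse"}]}]).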
format_response_data(Resp_format, Body) ->
case Resp_format of
- list when is_list(Body) ->
- flatten(Body);
- list when is_binary(Body) ->
- binary_to_list(Body);
- binary when is_list(Body) ->
- list_to_binary(Body);
- _ ->
- %% This is to cater for sending messages such as
- %% {chunk_start, _}, chunk_end etc
- Body
+ list when is_list(Body) ->
+ flatten(Body);
+ list when is_binary(Body) ->
+ binary_to_list(Body);
+ binary when is_list(Body) ->
+ list_to_binary(Body);
+ _ ->
+ %% This is to cater for sending messages such as
+ %% {chunk_start, _}, chunk_end etc
+ Body
end.
do_reply(State, From, undefined, _, Resp_format, {ok, St_code, Headers, Body}) ->
@@ -1322,14 +1593,14 @@ do_reply(State, From, undefined, _, _, Msg) ->
gen_server:reply(From, Msg),
dec_pipeline_counter(State);
do_reply(#state{prev_req_id = Prev_req_id} = State,
- _From, StreamTo, ReqId, Resp_format, {ok, _, _, Body}) ->
+ _From, StreamTo, ReqId, Resp_format, {ok, _, _, Body}) ->
State_1 = dec_pipeline_counter(State),
case Body of
- [] ->
- ok;
- _ ->
- Body_1 = format_response_data(Resp_format, Body),
- catch StreamTo ! {ibrowse_async_response, ReqId, Body_1}
+ [] ->
+ ok;
+ _ ->
+ Body_1 = format_response_data(Resp_format, Body),
+ catch StreamTo ! {ibrowse_async_response, ReqId, Body_1}
end,
catch StreamTo ! {ibrowse_async_response_end, ReqId},
%% We don't want to delete the Req-id to Pid mapping straightaway
@@ -1356,23 +1627,28 @@ do_interim_reply(StreamTo, Response_format, ReqId, Msg) ->
Msg_1 = format_response_data(Response_format, Msg),
catch StreamTo ! {ibrowse_async_response, ReqId, Msg_1}.
-do_error_reply(#state{reqs = Reqs} = State, Err) ->
+do_error_reply(#state{reqs = Reqs, tunnel_setup_queue = Tun_q} = State, Err) ->
ReqList = queue:to_list(Reqs),
lists:foreach(fun(#request{from=From, stream_to=StreamTo, req_id=ReqId,
- response_format = Resp_format}) ->
- ets:delete(ibrowse_stream, {req_id_pid, ReqId}),
+ response_format = Resp_format}) ->
+ ets:delete(ibrowse_stream, {req_id_pid, ReqId}),
do_reply(State, From, StreamTo, ReqId, Resp_format, {error, Err})
- end, ReqList).
+ end, ReqList),
+ lists:foreach(
+ fun({From, _Url, _Headers, _Method, _Body, _Options, _Timeout}) ->
+ do_reply(State, From, undefined, undefined, undefined, Err)
+ end, Tun_q).
fail_pipelined_requests(#state{reqs = Reqs, cur_req = CurReq} = State, Reply) ->
{_, Reqs_1} = queue:out(Reqs),
#request{from=From, stream_to=StreamTo, req_id=ReqId,
- response_format = Resp_format} = CurReq,
+ response_format = Resp_format} = CurReq,
do_reply(State, From, StreamTo, ReqId, Resp_format, Reply),
do_error_reply(State#state{reqs = Reqs_1}, previous_request_failed).
split_list_at(List, N) ->
split_list_at(List, N, []).
+
split_list_at([], _, Acc) ->
{lists:reverse(Acc), []};
split_list_at(List2, 0, List1) ->
@@ -1382,6 +1658,7 @@ split_list_at([H | List2], N, List1) ->
hexlist_to_integer(List) ->
hexlist_to_integer(lists:reverse(List), 1, 0).
+
hexlist_to_integer([H | T], Multiplier, Acc) ->
hexlist_to_integer(T, Multiplier*16, Multiplier*to_ascii(H) + Acc);
hexlist_to_integer([], _, Acc) ->
@@ -1416,10 +1693,10 @@ cancel_timer(Ref) -> erlang:cancel_timer(Ref).
cancel_timer(Ref, {eat_message, Msg}) ->
cancel_timer(Ref),
receive
- Msg ->
- ok
+ Msg ->
+ ok
after 0 ->
- ok
+ ok
end.
make_req_id() ->
@@ -1437,7 +1714,7 @@ to_lower([], Acc) ->
shutting_down(#state{lb_ets_tid = undefined}) ->
ok;
shutting_down(#state{lb_ets_tid = Tid,
- cur_pipeline_size = Sz}) ->
+ cur_pipeline_size = Sz}) ->
catch ets:delete(Tid, {Sz, self()}).
inc_pipeline_counter(#state{is_closing = true} = State) ->
@@ -1450,7 +1727,7 @@ dec_pipeline_counter(#state{is_closing = true} = State) ->
dec_pipeline_counter(#state{lb_ets_tid = undefined} = State) ->
State;
dec_pipeline_counter(#state{cur_pipeline_size = Pipe_sz,
- lb_ets_tid = Tid} = State) ->
+ lb_ets_tid = Tid} = State) ->
ets:delete(Tid, {Pipe_sz, self()}),
ets:insert(Tid, {{Pipe_sz - 1, self()}, []}),
State#state{cur_pipeline_size = Pipe_sz - 1}.
@@ -1464,13 +1741,68 @@ flatten([]) ->
get_stream_chunk_size(Options) ->
case lists:keysearch(stream_chunk_size, 1, Options) of
- {value, {_, V}} when V > 0 ->
- V;
- _ ->
- ?DEFAULT_STREAM_CHUNK_SIZE
+ {value, {_, V}} when V > 0 ->
+ V;
+ _ ->
+ ?DEFAULT_STREAM_CHUNK_SIZE
end.
-get_inac_timeout(#state{cur_req = #request{options = Opts}}) ->
+set_inac_timer(State) ->
+ cancel_timer(State#state.inactivity_timer_ref),
+ set_inac_timer(State#state{inactivity_timer_ref = undefined},
+ get_inac_timeout(State)).
+
+set_inac_timer(State, Timeout) when is_integer(Timeout) ->
+ Ref = erlang:send_after(Timeout, self(), timeout),
+ State#state{inactivity_timer_ref = Ref};
+set_inac_timer(State, _) ->
+ State.
+
+get_inac_timeout(#state{cur_req = #request{options = Opts}}) ->
get_value(inactivity_timeout, Opts, infinity);
get_inac_timeout(#state{cur_req = undefined}) ->
- infinity.
+ case ibrowse:get_config_value(inactivity_timeout, undefined) of
+ Val when is_integer(Val) ->
+ Val;
+ _ ->
+ case application:get_env(ibrowse, inactivity_timeout) of
+ {ok, Val} when is_integer(Val), Val > 0 ->
+ Val;
+ _ ->
+ 10000
+ end
+ end.
+
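%% Sketch of the fallback order implemented above when no request is in
%% flight: ibrowse's config table, then the application environment, then
%% a hard-coded 10 seconds. The environment value can be set at startup:
set_default_inactivity_timeout_sketch() ->
    application:set_env(ibrowse, inactivity_timeout, 30000).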
+trace_request(Req) ->
+ case get(my_trace_flag) of
+ true ->
+ %% Avoid the binary operations if trace is not on...
+ NReq = to_binary(Req),
+ do_trace("Sending request: ~n"
+ "--- Request Begin ---~n~s~n"
+ "--- Request End ---~n", [NReq]);
+ _ -> ok
+ end.
+
+trace_request_body(Body) ->
+ case get(my_trace_flag) of
+ true ->
+ %% Avoid the binary operations if trace is not on...
+ NBody = to_binary(Body),
+ case size(NBody) > 1024 of
+ true ->
+ ok;
+ false ->
+ do_trace("Sending request body: ~n"
+ "--- Request Body Begin ---~n~s~n"
+ "--- Request Body End ---~n", [NBody])
+ end;
+ false ->
+ ok
+ end.
+
+to_integer(X) when is_list(X) -> list_to_integer(X);
+to_integer(X) when is_integer(X) -> X.
+
+to_binary(X) when is_list(X) -> list_to_binary(X);
+to_binary(X) when is_binary(X) -> X.
diff --git a/src/ibrowse/ibrowse_lb.erl b/src/ibrowse/ibrowse_lb.erl
index 834054a7..0e001d48 100644
--- a/src/ibrowse/ibrowse_lb.erl
+++ b/src/ibrowse/ibrowse_lb.erl
@@ -1,13 +1,11 @@
%%%-------------------------------------------------------------------
%%% File : ibrowse_lb.erl
%%% Author : chandru <chandrashekhar.mullaparthi@t-mobile.co.uk>
-%%% Description :
+%%% Description :
%%%
%%% Created : 6 Mar 2008 by chandru <chandrashekhar.mullaparthi@t-mobile.co.uk>
%%%-------------------------------------------------------------------
-module(ibrowse_lb).
-
--vsn('$Id: ibrowse_lb.erl,v 1.2 2009/07/01 22:43:19 chandrusf Exp $ ').
-author(chandru).
-behaviour(gen_server).
%%--------------------------------------------------------------------
@@ -18,7 +16,8 @@
%% External exports
-export([
start_link/1,
- spawn_connection/5
+ spawn_connection/5,
+ stop/1
]).
%% gen_server callbacks
@@ -87,6 +86,14 @@ spawn_connection(Lb_pid, Url,
is_integer(Max_sessions) ->
gen_server:call(Lb_pid,
{spawn_connection, Url, Max_sessions, Max_pipeline_size, SSL_options}).
+
+stop(Lb_pid) ->
+ case catch gen_server:call(Lb_pid, stop) of
+ {'EXIT', {timeout, _}} ->
+ exit(Lb_pid, kill);
+ ok ->
+ ok
+ end.
%%--------------------------------------------------------------------
%% Function: handle_call/3
%% Description: Handling call messages
@@ -101,14 +108,14 @@ spawn_connection(Lb_pid, Url,
% #state{max_sessions = Max_sess,
% ets_tid = Tid,
% max_pipeline_size = Max_pipe_sz,
-% num_cur_sessions = Num} = State)
+% num_cur_sessions = Num} = State)
% when Num >= Max ->
% Reply = find_best_connection(Tid),
% {reply, sorry_dude_reuse, State};
%% Update max_sessions in #state with supplied value
handle_call({spawn_connection, _Url, Max_sess, Max_pipe, _}, _From,
- #state{num_cur_sessions = Num} = State)
+ #state{num_cur_sessions = Num} = State)
when Num >= Max_sess ->
State_1 = maybe_create_ets(State),
Reply = find_best_connection(State_1#state.ets_tid, Max_pipe),
@@ -122,6 +129,18 @@ handle_call({spawn_connection, Url, _Max_sess, _Max_pipe, SSL_options}, _From,
ets:insert(Tid, {{1, Pid}, []}),
{reply, {ok, Pid}, State_1#state{num_cur_sessions = Cur + 1}};
+handle_call(stop, _From, #state{ets_tid = undefined} = State) ->
+ gen_server:reply(_From, ok),
+ {stop, normal, State};
+
+handle_call(stop, _From, #state{ets_tid = Tid} = State) ->
+ ets:foldl(fun({{_, Pid}, _}, Acc) ->
+ ibrowse_http_client:stop(Pid),
+ Acc
+ end, [], Tid),
+ gen_server:reply(_From, ok),
+ {stop, normal, State};
+
handle_call(Request, _From, State) ->
Reply = {unknown_request, Request},
{reply, Reply, State}.
diff --git a/src/ibrowse/ibrowse_lib.erl b/src/ibrowse/ibrowse_lib.erl
index 6c7b1546..e913adbe 100644
--- a/src/ibrowse/ibrowse_lib.erl
+++ b/src/ibrowse/ibrowse_lib.erl
@@ -1,11 +1,10 @@
%%% File : ibrowse_lib.erl
%%% Author : Chandrashekhar Mullaparthi <chandrashekhar.mullaparthi@t-mobile.co.uk>
-%%% Description :
+%%% Description :
%%% Created : 27 Feb 2004 by Chandrashekhar Mullaparthi <chandrashekhar.mullaparthi@t-mobile.co.uk>
%% @doc Module with a few useful functions
-module(ibrowse_lib).
--vsn('$Id: ibrowse_lib.erl,v 1.6 2008/03/27 01:35:50 chandrusf Exp $ ').
-author('chandru').
-ifdef(debug).
-compile(export_all).
@@ -14,22 +13,22 @@
-include("ibrowse.hrl").
-export([
- get_trace_status/2,
- do_trace/2,
- do_trace/3,
- url_encode/1,
- decode_rfc822_date/1,
- status_code/1,
- dec2hex/2,
- drv_ue/1,
- drv_ue/2,
- encode_base64/1,
- decode_base64/1,
- get_value/2,
- get_value/3,
- parse_url/1,
- printable_date/0
- ]).
+ get_trace_status/2,
+ do_trace/2,
+ do_trace/3,
+ url_encode/1,
+ decode_rfc822_date/1,
+ status_code/1,
+ dec2hex/1,
+ drv_ue/1,
+ drv_ue/2,
+ encode_base64/1,
+ decode_base64/1,
+ get_value/2,
+ get_value/3,
+ parse_url/1,
+ printable_date/0
+ ]).
get_trace_status(Host, Port) ->
ibrowse:get_config_value({trace, Host, Port}, false).
@@ -39,10 +38,10 @@ drv_ue(Str) ->
drv_ue(Str, Port).
drv_ue(Str, Port) ->
case erlang:port_control(Port, 1, Str) of
- [] ->
- Str;
- Res ->
- Res
+ [] ->
+ Str;
+ Res ->
+ Res
end.
%% @doc URL-encodes a string based on RFC 1738. Returns a flat list.
@@ -72,10 +71,10 @@ d2h(N) -> N+$a-10.
decode_rfc822_date(String) when is_list(String) ->
case catch decode_rfc822_date_1(string:tokens(String, ", \t\r\n")) of
- {'EXIT', _} ->
- {error, invalid_date};
- Res ->
- Res
+ {'EXIT', _} ->
+ {error, invalid_date};
+ Res ->
+ Res
end.
% TODO: Have to handle the Zone
@@ -86,15 +85,15 @@ decode_rfc822_date_1([Day,Month,Year, Time,_Zone]) ->
MonthI = month_int(Month),
YearI = list_to_integer(Year),
TimeTup = case string:tokens(Time, ":") of
- [H,M] ->
- {list_to_integer(H),
- list_to_integer(M),
- 0};
- [H,M,S] ->
- {list_to_integer(H),
- list_to_integer(M),
- list_to_integer(S)}
- end,
+ [H,M] ->
+ {list_to_integer(H),
+ list_to_integer(M),
+ 0};
+ [H,M,S] ->
+ {list_to_integer(H),
+ list_to_integer(M),
+ list_to_integer(S)}
+ end,
{{YearI,MonthI,DayI}, TimeTup}.
month_int("Jan") -> 1;
@@ -110,7 +109,7 @@ month_int("Oct") -> 10;
month_int("Nov") -> 11;
month_int("Dec") -> 12.
-%% @doc Given a status code, returns an atom describing the status code.
+%% @doc Given a status code, returns an atom describing the status code.
%% @spec status_code(StatusCode::status_code()) -> StatusDescription
%% status_code() = string() | integer()
%% StatusDescription = atom()
@@ -164,100 +163,35 @@ status_code(507) -> insufficient_storage;
status_code(X) when is_list(X) -> status_code(list_to_integer(X));
status_code(_) -> unknown_status_code.
-%% @doc dec2hex taken from gtk.erl in std dist
-%% M = integer() -- number of hex digits required
+%% @doc Returns a string with the hexadecimal representation of a given decimal.
%% N = integer() -- the number to represent as hex
-%% @spec dec2hex(M::integer(), N::integer()) -> string()
-dec2hex(M,N) -> dec2hex(M,N,[]).
-
-dec2hex(0,_N,Ack) -> Ack;
-dec2hex(M,N,Ack) -> dec2hex(M-1,N bsr 4,[d2h(N band 15)|Ack]).
+%% @spec dec2hex(N::integer()) -> string()
+dec2hex(N) -> lists:flatten(io_lib:format("~.16B", [N])).
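%% Illustrative self-check of the simplified dec2hex/1 (~.16B emits
%% uppercase hex, so the default 5120-byte chunk size prints as "1400"):
dec2hex_sketch() ->
    "FF" = dec2hex(255),
    "1400" = dec2hex(5120),
    ok.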
%% @doc Implements the base64 encoding algorithm. The output data type matches the input data type.
%% @spec encode_base64(In) -> Out
%% In = string() | binary()
%% Out = string() | binary()
encode_base64(List) when is_list(List) ->
- encode_base64_1(list_to_binary(List));
+ binary_to_list(base64:encode(List));
encode_base64(Bin) when is_binary(Bin) ->
- List = encode_base64_1(Bin),
- list_to_binary(List).
-
-encode_base64_1(<<A:6, B:6, C:6, D:6, Rest/binary>>) ->
- [int_to_b64(A), int_to_b64(B),
- int_to_b64(C), int_to_b64(D) | encode_base64_1(Rest)];
-encode_base64_1(<<A:6, B:6, C:4>>) ->
- [int_to_b64(A), int_to_b64(B), int_to_b64(C bsl 2), $=];
-encode_base64_1(<<A:6, B:2>>) ->
- [int_to_b64(A), int_to_b64(B bsl 4), $=, $=];
-encode_base64_1(<<>>) ->
- [].
+ base64:encode(Bin).
%% @doc Implements the base64 decoding algorithm. The output data type matches the input data type.
%% @spec decode_base64(In) -> Out | exit({error, invalid_input})
%% In = string() | binary()
%% Out = string() | binary()
decode_base64(List) when is_list(List) ->
- decode_base64_1(List, []);
+ binary_to_list(base64:decode(List));
decode_base64(Bin) when is_binary(Bin) ->
- List = decode_base64_1(binary_to_list(Bin), []),
- list_to_binary(List).
-
-decode_base64_1([H | T], Acc) when ((H == $\t) or
- (H == 32) or
- (H == $\r) or
- (H == $\n)) ->
- decode_base64_1(T, Acc);
-
-decode_base64_1([$=, $=], Acc) ->
- lists:reverse(Acc);
-decode_base64_1([$=, _ | _], _Acc) ->
- exit({error, invalid_input});
-
-decode_base64_1([A1, B1, $=, $=], Acc) ->
- A = b64_to_int(A1),
- B = b64_to_int(B1),
- Oct1 = (A bsl 2) bor (B bsr 4),
- decode_base64_1([], [Oct1 | Acc]);
-decode_base64_1([A1, B1, C1, $=], Acc) ->
- A = b64_to_int(A1),
- B = b64_to_int(B1),
- C = b64_to_int(C1),
- Oct1 = (A bsl 2) bor (B bsr 4),
- Oct2 = ((B band 16#f) bsl 6) bor (C bsr 2),
- decode_base64_1([], [Oct2, Oct1 | Acc]);
-decode_base64_1([A1, B1, C1, D1 | T], Acc) ->
- A = b64_to_int(A1),
- B = b64_to_int(B1),
- C = b64_to_int(C1),
- D = b64_to_int(D1),
- Oct1 = (A bsl 2) bor (B bsr 4),
- Oct2 = ((B band 16#f) bsl 4) bor (C bsr 2),
- Oct3 = ((C band 2#11) bsl 6) bor D,
- decode_base64_1(T, [Oct3, Oct2, Oct1 | Acc]);
-decode_base64_1([], Acc) ->
- lists:reverse(Acc).
-
-%% Taken from httpd_util.erl
-int_to_b64(X) when X >= 0, X =< 25 -> X + $A;
-int_to_b64(X) when X >= 26, X =< 51 -> X - 26 + $a;
-int_to_b64(X) when X >= 52, X =< 61 -> X - 52 + $0;
-int_to_b64(62) -> $+;
-int_to_b64(63) -> $/.
-
-%% Taken from httpd_util.erl
-b64_to_int(X) when X >= $A, X =< $Z -> X - $A;
-b64_to_int(X) when X >= $a, X =< $z -> X - $a + 26;
-b64_to_int(X) when X >= $0, X =< $9 -> X - $0 + 52;
-b64_to_int($+) -> 62;
-b64_to_int($/) -> 63.
+ base64:decode(Bin).
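%% Sketch of the stdlib-backed round trip replacing the hand-rolled codec
%% removed above; the output type still follows the input type:
base64_roundtrip_sketch() ->
    "dXNlcjpwYXNz" = encode_base64("user:pass"),
    <<"user:pass">> = decode_base64(<<"dXNlcjpwYXNz">>),
    ok.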
get_value(Tag, TVL, DefVal) ->
case lists:keysearch(Tag, 1, TVL) of
- false ->
- DefVal;
- {value, {_, Val}} ->
- Val
+ false ->
+ DefVal;
+ {value, {_, Val}} ->
+ Val
end.
get_value(Tag, TVL) ->
@@ -270,93 +204,121 @@ parse_url(Url) ->
parse_url([$:, $/, $/ | _], get_protocol, Url, []) ->
{invalid_uri_1, Url};
parse_url([$:, $/, $/ | T], get_protocol, Url, TmpAcc) ->
- Prot = list_to_atom(lists:reverse(TmpAcc)),
- parse_url(T, get_username,
- Url#url{protocol = Prot},
- []);
-parse_url([$/ | T], get_username, Url, TmpAcc) ->
+ Prot = list_to_existing_atom(lists:reverse(TmpAcc)),
+ parse_url(T, get_username,
+ Url#url{protocol = Prot},
+ []);
+parse_url([H | T], get_username, Url, TmpAcc) when H == $/;
+ H == $? ->
+ Path = case H of
+ $/ ->
+ [$/ | T];
+ $? ->
+ [$/, $? | T]
+ end,
%% No username/password. No port number
Url#url{host = lists:reverse(TmpAcc),
- port = default_port(Url#url.protocol),
- path = [$/ | T]};
+ port = default_port(Url#url.protocol),
+ path = Path};
parse_url([$: | T], get_username, Url, TmpAcc) ->
%% It is possible that no username/password has been
%% specified. But we'll continue with the assumption that there is
%% a username/password. If we encounter a '@' later on, there is a
%% username/password indeed. If we encounter a '/', it was
%% actually the hostname
- parse_url(T, get_password,
- Url#url{username = lists:reverse(TmpAcc)},
- []);
+ parse_url(T, get_password,
+ Url#url{username = lists:reverse(TmpAcc)},
+ []);
parse_url([$@ | T], get_username, Url, TmpAcc) ->
- parse_url(T, get_host,
- Url#url{username = lists:reverse(TmpAcc),
- password = ""},
- []);
+ parse_url(T, get_host,
+ Url#url{username = lists:reverse(TmpAcc),
+ password = ""},
+ []);
parse_url([$@ | T], get_password, Url, TmpAcc) ->
- parse_url(T, get_host,
- Url#url{password = lists:reverse(TmpAcc)},
- []);
-parse_url([$/ | T], get_password, Url, TmpAcc) ->
+ parse_url(T, get_host,
+ Url#url{password = lists:reverse(TmpAcc)},
+ []);
+parse_url([H | T], get_password, Url, TmpAcc) when H == $/;
+ H == $? ->
%% Ok, what we thought was the username/password was the hostname
%% and portnumber
#url{username=User} = Url,
Port = list_to_integer(lists:reverse(TmpAcc)),
+ Path = case H of
+ $/ ->
+ [$/ | T];
+ $? ->
+ [$/, $? | T]
+ end,
Url#url{host = User,
- port = Port,
- username = undefined,
- password = undefined,
- path = [$/ | T]};
+ port = Port,
+ username = undefined,
+ password = undefined,
+ path = Path};
parse_url([$: | T], get_host, #url{} = Url, TmpAcc) ->
- parse_url(T, get_port,
- Url#url{host = lists:reverse(TmpAcc)},
- []);
-parse_url([$/ | T], get_host, #url{protocol=Prot} = Url, TmpAcc) ->
+ parse_url(T, get_port,
+ Url#url{host = lists:reverse(TmpAcc)},
+ []);
+parse_url([H | T], get_host, #url{protocol=Prot} = Url, TmpAcc) when H == $/;
+ H == $? ->
+ Path = case H of
+ $/ ->
+ [$/ | T];
+ $? ->
+ [$/, $? | T]
+ end,
Url#url{host = lists:reverse(TmpAcc),
- port = default_port(Prot),
- path = [$/ | T]};
-parse_url([$/ | T], get_port, #url{protocol=Prot} = Url, TmpAcc) ->
+ port = default_port(Prot),
+ path = Path};
+parse_url([H | T], get_port, #url{protocol=Prot} = Url, TmpAcc) when H == $/;
+ H == $? ->
+ Path = case H of
+ $/ ->
+ [$/ | T];
+ $? ->
+ [$/, $? | T]
+ end,
Port = case TmpAcc of
- [] ->
- default_port(Prot);
- _ ->
- list_to_integer(lists:reverse(TmpAcc))
- end,
- Url#url{port = Port, path = [$/ | T]};
+ [] ->
+ default_port(Prot);
+ _ ->
+ list_to_integer(lists:reverse(TmpAcc))
+ end,
+ Url#url{port = Port, path = Path};
parse_url([H | T], State, Url, TmpAcc) ->
parse_url(T, State, Url, [H | TmpAcc]);
parse_url([], get_host, Url, TmpAcc) when TmpAcc /= [] ->
Url#url{host = lists:reverse(TmpAcc),
- port = default_port(Url#url.protocol),
- path = "/"};
+ port = default_port(Url#url.protocol),
+ path = "/"};
parse_url([], get_username, Url, TmpAcc) when TmpAcc /= [] ->
Url#url{host = lists:reverse(TmpAcc),
- port = default_port(Url#url.protocol),
- path = "/"};
+ port = default_port(Url#url.protocol),
+ path = "/"};
parse_url([], get_port, #url{protocol=Prot} = Url, TmpAcc) ->
Port = case TmpAcc of
- [] ->
- default_port(Prot);
- _ ->
- list_to_integer(lists:reverse(TmpAcc))
- end,
- Url#url{port = Port,
- path = "/"};
+ [] ->
+ default_port(Prot);
+ _ ->
+ list_to_integer(lists:reverse(TmpAcc))
+ end,
+ Url#url{port = Port,
+ path = "/"};
parse_url([], get_password, Url, TmpAcc) ->
%% Ok, what we thought was the username/password was the hostname
%% and portnumber
#url{username=User} = Url,
Port = case TmpAcc of
- [] ->
- default_port(Url#url.protocol);
- _ ->
- list_to_integer(lists:reverse(TmpAcc))
- end,
+ [] ->
+ default_port(Url#url.protocol);
+ _ ->
+ list_to_integer(lists:reverse(TmpAcc))
+ end,
Url#url{host = User,
- port = Port,
- username = undefined,
- password = undefined,
- path = "/"};
+ port = Port,
+ username = undefined,
+ password = undefined,
+ path = "/"};
parse_url([], State, Url, TmpAcc) ->
{invalid_uri_2, State, Url, TmpAcc}.
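%% Hedged sketch of the new '?' handling above: a query string with no
%% explicit path now yields a path beginning "/?". Expected results are
%% spelled out as match assertions; the host names are placeholders:
parse_url_sketch() ->
    #url{host = "example.com", port = 80, path = "/?q=1"} =
        parse_url("http://example.com?q=1"),
    #url{host = "example.com", port = 8080, path = "/a/b"} =
        parse_url("http://example.com:8080/a/b"),
    ok.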
@@ -387,13 +349,13 @@ do_trace(Fmt, Args) ->
-ifdef(DEBUG).
do_trace(_, Fmt, Args) ->
io:format("~s -- (~s) - "++Fmt,
- [printable_date(),
- get(ibrowse_trace_token) | Args]).
+ [printable_date(),
+ get(ibrowse_trace_token) | Args]).
-else.
do_trace(true, Fmt, Args) ->
io:format("~s -- (~s) - "++Fmt,
- [printable_date(),
- get(ibrowse_trace_token) | Args]);
+ [printable_date(),
+ get(ibrowse_trace_token) | Args]);
do_trace(_, _, _) ->
ok.
-endif.
diff --git a/src/ibrowse/ibrowse_sup.erl b/src/ibrowse/ibrowse_sup.erl
index 1b9b863a..ace33d16 100644
--- a/src/ibrowse/ibrowse_sup.erl
+++ b/src/ibrowse/ibrowse_sup.erl
@@ -1,13 +1,11 @@
%%%-------------------------------------------------------------------
%%% File : ibrowse_sup.erl
%%% Author : Chandrashekhar Mullaparthi <chandrashekhar.mullaparthi@t-mobile.co.uk>
-%%% Description :
+%%% Description :
%%%
%%% Created : 15 Oct 2003 by Chandrashekhar Mullaparthi <chandrashekhar.mullaparthi@t-mobile.co.uk>
%%%-------------------------------------------------------------------
-module(ibrowse_sup).
--vsn('$Id: ibrowse_sup.erl,v 1.1 2005/05/05 22:28:28 chandrusf Exp $ ').
-
-behaviour(supervisor).
%%--------------------------------------------------------------------
%% Include files
@@ -53,7 +51,7 @@ start_link() ->
%% Func: init/1
%% Returns: {ok, {SupFlags, [ChildSpec]}} |
%% ignore |
-%% {error, Reason}
+%% {error, Reason}
%%--------------------------------------------------------------------
init([]) ->
AChild = {ibrowse,{ibrowse,start_link,[]},
diff --git a/src/ibrowse/ibrowse_test.erl b/src/ibrowse/ibrowse_test.erl
index 3dc66ecf..3ad76603 100644
--- a/src/ibrowse/ibrowse_test.erl
+++ b/src/ibrowse/ibrowse_test.erl
@@ -4,7 +4,6 @@
%%% Created : 14 Oct 2003 by Chandrashekhar Mullaparthi <chandrashekhar.mullaparthi@t-mobile.co.uk>
-module(ibrowse_test).
--vsn('$Id: ibrowse_test.erl,v 1.4 2009/07/01 22:43:19 chandrusf Exp $ ').
-export([
load_test/3,
send_reqs_1/3,
@@ -18,6 +17,7 @@
ue_test/1,
verify_chunked_streaming/0,
verify_chunked_streaming/1,
+ test_chunked_streaming_once/0,
i_do_async_req_list/4,
test_stream_once/3,
test_stream_once/4
@@ -193,6 +193,7 @@ dump_errors(Key, Iod) ->
{"http://www.google.co.uk", get},
{"http://www.google.com", get},
{"http://www.google.com", options},
+ {"https://mail.google.com", get},
{"http://www.sun.com", get},
{"http://www.oracle.com", get},
{"http://www.bbc.co.uk", get},
@@ -216,16 +217,21 @@ dump_errors(Key, Iod) ->
{"http://jigsaw.w3.org/HTTP/300/", get},
{"http://jigsaw.w3.org/HTTP/Basic/", get, [{basic_auth, {"guest", "guest"}}]},
{"http://jigsaw.w3.org/HTTP/CL/", get},
- {"http://www.httpwatch.com/httpgallery/chunked/", get}
+ {"http://www.httpwatch.com/httpgallery/chunked/", get},
+ {"https://github.com", get, [{ssl_options, [{depth, 2}]}]}
]).
unit_tests() ->
unit_tests([]).
unit_tests(Options) ->
+ application:start(crypto),
+ application:start(public_key),
+ application:start(ssl),
+ ibrowse:start(),
Options_1 = Options ++ [{connect_timeout, 5000}],
{Pid, Ref} = erlang:spawn_monitor(?MODULE, unit_tests_1, [self(), Options_1]),
- receive
+ receive
{done, Pid} ->
ok;
{'DOWN', Ref, _, _, Info} ->
@@ -247,19 +253,45 @@ verify_chunked_streaming() ->
verify_chunked_streaming([]).
verify_chunked_streaming(Options) ->
+ io:format("~nVerifying that chunked streaming is working...~n", []),
Url = "http://www.httpwatch.com/httpgallery/chunked/",
- io:format("URL: ~s~n", [Url]),
- io:format("Fetching data without streaming...~n", []),
+ io:format(" URL: ~s~n", [Url]),
+ io:format(" Fetching data without streaming...~n", []),
Result_without_streaming = ibrowse:send_req(
Url, [], get, [],
[{response_format, binary} | Options]),
- io:format("Fetching data with streaming as list...~n", []),
+ io:format(" Fetching data with streaming as list...~n", []),
Async_response_list = do_async_req_list(
Url, get, [{response_format, list} | Options]),
- io:format("Fetching data with streaming as binary...~n", []),
+ io:format(" Fetching data with streaming as binary...~n", []),
Async_response_bin = do_async_req_list(
Url, get, [{response_format, binary} | Options]),
- compare_responses(Result_without_streaming, Async_response_list, Async_response_bin).
+ io:format(" Fetching data with streaming as binary, {active, once}...~n", []),
+ Async_response_bin_once = do_async_req_list(
+ Url, get, [once, {response_format, binary} | Options]),
+ Res1 = compare_responses(Result_without_streaming, Async_response_list, Async_response_bin),
+ Res2 = compare_responses(Result_without_streaming, Async_response_list, Async_response_bin_once),
+ case {Res1, Res2} of
+ {success, success} ->
+ io:format(" Chunked streaming working~n", []);
+ _ ->
+ ok
+ end.
+
+test_chunked_streaming_once() ->
+ test_chunked_streaming_once([]).
+
+test_chunked_streaming_once(Options) ->
+ io:format("~nTesting chunked streaming with the {stream_to, {Pid, once}} option...~n", []),
+ Url = "http://www.httpwatch.com/httpgallery/chunked/",
+ io:format(" URL: ~s~n", [Url]),
+ io:format(" Fetching data with streaming as binary, {active, once}...~n", []),
+ case do_async_req_list(Url, get, [once, {response_format, binary} | Options]) of
+ {ok, _, _, _} ->
+ io:format(" Success!~n", []);
+ Err ->
+ io:format(" Fail: ~p~n", [Err])
+ end.
compare_responses({ok, St_code, _, Body}, {ok, St_code, _, Body}, {ok, St_code, _, Body}) ->
success;
@@ -293,9 +325,9 @@ compare_responses(R1, R2, R3) ->
do_async_req_list(Url, Method, Options) ->
{Pid,_} = erlang:spawn_monitor(?MODULE, i_do_async_req_list,
- [self(), Url, Method,
+ [self(), Url, Method,
Options ++ [{stream_chunk_size, 1000}]]),
- io:format("Spawned process ~p~n", [Pid]),
+%% io:format("Spawned process ~p~n", [Pid]),
wait_for_resp(Pid).
wait_for_resp(Pid) ->
@@ -312,31 +344,54 @@ wait_for_resp(Pid) ->
Msg ->
io:format("Recvd unknown message: ~p~n", [Msg]),
wait_for_resp(Pid)
- after 10000 ->
+ after 100000 ->
{error, timeout}
end.
i_do_async_req_list(Parent, Url, Method, Options) ->
- Res = ibrowse:send_req(Url, [], Method, [], [{stream_to, self()} | Options]),
+ Options_1 = case lists:member(once, Options) of
+ true ->
+ [{stream_to, {self(), once}} | (Options -- [once])];
+ false ->
+ [{stream_to, self()} | Options]
+ end,
+ Res = ibrowse:send_req(Url, [], Method, [], Options_1),
case Res of
{ibrowse_req_id, Req_id} ->
- Result = wait_for_async_resp(Req_id, undefined, undefined, []),
+ Result = wait_for_async_resp(Req_id, Options, undefined, undefined, []),
Parent ! {async_result, self(), Result};
Err ->
Parent ! {async_result, self(), Err}
end.
-wait_for_async_resp(Req_id, Acc_Stat_code, Acc_Headers, Body) ->
+wait_for_async_resp(Req_id, Options, Acc_Stat_code, Acc_Headers, Body) ->
receive
{ibrowse_async_headers, Req_id, StatCode, Headers} ->
- wait_for_async_resp(Req_id, StatCode, Headers, Body);
+ %% io:format("Recvd headers...~n", []),
+ maybe_stream_next(Req_id, Options),
+ wait_for_async_resp(Req_id, Options, StatCode, Headers, Body);
{ibrowse_async_response_end, Req_id} ->
+ %% io:format("Recvd end of response.~n", []),
Body_1 = list_to_binary(lists:reverse(Body)),
{ok, Acc_Stat_code, Acc_Headers, Body_1};
{ibrowse_async_response, Req_id, Data} ->
- wait_for_async_resp(Req_id, Acc_Stat_code, Acc_Headers, [Data | Body]);
+ maybe_stream_next(Req_id, Options),
+ %% io:format("Recvd data...~n", []),
+ wait_for_async_resp(Req_id, Options, Acc_Stat_code, Acc_Headers, [Data | Body]);
+ {ibrowse_async_response, Req_id, {error, _} = Err} ->
+ {ok, Acc_Stat_code, Acc_Headers, Err};
Err ->
{ok, Acc_Stat_code, Acc_Headers, Err}
+ after 10000 ->
+ {timeout, Acc_Stat_code, Acc_Headers, Body}
+ end.
+
+maybe_stream_next(Req_id, Options) ->
+ case lists:member(once, Options) of
+ true ->
+ ibrowse:stream_next(Req_id);
+ false ->
+ ok
end.
execute_req(Url, Method, Options) ->
@@ -346,7 +401,7 @@ execute_req(Url, Method, Options) ->
{ok, SCode, _H, _B} ->
io:format("Status code: ~p~n", [SCode]);
Err ->
- io:format("Err -> ~p~n", [Err])
+ io:format("~p~n", [Err])
end.
drv_ue_test() ->
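
Note on the change above: the reworked tests exercise ibrowse's per-chunk flow control. With {stream_to, {Pid, once}}, ibrowse delivers one message and then waits until the consumer calls ibrowse:stream_next/1, which is exactly what maybe_stream_next/2 does. A minimal sketch of that receive loop, following wait_for_async_resp/5 above (the URL, timeout, and function names are illustrative, and only the happy path of send_req/5 is matched):

fetch_once(Url) ->
    {ibrowse_req_id, ReqId} =
        ibrowse:send_req(Url, [], get, [],
                         [{stream_to, {self(), once}},
                          {response_format, binary}]),
    collect(ReqId, []).

collect(ReqId, Acc) ->
    receive
        {ibrowse_async_headers, ReqId, _Status, _Headers} ->
            ibrowse:stream_next(ReqId),      %% ask for the next chunk
            collect(ReqId, Acc);
        {ibrowse_async_response, ReqId, Data} ->
            ibrowse:stream_next(ReqId),
            collect(ReqId, [Data | Acc]);
        {ibrowse_async_response_end, ReqId} ->
            {ok, list_to_binary(lists:reverse(Acc))}
    after 10000 ->
        {error, timeout}
    end.
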
diff --git a/src/mochiweb/Makefile.am b/src/mochiweb/Makefile.am
index c191abfa..752118df 100644
--- a/src/mochiweb/Makefile.am
+++ b/src/mochiweb/Makefile.am
@@ -10,65 +10,86 @@
## License for the specific language governing permissions and limitations under
## the License.
-mochiwebebindir = $(localerlanglibdir)/mochiweb-r113/ebin
+mochiwebebindir = $(localerlanglibdir)/mochiweb-7c2bc2/ebin
mochiweb_file_collection = \
- mochifmt.erl \
- mochifmt_records.erl \
- mochifmt_std.erl \
- mochihex.erl \
- mochijson.erl \
- mochijson2.erl \
- mochinum.erl \
+ mochifmt.erl \
+ mochifmt_records.erl \
+ mochifmt_std.erl \
+ mochiglobal.erl \
+ mochihex.erl \
+ mochijson.erl \
+ mochijson2.erl \
+ mochilists.erl \
+ mochilogfile2.erl \
+ mochinum.erl \
+ mochitemp.erl \
+ mochiutf8.erl \
mochiweb.app.in \
- mochiweb.erl \
- mochiweb_app.erl \
- mochiweb_charref.erl \
- mochiweb_cookies.erl \
- mochiweb_echo.erl \
- mochiweb_headers.erl \
- mochiweb_html.erl \
- mochiweb_http.erl \
- mochiweb_multipart.erl \
- mochiweb_request.erl \
- mochiweb_response.erl \
- mochiweb_skel.erl \
- mochiweb_socket_server.erl \
- mochiweb_sup.erl \
- mochiweb_util.erl \
- reloader.erl
+ mochiweb.erl \
+ mochiweb_acceptor.erl \
+ mochiweb_app.erl \
+ mochiweb_charref.erl \
+ mochiweb_cookies.erl \
+ mochiweb_cover.erl \
+ mochiweb_echo.erl \
+ mochiweb_headers.erl \
+ mochiweb_html.erl \
+ mochiweb_http.erl \
+ mochiweb_io.erl \
+ mochiweb_mime.erl \
+ mochiweb_multipart.erl \
+ mochiweb_request.erl \
+ mochiweb_response.erl \
+ mochiweb_skel.erl \
+ mochiweb_socket.erl \
+ mochiweb_socket_server.erl \
+ mochiweb_sup.erl \
+ mochiweb_util.erl \
+ reloader.erl
mochiwebebin_make_generated_file_list = \
- mochifmt.beam \
- mochifmt_records.beam \
- mochifmt_std.beam \
- mochihex.beam \
- mochijson.beam \
- mochijson2.beam \
- mochinum.beam \
+ mochifmt.beam \
+ mochifmt_records.beam \
+ mochifmt_std.beam \
+ mochiglobal.beam \
+ mochihex.beam \
+ mochijson.beam \
+ mochijson2.beam \
+ mochilists.beam \
+ mochilogfile2.beam \
+ mochinum.beam \
+ mochitemp.beam \
+ mochiutf8.beam \
mochiweb.app \
- mochiweb.beam \
- mochiweb_app.beam \
- mochiweb_charref.beam \
- mochiweb_cookies.beam \
- mochiweb_echo.beam \
- mochiweb_headers.beam \
- mochiweb_html.beam \
- mochiweb_http.beam \
- mochiweb_multipart.beam \
- mochiweb_request.beam \
- mochiweb_response.beam \
- mochiweb_skel.beam \
- mochiweb_socket_server.beam \
- mochiweb_sup.beam \
- mochiweb_util.beam \
- reloader.beam
+ mochiweb.beam \
+ mochiweb_acceptor.beam \
+ mochiweb_app.beam \
+ mochiweb_charref.beam \
+ mochiweb_cookies.beam \
+ mochiweb_cover.beam \
+ mochiweb_echo.beam \
+ mochiweb_headers.beam \
+ mochiweb_html.beam \
+ mochiweb_http.beam \
+ mochiweb_io.beam \
+ mochiweb_mime.beam \
+ mochiweb_multipart.beam \
+ mochiweb_request.beam \
+ mochiweb_response.beam \
+ mochiweb_skel.beam \
+ mochiweb_socket.beam \
+ mochiweb_socket_server.beam \
+ mochiweb_sup.beam \
+ mochiweb_util.beam \
+ reloader.beam
mochiwebebin_DATA = \
$(mochiwebebin_make_generated_file_list)
EXTRA_DIST = \
- $(mochiweb_file_collection)
+ $(mochiweb_file_collection) \
+ internal.hrl
CLEANFILES = \
$(mochiwebebin_make_generated_file_list)
@@ -77,4 +98,5 @@ CLEANFILES = \
cp $< $@
%.beam: %.erl
+
$(ERLC) $(ERLC_FLAGS) $<
diff --git a/src/mochiweb/internal.hrl b/src/mochiweb/internal.hrl
new file mode 100644
index 00000000..6db899a0
--- /dev/null
+++ b/src/mochiweb/internal.hrl
@@ -0,0 +1,3 @@
+
+-define(RECBUF_SIZE, 8192).
+
diff --git a/src/mochiweb/mochifmt.erl b/src/mochiweb/mochifmt.erl
index da0a133a..5bc6b9c4 100644
--- a/src/mochiweb/mochifmt.erl
+++ b/src/mochiweb/mochifmt.erl
@@ -10,7 +10,6 @@
-export([tokenize/1, format/3, get_field/3, format_field/3]).
-export([bformat/2, bformat/3]).
-export([f/2, f/3]).
--export([test/0]).
-record(conversion, {length, precision, ctype, align, fill_char, sign}).
@@ -113,15 +112,6 @@ bformat(Format, Args) ->
bformat(Format, Args, Module) ->
iolist_to_binary(format(Format, Args, Module)).
-%% @spec test() -> ok
-%% @doc Run tests.
-test() ->
- ok = test_tokenize(),
- ok = test_format(),
- ok = test_std(),
- ok = test_records(),
- ok.
-
%% Internal API
add_raw("", Acc) ->
@@ -375,14 +365,21 @@ parse_std_conversion([$. | Spec], Acc) ->
parse_std_conversion([Type], Acc) ->
parse_std_conversion("", Acc#conversion{ctype=ctype(Type)}).
-test_tokenize() ->
+
+%%
+%% Tests
+%%
+-include_lib("eunit/include/eunit.hrl").
+-ifdef(TEST).
+
+tokenize_test() ->
{?MODULE, [{raw, "ABC"}]} = tokenize("ABC"),
{?MODULE, [{format, {"0", "", ""}}]} = tokenize("{0}"),
{?MODULE, [{raw, "ABC"}, {format, {"1", "", ""}}, {raw, "DEF"}]} =
tokenize("ABC{1}DEF"),
ok.
-test_format() ->
+format_test() ->
<<" -4">> = bformat("{0:4}", [-4]),
<<" 4">> = bformat("{0:4}", [4]),
<<" 4">> = bformat("{0:{0}}", [4]),
@@ -410,12 +407,12 @@ test_format() ->
{{2008,5,4}, {4, 2, 2}}),
ok.
-test_std() ->
+std_test() ->
M = mochifmt_std:new(),
<<"01">> = bformat("{0}{1}", [0, 1], M),
ok.
-test_records() ->
+records_test() ->
M = mochifmt_records:new([{conversion, record_info(fields, conversion)}]),
R = #conversion{length=long, precision=hard, sign=peace},
long = M:get_value("length", R),
@@ -424,3 +421,5 @@ test_records() ->
<<"long hard">> = bformat("{length} {precision}", R, M),
<<"long hard">> = bformat("{0.length} {0.precision}", [R], M),
ok.
+
+-endif.
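
The same mechanical conversion repeats across the mochiweb modules below: the exported test/0 aggregator is dropped, each test_foo/0 helper becomes foo_test/0, and the whole block moves behind -ifdef(TEST) with the EUnit header included. A minimal sketch of the resulting layout (module and function names are illustrative):

-module(example).
-export([double/1]).

double(X) ->
    2 * X.

%%
%% Tests
%%
-include_lib("eunit/include/eunit.hrl").
-ifdef(TEST).

%% eunit:test(example) discovers every zero-arity *_test function,
%% so no hand-written test/0 driver is needed. The block is only
%% compiled when TEST is defined (e.g. erlc -DTEST).
double_test() ->
    ?assertEqual(4, double(2)),
    ok.

-endif.
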
diff --git a/src/mochiweb/mochifmt_records.erl b/src/mochiweb/mochifmt_records.erl
index 94c77978..2326d1dd 100644
--- a/src/mochiweb/mochifmt_records.erl
+++ b/src/mochiweb/mochifmt_records.erl
@@ -28,3 +28,11 @@ get_rec_index(Atom, [Atom | _], Index) ->
Index;
get_rec_index(Atom, [_ | Rest], Index) ->
get_rec_index(Atom, Rest, 1 + Index).
+
+
+%%
+%% Tests
+%%
+-include_lib("eunit/include/eunit.hrl").
+-ifdef(TEST).
+-endif.
diff --git a/src/mochiweb/mochifmt_std.erl b/src/mochiweb/mochifmt_std.erl
index 9442016a..d4d74f6f 100644
--- a/src/mochiweb/mochifmt_std.erl
+++ b/src/mochiweb/mochifmt_std.erl
@@ -21,3 +21,10 @@ get_value(Key, Args) ->
format_field(Arg, Format) ->
mochifmt:format_field(Arg, Format, THIS).
+
+%%
+%% Tests
+%%
+-include_lib("eunit/include/eunit.hrl").
+-ifdef(TEST).
+-endif.
diff --git a/src/mochiweb/mochiglobal.erl b/src/mochiweb/mochiglobal.erl
new file mode 100644
index 00000000..c740b878
--- /dev/null
+++ b/src/mochiweb/mochiglobal.erl
@@ -0,0 +1,107 @@
+%% @author Bob Ippolito <bob@mochimedia.com>
+%% @copyright 2010 Mochi Media, Inc.
+%% @doc Abuse module constant pools as a "read-only shared heap" (since erts 5.6)
+%% <a href="http://www.erlang.org/pipermail/erlang-questions/2009-March/042503.html">[1]</a>.
+-module(mochiglobal).
+-author("Bob Ippolito <bob@mochimedia.com>").
+-export([get/1, get/2, put/2, delete/1]).
+
+-spec get(atom()) -> any() | undefined.
+%% @equiv get(K, undefined)
+get(K) ->
+ get(K, undefined).
+
+-spec get(atom(), T) -> any() | T.
+%% @doc Get the term for K or return Default.
+get(K, Default) ->
+ get(K, Default, key_to_module(K)).
+
+get(_K, Default, Mod) ->
+ try Mod:term()
+ catch error:undef ->
+ Default
+ end.
+
+-spec put(atom(), any()) -> ok.
+%% @doc Store term V at K, replaces an existing term if present.
+put(K, V) ->
+ put(K, V, key_to_module(K)).
+
+put(_K, V, Mod) ->
+ Bin = compile(Mod, V),
+ code:purge(Mod),
+ code:load_binary(Mod, atom_to_list(Mod) ++ ".erl", Bin),
+ ok.
+
+-spec delete(atom()) -> boolean().
+%% @doc Delete term stored at K, no-op if non-existent.
+delete(K) ->
+ delete(K, key_to_module(K)).
+
+delete(_K, Mod) ->
+ code:purge(Mod),
+ code:delete(Mod).
+
+-spec key_to_module(atom()) -> atom().
+key_to_module(K) ->
+ list_to_atom("mochiglobal:" ++ atom_to_list(K)).
+
+-spec compile(atom(), any()) -> binary().
+compile(Module, T) ->
+ {ok, Module, Bin} = compile:forms(forms(Module, T),
+ [verbose, report_errors]),
+ Bin.
+
+-spec forms(atom(), any()) -> [erl_syntax:syntaxTree()].
+forms(Module, T) ->
+ [erl_syntax:revert(X) || X <- term_to_abstract(Module, term, T)].
+
+-spec term_to_abstract(atom(), atom(), any()) -> [erl_syntax:syntaxTree()].
+term_to_abstract(Module, Getter, T) ->
+ [%% -module(Module).
+ erl_syntax:attribute(
+ erl_syntax:atom(module),
+ [erl_syntax:atom(Module)]),
+ %% -export([Getter/0]).
+ erl_syntax:attribute(
+ erl_syntax:atom(export),
+ [erl_syntax:list(
+ [erl_syntax:arity_qualifier(
+ erl_syntax:atom(Getter),
+ erl_syntax:integer(0))])]),
+ %% Getter() -> T.
+ erl_syntax:function(
+ erl_syntax:atom(Getter),
+ [erl_syntax:clause([], none, [erl_syntax:abstract(T)])])].
+
+%%
+%% Tests
+%%
+-include_lib("eunit/include/eunit.hrl").
+-ifdef(TEST).
+get_put_delete_test() ->
+ K = '$$test$$mochiglobal',
+ delete(K),
+ ?assertEqual(
+ bar,
+ get(K, bar)),
+ try
+ ?MODULE:put(K, baz),
+ ?assertEqual(
+ baz,
+ get(K, bar)),
+ ?MODULE:put(K, wibble),
+ ?assertEqual(
+ wibble,
+ ?MODULE:get(K))
+ after
+ delete(K)
+ end,
+ ?assertEqual(
+ bar,
+ get(K, bar)),
+ ?assertEqual(
+ undefined,
+ ?MODULE:get(K)),
+ ok.
+-endif.
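
mochiglobal trades write cost for read speed: put/2 compiles the term into a generated module's constant pool, so get/1 is a plain function call with no copying and no ETS lookup, while each write purges and reloads the generated module. A hedged shell session (the key name is illustrative):

1> mochiglobal:put(app_config, [{port, 8080}]).
ok
2> mochiglobal:get(app_config).
[{port,8080}]
3> mochiglobal:get(missing, fallback).
fallback
4> mochiglobal:delete(app_config).
true

This fits configuration-style data that is read constantly and updated rarely; frequently written data would pay a compile-and-load cost on every put/2.
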
diff --git a/src/mochiweb/mochihex.erl b/src/mochiweb/mochihex.erl
index 7fe6899e..44a2aa7f 100644
--- a/src/mochiweb/mochihex.erl
+++ b/src/mochiweb/mochihex.erl
@@ -6,7 +6,7 @@
-module(mochihex).
-author('bob@mochimedia.com').
--export([test/0, to_hex/1, to_bin/1, to_int/1, dehex/1, hexdigit/1]).
+-export([to_hex/1, to_bin/1, to_int/1, dehex/1, hexdigit/1]).
%% @type iolist() = [char() | binary() | iolist()]
%% @type iodata() = iolist() | binary()
@@ -46,16 +46,6 @@ hexdigit(C) when C >= 0, C =< 9 ->
hexdigit(C) when C =< 15 ->
C + $a - 10.
-%% @spec test() -> ok
-%% @doc Test this module.
-test() ->
- "ff000ff1" = to_hex([255, 0, 15, 241]),
- <<255, 0, 15, 241>> = to_bin("ff000ff1"),
- 16#ff000ff1 = to_int("ff000ff1"),
- "ff000ff1" = to_hex(16#ff000ff1),
- ok.
-
-
%% Internal API
to_hex(<<>>, Acc) ->
@@ -73,3 +63,29 @@ to_bin([], Acc) ->
to_bin([C1, C2 | Rest], Acc) ->
to_bin(Rest, [(dehex(C1) bsl 4) bor dehex(C2) | Acc]).
+
+
+%%
+%% Tests
+%%
+-include_lib("eunit/include/eunit.hrl").
+-ifdef(TEST).
+
+to_hex_test() ->
+ "ff000ff1" = to_hex([255, 0, 15, 241]),
+ "ff000ff1" = to_hex(16#ff000ff1),
+ "0" = to_hex(16#0),
+ ok.
+
+to_bin_test() ->
+ <<255, 0, 15, 241>> = to_bin("ff000ff1"),
+ <<255, 0, 10, 161>> = to_bin("Ff000aA1"),
+ ok.
+
+to_int_test() ->
+ 16#ff000ff1 = to_int("ff000ff1"),
+ 16#ff000aa1 = to_int("FF000Aa1"),
+ 16#0 = to_int("0"),
+ ok.
+
+-endif.
diff --git a/src/mochiweb/mochijson.erl b/src/mochiweb/mochijson.erl
index 74695a75..2e3d1452 100644
--- a/src/mochiweb/mochijson.erl
+++ b/src/mochiweb/mochijson.erl
@@ -8,7 +8,6 @@
-export([decoder/1, decode/1]).
-export([binary_encoder/1, binary_encode/1]).
-export([binary_decoder/1, binary_decode/1]).
--export([test/0]).
% This is a macro to placate syntax highlighters..
-define(Q, $\").
@@ -91,10 +90,6 @@ binary_encode(Any) ->
binary_decode(S) ->
mochijson2:decode(S).
-test() ->
- test_all(),
- mochijson2:test().
-
%% Internal API
parse_encoder_options([], State) ->
@@ -145,7 +140,7 @@ json_encode_proplist([], _State) ->
"{}";
json_encode_proplist(Props, State) ->
F = fun ({K, V}, Acc) ->
- KS = case K of
+ KS = case K of
K when is_atom(K) ->
json_encode_string_utf8(atom_to_list(K));
K when is_integer(K) ->
@@ -321,12 +316,12 @@ tokenize_string([$\\, $u, C3, C2, C1, C0 | Rest], S, Acc) ->
% coalesce UTF-16 surrogate pair?
C = dehex(C0) bor
(dehex(C1) bsl 4) bor
- (dehex(C2) bsl 8) bor
+ (dehex(C2) bsl 8) bor
(dehex(C3) bsl 12),
tokenize_string(Rest, ?ADV_COL(S, 6), [C | Acc]);
tokenize_string([C | Rest], S, Acc) when C >= $\s; C < 16#10FFFF ->
tokenize_string(Rest, ?ADV_COL(S, 1), [C | Acc]).
-
+
tokenize_number(IoList=[C | _], Mode, S=#decoder{input_encoding=utf8}, Acc)
when is_list(C); is_binary(C); C >= 16#7f ->
List = xmerl_ucs:from_utf8(iolist_to_binary(IoList)),
@@ -407,6 +402,13 @@ tokenize(L=[C | _], S) when C >= $0, C =< $9; C == $- ->
{{const, list_to_float(Float)}, Rest, S1}
end.
+
+%%
+%% Tests
+%%
+-include_lib("eunit/include/eunit.hrl").
+-ifdef(TEST).
+
%% testing constructs borrowed from the Yaws JSON implementation.
%% Create an object from a list of Key/Value pairs.
@@ -419,7 +421,7 @@ is_obj({struct, Props}) ->
true;
(_) ->
false
- end,
+ end,
lists:all(F, Props).
obj_from_list(Props) ->
@@ -462,11 +464,10 @@ equiv_list([], []) ->
equiv_list([V1 | L1], [V2 | L2]) ->
equiv(V1, V2) andalso equiv_list(L1, L2).
-test_all() ->
- test_issue33(),
+e2j_vec_test() ->
test_one(e2j_test_vec(utf8), 1).
-test_issue33() ->
+issue33_test() ->
%% http://code.google.com/p/mochiweb/issues/detail?id=33
Js = {struct, [{"key", [194, 163]}]},
Encoder = encoder([{input_encoding, utf8}]),
@@ -526,3 +527,5 @@ e2j_test_vec(utf8) ->
{{array, [-123, "foo", obj_from_list([{"bar", {array, []}}]), null]},
"[-123,\"foo\",{\"bar\":[]},null]"}
].
+
+-endif.
diff --git a/src/mochiweb/mochijson2.erl b/src/mochiweb/mochijson2.erl
index 66f68bf0..64cabc86 100644
--- a/src/mochiweb/mochijson2.erl
+++ b/src/mochiweb/mochijson2.erl
@@ -9,7 +9,6 @@
-author('bob@mochimedia.com').
-export([encoder/1, encode/1]).
-export([decoder/1, decode/1]).
--export([test/0]).
% This is a macro to placate syntax highlighters..
-define(Q, $\").
@@ -39,8 +38,9 @@
%% @type json_number() = integer() | float()
%% @type json_array() = [json_term()]
%% @type json_object() = {struct, [{json_string(), json_term()}]}
+%% @type json_iolist() = {json, iolist()}
%% @type json_term() = json_string() | json_number() | json_array() |
-%% json_object()
+%% json_object() | json_iolist()
-record(encoder, {handler=null,
utf8=false}).
@@ -75,9 +75,6 @@ decoder(Options) ->
decode(S) ->
json_decode(S, #decoder{}).
-test() ->
- test_all().
-
%% Internal API
parse_encoder_options([], State) ->
@@ -98,11 +95,8 @@ json_encode(false, _State) ->
<<"false">>;
json_encode(null, _State) ->
<<"null">>;
-json_encode(I, _State) when is_integer(I) andalso I >= -2147483648 andalso I =< 2147483647 ->
- %% Anything outside of 32-bit integers should be encoded as a float
- integer_to_list(I);
json_encode(I, _State) when is_integer(I) ->
- mochinum:digits(float(I));
+ integer_to_list(I);
json_encode(F, _State) when is_float(F) ->
mochinum:digits(F);
json_encode(S, State) when is_binary(S); is_atom(S) ->
@@ -111,6 +105,8 @@ json_encode(Array, State) when is_list(Array) ->
json_encode_array(Array, State);
json_encode({struct, Props}, State) when is_list(Props) ->
json_encode_proplist(Props, State);
+json_encode({json, IoList}, _State) ->
+ IoList;
json_encode(Bad, #encoder{handler=null}) ->
exit({json_encode, {bad_term, Bad}});
json_encode(Bad, State=#encoder{handler=Handler}) ->
@@ -205,12 +201,10 @@ json_bin_is_safe(<<C, Rest/binary>>) ->
false;
$\t ->
false;
- C when C >= 0, C < $\s; C >= 16#7f, C =< 16#10FFFF ->
+ C when C >= 0, C < $\s; C >= 16#7f ->
false;
C when C < 16#7f ->
- json_bin_is_safe(Rest);
- _ ->
- false
+ json_bin_is_safe(Rest)
end.
json_encode_string_unicode([], _State, Acc) ->
@@ -408,8 +402,22 @@ tokenize_string(B, S=#decoder{offset=O}, Acc) ->
Acc1 = lists:reverse(xmerl_ucs:to_utf8(C), Acc),
tokenize_string(B, ?ADV_COL(S, 6), Acc1)
end;
- <<_:O/binary, C, _/binary>> ->
- tokenize_string(B, ?INC_CHAR(S, C), [C | Acc])
+ <<_:O/binary, C1, _/binary>> when C1 < 128 ->
+ tokenize_string(B, ?INC_CHAR(S, C1), [C1 | Acc]);
+ <<_:O/binary, C1, C2, _/binary>> when C1 >= 194, C1 =< 223,
+ C2 >= 128, C2 =< 191 ->
+ tokenize_string(B, ?ADV_COL(S, 2), [C2, C1 | Acc]);
+ <<_:O/binary, C1, C2, C3, _/binary>> when C1 >= 224, C1 =< 239,
+ C2 >= 128, C2 =< 191,
+ C3 >= 128, C3 =< 191 ->
+ tokenize_string(B, ?ADV_COL(S, 3), [C3, C2, C1 | Acc]);
+ <<_:O/binary, C1, C2, C3, C4, _/binary>> when C1 >= 240, C1 =< 244,
+ C2 >= 128, C2 =< 191,
+ C3 >= 128, C3 =< 191,
+ C4 >= 128, C4 =< 191 ->
+ tokenize_string(B, ?ADV_COL(S, 4), [C4, C3, C2, C1 | Acc]);
+ _ ->
+ throw(invalid_utf8)
end.
tokenize_number(B, S) ->
@@ -510,6 +518,12 @@ tokenize(B, S=#decoder{offset=O}) ->
trim = S#decoder.state,
{eof, S}
end.
+%%
+%% Tests
+%%
+-include_lib("eunit/include/eunit.hrl").
+-ifdef(TEST).
+
%% testing constructs borrowed from the Yaws JSON implementation.
@@ -519,19 +533,13 @@ obj_new() ->
{struct, []}.
is_obj({struct, Props}) ->
- F = fun ({K, _}) when is_binary(K) ->
- true;
- (_) ->
- false
- end,
+ F = fun ({K, _}) when is_binary(K) -> true end,
lists:all(F, Props).
obj_from_list(Props) ->
Obj = {struct, Props},
- case is_obj(Obj) of
- true -> Obj;
- false -> exit({json_bad_object, Obj})
- end.
+ ?assert(is_obj(Obj)),
+ Obj.
%% Test for equivalence of Erlang terms.
%% Due to arbitrary order of construction, equivalent objects might
@@ -544,9 +552,7 @@ equiv(L1, L2) when is_list(L1), is_list(L2) ->
equiv_list(L1, L2);
equiv(N1, N2) when is_number(N1), is_number(N2) -> N1 == N2;
equiv(B1, B2) when is_binary(B1), is_binary(B2) -> B1 == B2;
-equiv(true, true) -> true;
-equiv(false, false) -> true;
-equiv(null, null) -> true.
+equiv(A, A) when A =:= true orelse A =:= false orelse A =:= null -> true.
%% Object representation and traversal order is unknown.
%% Use the sledgehammer and sort property lists.
@@ -566,11 +572,11 @@ equiv_list([], []) ->
equiv_list([V1 | L1], [V2 | L2]) ->
equiv(V1, V2) andalso equiv_list(L1, L2).
-test_all() ->
+decode_test() ->
[1199344435545.0, 1] = decode(<<"[1199344435545.0,1]">>),
- <<16#F0,16#9D,16#9C,16#95>> = decode([34,"\\ud835","\\udf15",34]),
- test_encoder_utf8(),
- test_input_validation(),
+ <<16#F0,16#9D,16#9C,16#95>> = decode([34,"\\ud835","\\udf15",34]).
+
+e2j_vec_test() ->
test_one(e2j_test_vec(utf8), 1).
test_one([], _N) ->
@@ -627,7 +633,7 @@ e2j_test_vec(utf8) ->
].
%% test utf8 encoding
-test_encoder_utf8() ->
+encoder_utf8_test() ->
%% safe conversion case (default)
[34,"\\u0001","\\u0442","\\u0435","\\u0441","\\u0442",34] =
encode(<<1,"\321\202\320\265\321\201\321\202">>),
@@ -637,11 +643,11 @@ test_encoder_utf8() ->
[34,"\\u0001",[209,130],[208,181],[209,129],[209,130],34] =
Enc(<<1,"\321\202\320\265\321\201\321\202">>).
-test_input_validation() ->
+input_validation_test() ->
Good = [
- {16#00A3, <<?Q, 16#C2, 16#A3, ?Q>>}, % pound
- {16#20AC, <<?Q, 16#E2, 16#82, 16#AC, ?Q>>}, % euro
- {16#10196, <<?Q, 16#F0, 16#90, 16#86, 16#96, ?Q>>} % denarius
+ {16#00A3, <<?Q, 16#C2, 16#A3, ?Q>>}, %% pound
+ {16#20AC, <<?Q, 16#E2, 16#82, 16#AC, ?Q>>}, %% euro
+ {16#10196, <<?Q, 16#F0, 16#90, 16#86, 16#96, ?Q>>} %% denarius
],
lists:foreach(fun({CodePoint, UTF8}) ->
Expect = list_to_binary(xmerl_ucs:to_utf8(CodePoint)),
@@ -649,15 +655,148 @@ test_input_validation() ->
end, Good),
Bad = [
- % 2nd, 3rd, or 4th byte of a multi-byte sequence w/o leading byte
+ %% 2nd, 3rd, or 4th byte of a multi-byte sequence w/o leading byte
<<?Q, 16#80, ?Q>>,
- % missing continuations, last byte in each should be 80-BF
+ %% missing continuations, last byte in each should be 80-BF
<<?Q, 16#C2, 16#7F, ?Q>>,
<<?Q, 16#E0, 16#80,16#7F, ?Q>>,
<<?Q, 16#F0, 16#80, 16#80, 16#7F, ?Q>>,
- % we don't support code points > 10FFFF per RFC 3629
- <<?Q, 16#F5, 16#80, 16#80, 16#80, ?Q>>
+ %% we don't support code points > 10FFFF per RFC 3629
+ <<?Q, 16#F5, 16#80, 16#80, 16#80, ?Q>>,
+ %% escape characters trigger a different code path
+ <<?Q, $\\, $\n, 16#80, ?Q>>
],
- lists:foreach(fun(X) ->
- ok = try decode(X) catch invalid_utf8 -> ok end
- end, Bad).
+ lists:foreach(
+ fun(X) ->
+ ok = try decode(X) catch invalid_utf8 -> ok end,
+ %% could be {ucs,{bad_utf8_character_code}} or
+ %% {json_encode,{bad_char,_}}
+ {'EXIT', _} = (catch encode(X))
+ end, Bad).
+
+inline_json_test() ->
+ ?assertEqual(<<"\"iodata iodata\"">>,
+ iolist_to_binary(
+ encode({json, [<<"\"iodata">>, " iodata\""]}))),
+ ?assertEqual({struct, [{<<"key">>, <<"iodata iodata">>}]},
+ decode(
+ encode({struct,
+ [{key, {json, [<<"\"iodata">>, " iodata\""]}}]}))),
+ ok.
+
+big_unicode_test() ->
+ UTF8Seq = list_to_binary(xmerl_ucs:to_utf8(16#0001d120)),
+ ?assertEqual(
+ <<"\"\\ud834\\udd20\"">>,
+ iolist_to_binary(encode(UTF8Seq))),
+ ?assertEqual(
+ UTF8Seq,
+ decode(iolist_to_binary(encode(UTF8Seq)))),
+ ok.
+
+custom_decoder_test() ->
+ ?assertEqual(
+ {struct, [{<<"key">>, <<"value">>}]},
+ (decoder([]))("{\"key\": \"value\"}")),
+ F = fun ({struct, [{<<"key">>, <<"value">>}]}) -> win end,
+ ?assertEqual(
+ win,
+ (decoder([{object_hook, F}]))("{\"key\": \"value\"}")),
+ ok.
+
+atom_test() ->
+ %% JSON native atoms
+ [begin
+ ?assertEqual(A, decode(atom_to_list(A))),
+ ?assertEqual(iolist_to_binary(atom_to_list(A)),
+ iolist_to_binary(encode(A)))
+ end || A <- [true, false, null]],
+ %% Atom to string
+ ?assertEqual(
+ <<"\"foo\"">>,
+ iolist_to_binary(encode(foo))),
+ ?assertEqual(
+ <<"\"\\ud834\\udd20\"">>,
+ iolist_to_binary(encode(list_to_atom(xmerl_ucs:to_utf8(16#0001d120))))),
+ ok.
+
+key_encode_test() ->
+ %% Some forms are accepted as keys that would not be strings in other
+ %% cases
+ ?assertEqual(
+ <<"{\"foo\":1}">>,
+ iolist_to_binary(encode({struct, [{foo, 1}]}))),
+ ?assertEqual(
+ <<"{\"foo\":1}">>,
+ iolist_to_binary(encode({struct, [{<<"foo">>, 1}]}))),
+ ?assertEqual(
+ <<"{\"foo\":1}">>,
+ iolist_to_binary(encode({struct, [{"foo", 1}]}))),
+ ?assertEqual(
+ <<"{\"\\ud834\\udd20\":1}">>,
+ iolist_to_binary(
+ encode({struct, [{[16#0001d120], 1}]}))),
+ ?assertEqual(
+ <<"{\"1\":1}">>,
+ iolist_to_binary(encode({struct, [{1, 1}]}))),
+ ok.
+
+unsafe_chars_test() ->
+ Chars = "\"\\\b\f\n\r\t",
+ [begin
+ ?assertEqual(false, json_string_is_safe([C])),
+ ?assertEqual(false, json_bin_is_safe(<<C>>)),
+ ?assertEqual(<<C>>, decode(encode(<<C>>)))
+ end || C <- Chars],
+ ?assertEqual(
+ false,
+ json_string_is_safe([16#0001d120])),
+ ?assertEqual(
+ false,
+ json_bin_is_safe(list_to_binary(xmerl_ucs:to_utf8(16#0001d120)))),
+ ?assertEqual(
+ [16#0001d120],
+ xmerl_ucs:from_utf8(
+ binary_to_list(
+ decode(encode(list_to_atom(xmerl_ucs:to_utf8(16#0001d120))))))),
+ ?assertEqual(
+ false,
+ json_string_is_safe([16#110000])),
+ ?assertEqual(
+ false,
+ json_bin_is_safe(list_to_binary(xmerl_ucs:to_utf8([16#110000])))),
+ %% solidus can be escaped but isn't unsafe by default
+ ?assertEqual(
+ <<"/">>,
+ decode(<<"\"\\/\"">>)),
+ ok.
+
+int_test() ->
+ ?assertEqual(0, decode("0")),
+ ?assertEqual(1, decode("1")),
+ ?assertEqual(11, decode("11")),
+ ok.
+
+large_int_test() ->
+ ?assertEqual(<<"-2147483649214748364921474836492147483649">>,
+ iolist_to_binary(encode(-2147483649214748364921474836492147483649))),
+ ?assertEqual(<<"2147483649214748364921474836492147483649">>,
+ iolist_to_binary(encode(2147483649214748364921474836492147483649))),
+ ok.
+
+float_test() ->
+ ?assertEqual(<<"-2147483649.0">>, iolist_to_binary(encode(-2147483649.0))),
+ ?assertEqual(<<"2147483648.0">>, iolist_to_binary(encode(2147483648.0))),
+ ok.
+
+handler_test() ->
+ ?assertEqual(
+ {'EXIT',{json_encode,{bad_term,{}}}},
+ catch encode({})),
+ F = fun ({}) -> [] end,
+ ?assertEqual(
+ <<"[]">>,
+ iolist_to_binary((encoder([{handler, F}]))({}))),
+ ok.
+
+-endif.
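
Two encoder behaviours change in this patch: integers are now always emitted with integer_to_list/1 instead of being coerced to floats outside the 32-bit range (see large_int_test), and the new {json, iolist()} term is passed through verbatim, letting a caller splice an already-encoded fragment into a larger document without a decode/re-encode round trip. A hedged sketch of the latter, following inline_json_test above:

%% The pre-encoded fragment is emitted untouched.
Pre = {json, <<"[1,2,3]">>},
<<"{\"nums\":[1,2,3]}">> =
    iolist_to_binary(mochijson2:encode({struct, [{nums, Pre}]})).
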
diff --git a/src/mochiweb/mochilists.erl b/src/mochiweb/mochilists.erl
new file mode 100644
index 00000000..8981e7b6
--- /dev/null
+++ b/src/mochiweb/mochilists.erl
@@ -0,0 +1,104 @@
+%% @copyright Copyright (c) 2010 Mochi Media, Inc.
+%% @author David Reid <dreid@mochimedia.com>
+
+%% @doc Utility functions for dealing with proplists.
+
+-module(mochilists).
+-author("David Reid <dreid@mochimedia.com>").
+-export([get_value/2, get_value/3, is_defined/2, set_default/2, set_defaults/2]).
+
+%% @spec set_default({Key::term(), Value::term()}, Proplist::list()) -> list()
+%%
+%% @doc Return new Proplist with {Key, Value} set if not is_defined(Key, Proplist).
+set_default({Key, Value}, Proplist) ->
+ case is_defined(Key, Proplist) of
+ true ->
+ Proplist;
+ false ->
+ [{Key, Value} | Proplist]
+ end.
+
+%% @spec set_defaults([{Key::term(), Value::term()}], Proplist::list()) -> list()
+%%
+%% @doc Return new Proplist with {Key, Value} set if not is_defined(Key, Proplist).
+set_defaults(DefaultProps, Proplist) ->
+ lists:foldl(fun set_default/2, Proplist, DefaultProps).
+
+
+%% @spec is_defined(Key::term(), Proplist::list()) -> bool()
+%%
+%% @doc Returns true if Proplist contains at least one entry associated
+%% with Key, otherwise false is returned.
+is_defined(Key, Proplist) ->
+ lists:keyfind(Key, 1, Proplist) =/= false.
+
+
+%% @spec get_value(Key::term(), Proplist::list()) -> term() | undefined
+%%
+%% @doc Return the value of <code>Key</code> or undefined
+get_value(Key, Proplist) ->
+ get_value(Key, Proplist, undefined).
+
+%% @spec get_value(Key::term(), Proplist::list(), Default::term()) -> term()
+%%
+%% @doc Return the value of <code>Key</code> or <code>Default</code>
+get_value(_Key, [], Default) ->
+ Default;
+get_value(Key, Proplist, Default) ->
+ case lists:keyfind(Key, 1, Proplist) of
+ false ->
+ Default;
+ {Key, Value} ->
+ Value
+ end.
+
+%%
+%% Tests
+%%
+-include_lib("eunit/include/eunit.hrl").
+-ifdef(TEST).
+
+set_defaults_test() ->
+ ?assertEqual(
+ [{k, v}],
+ set_defaults([{k, v}], [])),
+ ?assertEqual(
+ [{k, v}],
+ set_defaults([{k, vee}], [{k, v}])),
+ ?assertEqual(
+ lists:sort([{kay, vee}, {k, v}]),
+ lists:sort(set_defaults([{k, vee}, {kay, vee}], [{k, v}]))),
+ ok.
+
+set_default_test() ->
+ ?assertEqual(
+ [{k, v}],
+ set_default({k, v}, [])),
+ ?assertEqual(
+ [{k, v}],
+ set_default({k, vee}, [{k, v}])),
+ ok.
+
+get_value_test() ->
+ ?assertEqual(
+ undefined,
+ get_value(foo, [])),
+ ?assertEqual(
+ undefined,
+ get_value(foo, [{bar, baz}])),
+ ?assertEqual(
+ bar,
+ get_value(foo, [{foo, bar}])),
+ ?assertEqual(
+ default,
+ get_value(foo, [], default)),
+ ?assertEqual(
+ default,
+ get_value(foo, [{bar, baz}], default)),
+ ?assertEqual(
+ bar,
+ get_value(foo, [{foo, bar}], default)),
+ ok.
+
+-endif.
+
diff --git a/src/mochiweb/mochilogfile2.erl b/src/mochiweb/mochilogfile2.erl
new file mode 100644
index 00000000..c34ee73a
--- /dev/null
+++ b/src/mochiweb/mochilogfile2.erl
@@ -0,0 +1,140 @@
+%% @author Bob Ippolito <bob@mochimedia.com>
+%% @copyright 2010 Mochi Media, Inc.
+
+%% @doc Write newline delimited log files, ensuring that if a truncated
+%% entry is found on log open then it is fixed before writing. Uses
+%% delayed writes and raw files for performance.
+-module(mochilogfile2).
+-author('bob@mochimedia.com').
+
+-export([open/1, write/2, close/1, name/1]).
+
+%% @spec open(Name) -> Handle
+%% @doc Open the log file Name, creating or appending as necessary. All data
+%% at the end of the file will be truncated until a newline is found, to
+%% ensure that all records are complete.
+open(Name) ->
+ {ok, FD} = file:open(Name, [raw, read, write, delayed_write, binary]),
+ fix_log(FD),
+ {?MODULE, Name, FD}.
+
+%% @spec name(Handle) -> string()
+%% @doc Return the path of the log file.
+name({?MODULE, Name, _FD}) ->
+ Name.
+
+%% @spec write(Handle, IoData) -> ok
+%% @doc Write IoData to the log file referenced by Handle.
+write({?MODULE, _Name, FD}, IoData) ->
+ ok = file:write(FD, [IoData, $\n]),
+ ok.
+
+%% @spec close(Handle) -> ok
+%% @doc Close the log file referenced by Handle.
+close({?MODULE, _Name, FD}) ->
+ ok = file:sync(FD),
+ ok = file:close(FD),
+ ok.
+
+fix_log(FD) ->
+ {ok, Location} = file:position(FD, eof),
+ Seek = find_last_newline(FD, Location),
+ {ok, Seek} = file:position(FD, Seek),
+ ok = file:truncate(FD),
+ ok.
+
+%% Seek backwards to the last valid log entry
+find_last_newline(_FD, N) when N =< 1 ->
+ 0;
+find_last_newline(FD, Location) ->
+ case file:pread(FD, Location - 1, 1) of
+ {ok, <<$\n>>} ->
+ Location;
+ {ok, _} ->
+ find_last_newline(FD, Location - 1)
+ end.
+
+%%
+%% Tests
+%%
+-include_lib("eunit/include/eunit.hrl").
+-ifdef(TEST).
+name_test() ->
+ D = mochitemp:mkdtemp(),
+ FileName = filename:join(D, "open_close_test.log"),
+ H = open(FileName),
+ ?assertEqual(
+ FileName,
+ name(H)),
+ close(H),
+ file:delete(FileName),
+ file:del_dir(D),
+ ok.
+
+open_close_test() ->
+ D = mochitemp:mkdtemp(),
+ FileName = filename:join(D, "open_close_test.log"),
+ OpenClose = fun () ->
+ H = open(FileName),
+ ?assertEqual(
+ true,
+ filelib:is_file(FileName)),
+ ok = close(H),
+ ?assertEqual(
+ {ok, <<>>},
+ file:read_file(FileName)),
+ ok
+ end,
+ OpenClose(),
+ OpenClose(),
+ file:delete(FileName),
+ file:del_dir(D),
+ ok.
+
+write_test() ->
+ D = mochitemp:mkdtemp(),
+ FileName = filename:join(D, "write_test.log"),
+ F = fun () ->
+ H = open(FileName),
+ write(H, "test line"),
+ close(H),
+ ok
+ end,
+ F(),
+ ?assertEqual(
+ {ok, <<"test line\n">>},
+ file:read_file(FileName)),
+ F(),
+ ?assertEqual(
+ {ok, <<"test line\ntest line\n">>},
+ file:read_file(FileName)),
+ file:delete(FileName),
+ file:del_dir(D),
+ ok.
+
+fix_log_test() ->
+ D = mochitemp:mkdtemp(),
+ FileName = filename:join(D, "write_test.log"),
+ file:write_file(FileName, <<"first line good\nsecond line bad">>),
+ F = fun () ->
+ H = open(FileName),
+ write(H, "test line"),
+ close(H),
+ ok
+ end,
+ F(),
+ ?assertEqual(
+ {ok, <<"first line good\ntest line\n">>},
+ file:read_file(FileName)),
+ file:write_file(FileName, <<"first line bad">>),
+ F(),
+ ?assertEqual(
+ {ok, <<"test line\n">>},
+ file:read_file(FileName)),
+ F(),
+ ?assertEqual(
+ {ok, <<"test line\ntest line\n">>},
+ file:read_file(FileName)),
+ ok.
+
+-endif.
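
The interesting part of mochilogfile2 is the repair on open: fix_log/1 seeks back to the last newline and truncates, so a record cut short by a crash disappears instead of corrupting later reads (see fix_log_test). A hedged usage sketch (the path and function name are illustrative):

append_event(Event) ->
    %% open/1 first truncates any trailing partial record
    H = mochilogfile2:open("/tmp/events.log"),
    ok = mochilogfile2:write(H, Event),    %% appends Event ++ "\n"
    ok = mochilogfile2:close(H).
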
diff --git a/src/mochiweb/mochinum.erl b/src/mochiweb/mochinum.erl
index 6a866042..a7e2bfbc 100644
--- a/src/mochiweb/mochinum.erl
+++ b/src/mochiweb/mochinum.erl
@@ -11,7 +11,7 @@
-module(mochinum).
-author("Bob Ippolito <bob@mochimedia.com>").
--export([digits/1, frexp/1, int_pow/2, int_ceil/1, test/0]).
+-export([digits/1, frexp/1, int_pow/2, int_ceil/1]).
%% IEEE 754 Float exponent bias
-define(FLOAT_BIAS, 1022).
@@ -120,7 +120,7 @@ digits1(Float, Exp, Frac) ->
case Exp >= 0 of
true ->
BExp = 1 bsl Exp,
- case (Frac /= ?BIG_POW) of
+ case (Frac =/= ?BIG_POW) of
true ->
scale((Frac * BExp * 2), 2, BExp, BExp,
Round, Round, Float);
@@ -129,7 +129,7 @@ digits1(Float, Exp, Frac) ->
Round, Round, Float)
end;
false ->
- case (Exp == ?MIN_EXP) orelse (Frac /= ?BIG_POW) of
+ case (Exp =:= ?MIN_EXP) orelse (Frac =/= ?BIG_POW) of
true ->
scale((Frac * 2), 1 bsl (1 - Exp), 1, 1,
Round, Round, Float);
@@ -228,14 +228,13 @@ log2floor(Int, N) ->
log2floor(Int bsr 1, 1 + N).
-test() ->
- ok = test_frexp(),
- ok = test_int_ceil(),
- ok = test_int_pow(),
- ok = test_digits(),
- ok.
+%%
+%% Tests
+%%
+-include_lib("eunit/include/eunit.hrl").
+-ifdef(TEST).
-test_int_ceil() ->
+int_ceil_test() ->
1 = int_ceil(0.0001),
0 = int_ceil(0.0),
1 = int_ceil(0.99),
@@ -244,7 +243,7 @@ test_int_ceil() ->
-2 = int_ceil(-2.0),
ok.
-test_int_pow() ->
+int_pow_test() ->
1 = int_pow(1, 1),
1 = int_pow(1, 0),
1 = int_pow(10, 0),
@@ -253,17 +252,58 @@ test_int_pow() ->
1000 = int_pow(10, 3),
ok.
-test_digits() ->
- "0" = digits(0),
- "0.0" = digits(0.0),
- "1.0" = digits(1.0),
- "-1.0" = digits(-1.0),
- "0.1" = digits(0.1),
- "0.01" = digits(0.01),
- "0.001" = digits(0.001),
+digits_test() ->
+ ?assertEqual("0",
+ digits(0)),
+ ?assertEqual("0.0",
+ digits(0.0)),
+ ?assertEqual("1.0",
+ digits(1.0)),
+ ?assertEqual("-1.0",
+ digits(-1.0)),
+ ?assertEqual("0.1",
+ digits(0.1)),
+ ?assertEqual("0.01",
+ digits(0.01)),
+ ?assertEqual("0.001",
+ digits(0.001)),
+ ?assertEqual("1.0e+6",
+ digits(1000000.0)),
+ ?assertEqual("0.5",
+ digits(0.5)),
+ ?assertEqual("4503599627370496.0",
+ digits(4503599627370496.0)),
+ %% small denormalized number
+ %% 4.94065645841246544177e-324
+ <<SmallDenorm/float>> = <<0,0,0,0,0,0,0,1>>,
+ ?assertEqual("4.9406564584124654e-324",
+ digits(SmallDenorm)),
+ ?assertEqual(SmallDenorm,
+ list_to_float(digits(SmallDenorm))),
+ %% large denormalized number
+ %% 2.22507385850720088902e-308
+ <<BigDenorm/float>> = <<0,15,255,255,255,255,255,255>>,
+ ?assertEqual("2.225073858507201e-308",
+ digits(BigDenorm)),
+ ?assertEqual(BigDenorm,
+ list_to_float(digits(BigDenorm))),
+ %% small normalized number
+ %% 2.22507385850720138309e-308
+ <<SmallNorm/float>> = <<0,16,0,0,0,0,0,0>>,
+ ?assertEqual("2.2250738585072014e-308",
+ digits(SmallNorm)),
+ ?assertEqual(SmallNorm,
+ list_to_float(digits(SmallNorm))),
+ %% large normalized number
+ %% 1.79769313486231570815e+308
+ <<LargeNorm/float>> = <<127,239,255,255,255,255,255,255>>,
+ ?assertEqual("1.7976931348623157e+308",
+ digits(LargeNorm)),
+ ?assertEqual(LargeNorm,
+ list_to_float(digits(LargeNorm))),
ok.
-test_frexp() ->
+frexp_test() ->
%% zero
{0.0, 0} = frexp(0.0),
%% one
@@ -287,3 +327,5 @@ test_frexp() ->
<<LargeNorm/float>> = <<127,239,255,255,255,255,255,255>>,
{0.99999999999999989, 1024} = frexp(LargeNorm),
ok.
+
+-endif.
diff --git a/src/mochiweb/mochitemp.erl b/src/mochiweb/mochitemp.erl
new file mode 100644
index 00000000..bb23d2a6
--- /dev/null
+++ b/src/mochiweb/mochitemp.erl
@@ -0,0 +1,310 @@
+%% @author Bob Ippolito <bob@mochimedia.com>
+%% @copyright 2010 Mochi Media, Inc.
+
+%% @doc Create temporary files and directories. Requires crypto to be started.
+
+-module(mochitemp).
+-export([gettempdir/0]).
+-export([mkdtemp/0, mkdtemp/3]).
+-export([rmtempdir/1]).
+%% -export([mkstemp/4]).
+-define(SAFE_CHARS, {$a, $b, $c, $d, $e, $f, $g, $h, $i, $j, $k, $l, $m,
+ $n, $o, $p, $q, $r, $s, $t, $u, $v, $w, $x, $y, $z,
+ $A, $B, $C, $D, $E, $F, $G, $H, $I, $J, $K, $L, $M,
+ $N, $O, $P, $Q, $R, $S, $T, $U, $V, $W, $X, $Y, $Z,
+ $0, $1, $2, $3, $4, $5, $6, $7, $8, $9, $_}).
+-define(TMP_MAX, 10000).
+
+-include_lib("kernel/include/file.hrl").
+
+%% TODO: An ugly wrapper over the mktemp tool with open_port and sadness?
+%% We can't implement this race-free in Erlang without the ability
+%% to issue O_CREAT|O_EXCL. I suppose we could hack something with
+%% mkdtemp, del_dir, open.
+%% mkstemp(Suffix, Prefix, Dir, Options) ->
+%% ok.
+
+rmtempdir(Dir) ->
+ case file:del_dir(Dir) of
+ {error, eexist} ->
+ ok = rmtempdirfiles(Dir),
+ ok = file:del_dir(Dir);
+ ok ->
+ ok
+ end.
+
+rmtempdirfiles(Dir) ->
+ {ok, Files} = file:list_dir(Dir),
+ ok = rmtempdirfiles(Dir, Files).
+
+rmtempdirfiles(_Dir, []) ->
+ ok;
+rmtempdirfiles(Dir, [Basename | Rest]) ->
+ Path = filename:join([Dir, Basename]),
+ case filelib:is_dir(Path) of
+ true ->
+ ok = rmtempdir(Path);
+ false ->
+ ok = file:delete(Path)
+ end,
+ rmtempdirfiles(Dir, Rest).
+
+mkdtemp() ->
+ mkdtemp("", "tmp", gettempdir()).
+
+mkdtemp(Suffix, Prefix, Dir) ->
+ mkdtemp_n(rngpath_fun(Suffix, Prefix, Dir), ?TMP_MAX).
+
+
+
+mkdtemp_n(RngPath, 1) ->
+ make_dir(RngPath());
+mkdtemp_n(RngPath, N) ->
+ try make_dir(RngPath())
+ catch throw:{error, eexist} ->
+ mkdtemp_n(RngPath, N - 1)
+ end.
+
+make_dir(Path) ->
+ case file:make_dir(Path) of
+ ok ->
+ ok;
+ E={error, eexist} ->
+ throw(E)
+ end,
+ %% Small window for a race condition here because dir is created 777
+ ok = file:write_file_info(Path, #file_info{mode=8#0700}),
+ Path.
+
+rngpath_fun(Prefix, Suffix, Dir) ->
+ fun () ->
+ filename:join([Dir, Prefix ++ rngchars(6) ++ Suffix])
+ end.
+
+rngchars(0) ->
+ "";
+rngchars(N) ->
+ [rngchar() | rngchars(N - 1)].
+
+rngchar() ->
+ rngchar(crypto:rand_uniform(0, tuple_size(?SAFE_CHARS))).
+
+rngchar(C) ->
+ element(1 + C, ?SAFE_CHARS).
+
+%% @spec gettempdir() -> string()
+%% @doc Get a usable temporary directory using the first of these that is a directory:
+%% $TMPDIR, $TMP, $TEMP, "/tmp", "/var/tmp", "/usr/tmp", ".".
+gettempdir() ->
+ gettempdir(gettempdir_checks(), fun normalize_dir/1).
+
+gettempdir_checks() ->
+ [{fun os:getenv/1, ["TMPDIR", "TMP", "TEMP"]},
+ {fun gettempdir_identity/1, ["/tmp", "/var/tmp", "/usr/tmp"]},
+ {fun gettempdir_cwd/1, [cwd]}].
+
+gettempdir_identity(L) ->
+ L.
+
+gettempdir_cwd(cwd) ->
+ {ok, L} = file:get_cwd(),
+ L.
+
+gettempdir([{_F, []} | RestF], Normalize) ->
+ gettempdir(RestF, Normalize);
+gettempdir([{F, [L | RestL]} | RestF], Normalize) ->
+ case Normalize(F(L)) of
+ false ->
+ gettempdir([{F, RestL} | RestF], Normalize);
+ Dir ->
+ Dir
+ end.
+
+normalize_dir(False) when False =:= false orelse False =:= "" ->
+ %% Erlang doesn't have an unsetenv, wtf.
+ false;
+normalize_dir(L) ->
+ Dir = filename:absname(L),
+ case filelib:is_dir(Dir) of
+ false ->
+ false;
+ true ->
+ Dir
+ end.
+
+%%
+%% Tests
+%%
+-include_lib("eunit/include/eunit.hrl").
+-ifdef(TEST).
+pushenv(L) ->
+ [{K, os:getenv(K)} || K <- L].
+popenv(L) ->
+ F = fun ({K, false}) ->
+ %% Erlang doesn't have an unsetenv, wtf.
+ os:putenv(K, "");
+ ({K, V}) ->
+ os:putenv(K, V)
+ end,
+ lists:foreach(F, L).
+
+gettempdir_fallback_test() ->
+ ?assertEqual(
+ "/",
+ gettempdir([{fun gettempdir_identity/1, ["/--not-here--/"]},
+ {fun gettempdir_identity/1, ["/"]}],
+ fun normalize_dir/1)),
+ ?assertEqual(
+ "/",
+ %% simulate a true os:getenv unset env
+ gettempdir([{fun gettempdir_identity/1, [false]},
+ {fun gettempdir_identity/1, ["/"]}],
+ fun normalize_dir/1)),
+ ok.
+
+gettempdir_identity_test() ->
+ ?assertEqual(
+ "/",
+ gettempdir([{fun gettempdir_identity/1, ["/"]}], fun normalize_dir/1)),
+ ok.
+
+gettempdir_cwd_test() ->
+ {ok, Cwd} = file:get_cwd(),
+ ?assertEqual(
+ normalize_dir(Cwd),
+ gettempdir([{fun gettempdir_cwd/1, [cwd]}], fun normalize_dir/1)),
+ ok.
+
+rngchars_test() ->
+ crypto:start(),
+ ?assertEqual(
+ "",
+ rngchars(0)),
+ ?assertEqual(
+ 10,
+ length(rngchars(10))),
+ ok.
+
+rngchar_test() ->
+ ?assertEqual(
+ $a,
+ rngchar(0)),
+ ?assertEqual(
+ $A,
+ rngchar(26)),
+ ?assertEqual(
+ $_,
+ rngchar(62)),
+ ok.
+
+mkdtemp_n_failonce_test() ->
+ crypto:start(),
+ D = mkdtemp(),
+ Path = filename:join([D, "testdir"]),
+ %% Toggle the existence of a dir so that it fails
+ %% the first time and succeeds the second.
+ F = fun () ->
+ case filelib:is_dir(Path) of
+ true ->
+ file:del_dir(Path);
+ false ->
+ file:make_dir(Path)
+ end,
+ Path
+ end,
+ try
+ %% Fails the first time
+ ?assertThrow(
+ {error, eexist},
+ mkdtemp_n(F, 1)),
+ %% Reset state
+ file:del_dir(Path),
+ %% Succeeds the second time
+ ?assertEqual(
+ Path,
+ mkdtemp_n(F, 2))
+ after rmtempdir(D)
+ end,
+ ok.
+
+mkdtemp_n_fail_test() ->
+ {ok, Cwd} = file:get_cwd(),
+ ?assertThrow(
+ {error, eexist},
+ mkdtemp_n(fun () -> Cwd end, 1)),
+ ?assertThrow(
+ {error, eexist},
+ mkdtemp_n(fun () -> Cwd end, 2)),
+ ok.
+
+make_dir_fail_test() ->
+ {ok, Cwd} = file:get_cwd(),
+ ?assertThrow(
+ {error, eexist},
+ make_dir(Cwd)),
+ ok.
+
+mkdtemp_test() ->
+ crypto:start(),
+ D = mkdtemp(),
+ ?assertEqual(
+ true,
+ filelib:is_dir(D)),
+ ?assertEqual(
+ ok,
+ file:del_dir(D)),
+ ok.
+
+rmtempdir_test() ->
+ crypto:start(),
+ D1 = mkdtemp(),
+ ?assertEqual(
+ true,
+ filelib:is_dir(D1)),
+ ?assertEqual(
+ ok,
+ rmtempdir(D1)),
+ D2 = mkdtemp(),
+ ?assertEqual(
+ true,
+ filelib:is_dir(D2)),
+ ok = file:write_file(filename:join([D2, "foo"]), <<"bytes">>),
+ D3 = mkdtemp("suffix", "prefix", D2),
+ ?assertEqual(
+ true,
+ filelib:is_dir(D3)),
+ ok = file:write_file(filename:join([D3, "foo"]), <<"bytes">>),
+ ?assertEqual(
+ ok,
+ rmtempdir(D2)),
+ ?assertEqual(
+ {error, enoent},
+ file:consult(D3)),
+ ?assertEqual(
+ {error, enoent},
+ file:consult(D2)),
+ ok.
+
+gettempdir_env_test() ->
+ Env = pushenv(["TMPDIR", "TEMP", "TMP"]),
+ FalseEnv = [{"TMPDIR", false}, {"TEMP", false}, {"TMP", false}],
+ try
+ popenv(FalseEnv),
+ popenv([{"TMPDIR", "/"}]),
+ ?assertEqual(
+ "/",
+ os:getenv("TMPDIR")),
+ ?assertEqual(
+ "/",
+ gettempdir()),
+ {ok, Cwd} = file:get_cwd(),
+ popenv(FalseEnv),
+ popenv([{"TMP", Cwd}]),
+ ?assertEqual(
+ normalize_dir(Cwd),
+ gettempdir())
+ after popenv(Env)
+ end,
+ ok.
+
+-endif.
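
As the module doc notes, mochitemp needs crypto running before mkdtemp/0 can generate random directory names; rmtempdir/1 then removes the directory and everything inside it. A hedged sketch (the wrapper name is illustrative):

with_scratch_dir(Fun) ->
    crypto:start(),                        %% required for random names
    Dir = mochitemp:mkdtemp(),
    try
        Fun(Dir)
    after
        ok = mochitemp:rmtempdir(Dir)      %% recursive delete
    end.
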
diff --git a/src/mochiweb/mochiutf8.erl b/src/mochiweb/mochiutf8.erl
new file mode 100644
index 00000000..206e1186
--- /dev/null
+++ b/src/mochiweb/mochiutf8.erl
@@ -0,0 +1,316 @@
+%% @copyright 2010 Mochi Media, Inc.
+%% @author Bob Ippolito <bob@mochimedia.com>
+
+%% @doc Algorithm to convert any binary to a valid UTF-8 sequence by ignoring
+%% invalid bytes.
+
+-module(mochiutf8).
+-export([valid_utf8_bytes/1, codepoint_to_bytes/1, bytes_to_codepoints/1]).
+-export([bytes_foldl/3, codepoint_foldl/3, read_codepoint/1, len/1]).
+
+%% External API
+
+-type unichar_low() :: 0..16#d7ff.
+-type unichar_high() :: 16#e000..16#10ffff.
+-type unichar() :: unichar_low() | unichar_high().
+
+-spec codepoint_to_bytes(unichar()) -> binary().
+%% @doc Convert a unicode codepoint to UTF-8 bytes.
+codepoint_to_bytes(C) when (C >= 16#00 andalso C =< 16#7f) ->
+ %% U+0000 - U+007F - 7 bits
+ <<C>>;
+codepoint_to_bytes(C) when (C >= 16#080 andalso C =< 16#07FF) ->
+ %% U+0080 - U+07FF - 11 bits
+ <<0:5, B1:5, B0:6>> = <<C:16>>,
+ <<2#110:3, B1:5,
+ 2#10:2, B0:6>>;
+codepoint_to_bytes(C) when (C >= 16#0800 andalso C =< 16#FFFF) andalso
+ (C < 16#D800 orelse C > 16#DFFF) ->
+ %% U+0800 - U+FFFF - 16 bits (excluding UTF-16 surrogate code points)
+ <<B2:4, B1:6, B0:6>> = <<C:16>>,
+ <<2#1110:4, B2:4,
+ 2#10:2, B1:6,
+ 2#10:2, B0:6>>;
+codepoint_to_bytes(C) when (C >= 16#010000 andalso C =< 16#10FFFF) ->
+ %% U+10000 - U+10FFFF - 21 bits
+ <<0:3, B3:3, B2:6, B1:6, B0:6>> = <<C:24>>,
+ <<2#11110:5, B3:3,
+ 2#10:2, B2:6,
+ 2#10:2, B1:6,
+ 2#10:2, B0:6>>.
+
+-spec codepoints_to_bytes([unichar()]) -> binary().
+%% @doc Convert a list of codepoints to a UTF-8 binary.
+codepoints_to_bytes(L) ->
+ <<<<(codepoint_to_bytes(C))/binary>> || C <- L>>.
+
+-spec read_codepoint(binary()) -> {unichar(), binary(), binary()}.
+read_codepoint(Bin = <<2#0:1, C:7, Rest/binary>>) ->
+ %% U+0000 - U+007F - 7 bits
+ <<B:1/binary, _/binary>> = Bin,
+ {C, B, Rest};
+read_codepoint(Bin = <<2#110:3, B1:5,
+ 2#10:2, B0:6,
+ Rest/binary>>) ->
+ %% U+0080 - U+07FF - 11 bits
+ case <<B1:5, B0:6>> of
+ <<C:11>> when C >= 16#80 ->
+ <<B:2/binary, _/binary>> = Bin,
+ {C, B, Rest}
+ end;
+read_codepoint(Bin = <<2#1110:4, B2:4,
+ 2#10:2, B1:6,
+ 2#10:2, B0:6,
+ Rest/binary>>) ->
+ %% U+0800 - U+FFFF - 16 bits (excluding UTF-16 surrogate code points)
+ case <<B2:4, B1:6, B0:6>> of
+ <<C:16>> when (C >= 16#0800 andalso C =< 16#FFFF) andalso
+ (C < 16#D800 orelse C > 16#DFFF) ->
+ <<B:3/binary, _/binary>> = Bin,
+ {C, B, Rest}
+ end;
+read_codepoint(Bin = <<2#11110:5, B3:3,
+ 2#10:2, B2:6,
+ 2#10:2, B1:6,
+ 2#10:2, B0:6,
+ Rest/binary>>) ->
+ %% U+10000 - U+10FFFF - 21 bits
+ case <<B3:3, B2:6, B1:6, B0:6>> of
+ <<C:21>> when (C >= 16#010000 andalso C =< 16#10FFFF) ->
+ <<B:4/binary, _/binary>> = Bin,
+ {C, B, Rest}
+ end.
+
+-spec codepoint_foldl(fun((unichar(), _) -> _), _, binary()) -> _.
+codepoint_foldl(F, Acc, <<>>) when is_function(F, 2) ->
+ Acc;
+codepoint_foldl(F, Acc, Bin) ->
+ {C, _, Rest} = read_codepoint(Bin),
+ codepoint_foldl(F, F(C, Acc), Rest).
+
+-spec bytes_foldl(fun((binary(), _) -> _), _, binary()) -> _.
+bytes_foldl(F, Acc, <<>>) when is_function(F, 2) ->
+ Acc;
+bytes_foldl(F, Acc, Bin) ->
+ {_, B, Rest} = read_codepoint(Bin),
+ bytes_foldl(F, F(B, Acc), Rest).
+
+-spec bytes_to_codepoints(binary()) -> [unichar()].
+bytes_to_codepoints(B) ->
+ lists:reverse(codepoint_foldl(fun (C, Acc) -> [C | Acc] end, [], B)).
+
+-spec len(binary()) -> non_neg_integer().
+len(<<>>) ->
+ 0;
+len(B) ->
+ {_, _, Rest} = read_codepoint(B),
+ 1 + len(Rest).
+
+-spec valid_utf8_bytes(B::binary()) -> binary().
+%% @doc Return only the bytes in B that represent valid UTF-8. Uses
+%% the following recursive algorithm: skip one byte if B does not
+%% follow UTF-8 syntax (a 1-4 byte encoding of some number),
+%% skip a sequence of 2-4 bytes if it represents an overlong encoding
+%% or a bad code point (surrogate U+D800 - U+DFFF, or > U+10FFFF).
+valid_utf8_bytes(B) when is_binary(B) ->
+ binary_skip_bytes(B, invalid_utf8_indexes(B)).
+
+%% Internal API
+
+-spec binary_skip_bytes(binary(), [non_neg_integer()]) -> binary().
+%% @doc Return B, but skipping the 0-based indexes in L.
+binary_skip_bytes(B, []) ->
+ B;
+binary_skip_bytes(B, L) ->
+ binary_skip_bytes(B, L, 0, []).
+
+%% @private
+-spec binary_skip_bytes(binary(), [non_neg_integer()], non_neg_integer(), iolist()) -> binary().
+binary_skip_bytes(B, [], _N, Acc) ->
+ iolist_to_binary(lists:reverse([B | Acc]));
+binary_skip_bytes(<<_, RestB/binary>>, [N | RestL], N, Acc) ->
+ binary_skip_bytes(RestB, RestL, 1 + N, Acc);
+binary_skip_bytes(<<C, RestB/binary>>, L, N, Acc) ->
+ binary_skip_bytes(RestB, L, 1 + N, [C | Acc]).
+
+-spec invalid_utf8_indexes(binary()) -> [non_neg_integer()].
+%% @doc Return the 0-based indexes in B that are not valid UTF-8.
+invalid_utf8_indexes(B) ->
+ invalid_utf8_indexes(B, 0, []).
+
+%% @private.
+-spec invalid_utf8_indexes(binary(), non_neg_integer(), [non_neg_integer()]) -> [non_neg_integer()].
+invalid_utf8_indexes(<<C, Rest/binary>>, N, Acc) when C < 16#80 ->
+ %% U+0000 - U+007F - 7 bits
+ invalid_utf8_indexes(Rest, 1 + N, Acc);
+invalid_utf8_indexes(<<C1, C2, Rest/binary>>, N, Acc)
+ when C1 band 16#E0 =:= 16#C0,
+ C2 band 16#C0 =:= 16#80 ->
+ %% U+0080 - U+07FF - 11 bits
+ case ((C1 band 16#1F) bsl 6) bor (C2 band 16#3F) of
+ C when C < 16#80 ->
+ %% Overlong encoding.
+ invalid_utf8_indexes(Rest, 2 + N, [1 + N, N | Acc]);
+ _ ->
+ %% Upper bound U+07FF does not need to be checked
+ invalid_utf8_indexes(Rest, 2 + N, Acc)
+ end;
+invalid_utf8_indexes(<<C1, C2, C3, Rest/binary>>, N, Acc)
+ when C1 band 16#F0 =:= 16#E0,
+ C2 band 16#C0 =:= 16#80,
+ C3 band 16#C0 =:= 16#80 ->
+ %% U+0800 - U+FFFF - 16 bits
+ case ((((C1 band 16#0F) bsl 6) bor (C2 band 16#3F)) bsl 6) bor
+ (C3 band 16#3F) of
+ C when (C < 16#800) orelse (C >= 16#D800 andalso C =< 16#DFFF) ->
+ %% Overlong encoding or surrogate.
+ invalid_utf8_indexes(Rest, 3 + N, [2 + N, 1 + N, N | Acc]);
+ _ ->
+ %% Upper bound U+FFFF does not need to be checked
+ invalid_utf8_indexes(Rest, 3 + N, Acc)
+ end;
+invalid_utf8_indexes(<<C1, C2, C3, C4, Rest/binary>>, N, Acc)
+ when C1 band 16#F8 =:= 16#F0,
+ C2 band 16#C0 =:= 16#80,
+ C3 band 16#C0 =:= 16#80,
+ C4 band 16#C0 =:= 16#80 ->
+ %% U+10000 - U+10FFFF - 21 bits
+ case ((((((C1 band 16#0F) bsl 6) bor (C2 band 16#3F)) bsl 6) bor
+ (C3 band 16#3F)) bsl 6) bor (C4 band 16#3F) of
+ C when (C < 16#10000) orelse (C > 16#10FFFF) ->
+ %% Overlong encoding or invalid code point.
+ invalid_utf8_indexes(Rest, 4 + N, [3 + N, 2 + N, 1 + N, N | Acc]);
+ _ ->
+ invalid_utf8_indexes(Rest, 4 + N, Acc)
+ end;
+invalid_utf8_indexes(<<_, Rest/binary>>, N, Acc) ->
+ %% Invalid char
+ invalid_utf8_indexes(Rest, 1 + N, [N | Acc]);
+invalid_utf8_indexes(<<>>, _N, Acc) ->
+ lists:reverse(Acc).
+
+%%
+%% Tests
+%%
+-include_lib("eunit/include/eunit.hrl").
+-ifdef(TEST).
+
+binary_skip_bytes_test() ->
+ ?assertEqual(<<"foo">>,
+ binary_skip_bytes(<<"foo">>, [])),
+ ?assertEqual(<<"foobar">>,
+ binary_skip_bytes(<<"foo bar">>, [3])),
+ ?assertEqual(<<"foo">>,
+ binary_skip_bytes(<<"foo bar">>, [3, 4, 5, 6])),
+ ?assertEqual(<<"oo bar">>,
+ binary_skip_bytes(<<"foo bar">>, [0])),
+ ok.
+
+invalid_utf8_indexes_test() ->
+ ?assertEqual(
+ [],
+ invalid_utf8_indexes(<<"unicode snowman for you: ", 226, 152, 131>>)),
+ ?assertEqual(
+ [0],
+ invalid_utf8_indexes(<<128>>)),
+ ?assertEqual(
+ [57,59,60,64,66,67],
+ invalid_utf8_indexes(<<"Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; SV1; (",
+ 167, 65, 170, 186, 73, 83, 80, 166, 87, 186, 217, 41, 41>>)),
+ ok.
+
+codepoint_to_bytes_test() ->
+ %% U+0000 - U+007F - 7 bits
+ %% U+0080 - U+07FF - 11 bits
+ %% U+0800 - U+FFFF - 16 bits (excluding UTF-16 surrogate code points)
+ %% U+10000 - U+10FFFF - 21 bits
+ ?assertEqual(
+ <<"a">>,
+ codepoint_to_bytes($a)),
+ ?assertEqual(
+ <<16#c2, 16#80>>,
+ codepoint_to_bytes(16#80)),
+ ?assertEqual(
+ <<16#df, 16#bf>>,
+ codepoint_to_bytes(16#07ff)),
+ ?assertEqual(
+ <<16#ef, 16#bf, 16#bf>>,
+ codepoint_to_bytes(16#ffff)),
+ ?assertEqual(
+ <<16#f4, 16#8f, 16#bf, 16#bf>>,
+ codepoint_to_bytes(16#10ffff)),
+ ok.
+
+bytes_foldl_test() ->
+ ?assertEqual(
+ <<"abc">>,
+ bytes_foldl(fun (B, Acc) -> <<Acc/binary, B/binary>> end, <<>>, <<"abc">>)),
+ ?assertEqual(
+ <<"abc", 226, 152, 131, 228, 184, 173, 194, 133, 244,143,191,191>>,
+ bytes_foldl(fun (B, Acc) -> <<Acc/binary, B/binary>> end, <<>>,
+ <<"abc", 226, 152, 131, 228, 184, 173, 194, 133, 244,143,191,191>>)),
+ ok.
+
+bytes_to_codepoints_test() ->
+ ?assertEqual(
+ "abc" ++ [16#2603, 16#4e2d, 16#85, 16#10ffff],
+ bytes_to_codepoints(<<"abc", 226, 152, 131, 228, 184, 173, 194, 133, 244,143,191,191>>)),
+ ok.
+
+codepoint_foldl_test() ->
+ ?assertEqual(
+ "cba",
+ codepoint_foldl(fun (C, Acc) -> [C | Acc] end, [], <<"abc">>)),
+ ?assertEqual(
+ [16#10ffff, 16#85, 16#4e2d, 16#2603 | "cba"],
+ codepoint_foldl(fun (C, Acc) -> [C | Acc] end, [],
+ <<"abc", 226, 152, 131, 228, 184, 173, 194, 133, 244,143,191,191>>)),
+ ok.
+
+len_test() ->
+ ?assertEqual(
+ 29,
+ len(<<"unicode snowman for you: ", 226, 152, 131, 228, 184, 173, 194, 133, 244, 143, 191, 191>>)),
+ ok.
+
+codepoints_to_bytes_test() ->
+ ?assertEqual(
+ iolist_to_binary(lists:map(fun codepoint_to_bytes/1, lists:seq(1, 1000))),
+ codepoints_to_bytes(lists:seq(1, 1000))),
+ ok.
+
+valid_utf8_bytes_test() ->
+ ?assertEqual(
+ <<"invalid U+11ffff: ">>,
+ valid_utf8_bytes(<<"invalid U+11ffff: ", 244, 159, 191, 191>>)),
+ ?assertEqual(
+ <<"U+10ffff: ", 244, 143, 191, 191>>,
+ valid_utf8_bytes(<<"U+10ffff: ", 244, 143, 191, 191>>)),
+ ?assertEqual(
+ <<"overlong 2-byte encoding (a): ">>,
+ valid_utf8_bytes(<<"overlong 2-byte encoding (a): ", 2#11000001, 2#10100001>>)),
+ ?assertEqual(
+ <<"overlong 2-byte encoding (!): ">>,
+ valid_utf8_bytes(<<"overlong 2-byte encoding (!): ", 2#11000000, 2#10100001>>)),
+ ?assertEqual(
+ <<"mu: ", 194, 181>>,
+ valid_utf8_bytes(<<"mu: ", 194, 181>>)),
+ ?assertEqual(
+ <<"bad coding bytes: ">>,
+ valid_utf8_bytes(<<"bad coding bytes: ", 2#10011111, 2#10111111, 2#11111111>>)),
+ ?assertEqual(
+ <<"low surrogate (unpaired): ">>,
+ valid_utf8_bytes(<<"low surrogate (unpaired): ", 237, 176, 128>>)),
+ ?assertEqual(
+ <<"high surrogate (unpaired): ">>,
+ valid_utf8_bytes(<<"high surrogate (unpaired): ", 237, 191, 191>>)),
+ ?assertEqual(
+ <<"unicode snowman for you: ", 226, 152, 131>>,
+ valid_utf8_bytes(<<"unicode snowman for you: ", 226, 152, 131>>)),
+ ?assertEqual(
+ <<"Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; SV1; (AISPW))">>,
+ valid_utf8_bytes(<<"Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; SV1; (",
+ 167, 65, 170, 186, 73, 83, 80, 166, 87, 186, 217, 41, 41>>)),
+ ok.
+
+-endif.
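
valid_utf8_bytes/1 makes mochiutf8 usable as a sanitizer for untrusted input: invalid bytes and bad sequences are dropped rather than raising, as the user-agent example in valid_utf8_bytes_test shows. A hedged sketch (the wrapper name is illustrative):

%% Drops invalid bytes instead of throwing, unlike mochijson2's decoder.
%% e.g. clean_header(<<"ok ", 16#80, "!">>) =:= <<"ok !">>.
clean_header(Bin) when is_binary(Bin) ->
    mochiutf8:valid_utf8_bytes(Bin).
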
diff --git a/src/mochiweb/mochiweb.app.in b/src/mochiweb/mochiweb.app.in
index b0f90144..c6a2630b 100644
--- a/src/mochiweb/mochiweb.app.in
+++ b/src/mochiweb/mochiweb.app.in
@@ -1,6 +1,6 @@
{application, mochiweb,
[{description, "MochiMedia Web Server"},
- {vsn, "113"},
+ {vsn, "7c2bc2"},
{modules, [
mochihex,
mochijson,
diff --git a/src/mochiweb/mochiweb.app.src b/src/mochiweb/mochiweb.app.src
new file mode 100644
index 00000000..a1c95aae
--- /dev/null
+++ b/src/mochiweb/mochiweb.app.src
@@ -0,0 +1,9 @@
+%% This is generated from src/mochiweb.app.src
+{application, mochiweb,
+ [{description, "MochiMedia Web Server"},
+ {vsn, "7c2bc2"},
+ {modules, []},
+ {registered, []},
+ {mod, {mochiweb_app, []}},
+ {env, []},
+ {applications, [kernel, stdlib, crypto, inets]}]}.
diff --git a/src/mochiweb/mochiweb.erl b/src/mochiweb/mochiweb.erl
index 0f4d52a6..3118028b 100644
--- a/src/mochiweb/mochiweb.erl
+++ b/src/mochiweb/mochiweb.erl
@@ -9,7 +9,6 @@
-export([start/0, stop/0]).
-export([new_request/1, new_response/1]).
-export([all_loaded/0, all_loaded/1, reload/0]).
--export([test/0]).
%% @spec start() -> ok
%% @doc Start the MochiWeb server.
@@ -24,21 +23,6 @@ stop() ->
application:stop(crypto),
Res.
-%% @spec test() -> ok
-%% @doc Run all of the tests for MochiWeb.
-test() ->
- mochiweb_util:test(),
- mochiweb_headers:test(),
- mochiweb_cookies:test(),
- mochihex:test(),
- mochinum:test(),
- mochijson:test(),
- mochiweb_charref:test(),
- mochiweb_html:test(),
- mochifmt:test(),
- test_request(),
- ok.
-
reload() ->
[c:l(Module) || Module <- all_loaded()].
@@ -96,11 +80,6 @@ new_response({Request, Code, Headers}) ->
%% Internal API
-test_request() ->
- R = mochiweb_request:new(z, z, "/foo/bar/baz%20wibble+quux?qs=2", z, []),
- "/foo/bar/baz wibble quux" = R:get(path),
- ok.
-
ensure_started(App) ->
case application:start(App) of
ok ->
@@ -108,3 +87,203 @@ ensure_started(App) ->
{error, {already_started, App}} ->
ok
end.
+
+
+%%
+%% Tests
+%%
+-include_lib("eunit/include/eunit.hrl").
+-ifdef(TEST).
+
+-record(treq, {path, body= <<>>, xreply= <<>>}).
+
+ssl_cert_opts() ->
+ EbinDir = filename:dirname(code:which(?MODULE)),
+ CertDir = filename:join([EbinDir, "..", "support", "test-materials"]),
+ CertFile = filename:join(CertDir, "test_ssl_cert.pem"),
+ KeyFile = filename:join(CertDir, "test_ssl_key.pem"),
+ [{certfile, CertFile}, {keyfile, KeyFile}].
+
+with_server(Transport, ServerFun, ClientFun) ->
+ ServerOpts0 = [{ip, "127.0.0.1"}, {port, 0}, {loop, ServerFun}],
+ ServerOpts = case Transport of
+ plain ->
+ ServerOpts0;
+ ssl ->
+ ServerOpts0 ++ [{ssl, true}, {ssl_opts, ssl_cert_opts()}]
+ end,
+ {ok, Server} = mochiweb_http:start(ServerOpts),
+ Port = mochiweb_socket_server:get(Server, port),
+ Res = (catch ClientFun(Transport, Port)),
+ mochiweb_http:stop(Server),
+ Res.
+
+request_test() ->
+ R = mochiweb_request:new(z, z, "/foo/bar/baz%20wibble+quux?qs=2", z, []),
+ "/foo/bar/baz wibble quux" = R:get(path),
+ ok.
+
+single_http_GET_test() ->
+ do_GET(plain, 1).
+
+single_https_GET_test() ->
+ do_GET(ssl, 1).
+
+multiple_http_GET_test() ->
+ do_GET(plain, 3).
+
+multiple_https_GET_test() ->
+ do_GET(ssl, 3).
+
+hundred_http_GET_test() ->
+ do_GET(plain, 100).
+
+hundred_https_GET_test() ->
+ do_GET(ssl, 100).
+
+single_128_http_POST_test() ->
+ do_POST(plain, 128, 1).
+
+single_128_https_POST_test() ->
+ do_POST(ssl, 128, 1).
+
+single_2k_http_POST_test() ->
+ do_POST(plain, 2048, 1).
+
+single_2k_https_POST_test() ->
+ do_POST(ssl, 2048, 1).
+
+single_100k_http_POST_test() ->
+ do_POST(plain, 102400, 1).
+
+single_100k_https_POST_test() ->
+ do_POST(ssl, 102400, 1).
+
+multiple_100k_http_POST_test() ->
+ do_POST(plain, 102400, 3).
+
+multiple_100k_https_POST_test() ->
+ do_POST(ssl, 102400, 3).
+
+hundred_128_http_POST_test() ->
+ do_POST(plain, 128, 100).
+
+hundred_128_https_POST_test() ->
+ do_POST(ssl, 128, 100).
+
+do_GET(Transport, Times) ->
+ PathPrefix = "/whatever/",
+ ReplyPrefix = "You requested: ",
+ ServerFun = fun (Req) ->
+ Reply = ReplyPrefix ++ Req:get(path),
+ Req:ok({"text/plain", Reply})
+ end,
+ TestReqs = [begin
+ Path = PathPrefix ++ integer_to_list(N),
+ ExpectedReply = list_to_binary(ReplyPrefix ++ Path),
+ #treq{path=Path, xreply=ExpectedReply}
+ end || N <- lists:seq(1, Times)],
+ ClientFun = new_client_fun('GET', TestReqs),
+ ok = with_server(Transport, ServerFun, ClientFun),
+ ok.
+
+do_POST(Transport, Size, Times) ->
+ ServerFun = fun (Req) ->
+ Body = Req:recv_body(),
+ Headers = [{"Content-Type", "application/octet-stream"}],
+ Req:respond({201, Headers, Body})
+ end,
+ TestReqs = [begin
+ Path = "/stuff/" ++ integer_to_list(N),
+ Body = crypto:rand_bytes(Size),
+ #treq{path=Path, body=Body, xreply=Body}
+ end || N <- lists:seq(1, Times)],
+ ClientFun = new_client_fun('POST', TestReqs),
+ ok = with_server(Transport, ServerFun, ClientFun),
+ ok.
+
+new_client_fun(Method, TestReqs) ->
+ fun (Transport, Port) ->
+ client_request(Transport, Port, Method, TestReqs)
+ end.
+
+client_request(Transport, Port, Method, TestReqs) ->
+ Opts = [binary, {active, false}, {packet, http}],
+ SockFun = case Transport of
+ plain ->
+ {ok, Socket} = gen_tcp:connect("127.0.0.1", Port, Opts),
+ fun (recv) ->
+ gen_tcp:recv(Socket, 0);
+ ({recv, Length}) ->
+ gen_tcp:recv(Socket, Length);
+ ({send, Data}) ->
+ gen_tcp:send(Socket, Data);
+ ({setopts, L}) ->
+ inet:setopts(Socket, L)
+ end;
+ ssl ->
+ {ok, Socket} = ssl:connect("127.0.0.1", Port, [{ssl_imp, new} | Opts]),
+ fun (recv) ->
+ ssl:recv(Socket, 0);
+ ({recv, Length}) ->
+ ssl:recv(Socket, Length);
+ ({send, Data}) ->
+ ssl:send(Socket, Data);
+ ({setopts, L}) ->
+ ssl:setopts(Socket, L)
+ end
+ end,
+ client_request(SockFun, Method, TestReqs).
+
+client_request(SockFun, _Method, []) ->
+ {the_end, {error, closed}} = {the_end, SockFun(recv)},
+ ok;
+client_request(SockFun, Method,
+ [#treq{path=Path, body=Body, xreply=ExReply} | Rest]) ->
+ Request = [atom_to_list(Method), " ", Path, " HTTP/1.1\r\n",
+ client_headers(Body, Rest =:= []),
+ "\r\n",
+ Body],
+ ok = SockFun({send, Request}),
+ case Method of
+ 'GET' ->
+ {ok, {http_response, {1,1}, 200, "OK"}} = SockFun(recv);
+ 'POST' ->
+ {ok, {http_response, {1,1}, 201, "Created"}} = SockFun(recv)
+ end,
+ ok = SockFun({setopts, [{packet, httph}]}),
+ {ok, {http_header, _, 'Server', _, "MochiWeb" ++ _}} = SockFun(recv),
+ {ok, {http_header, _, 'Date', _, _}} = SockFun(recv),
+ {ok, {http_header, _, 'Content-Type', _, _}} = SockFun(recv),
+ {ok, {http_header, _, 'Content-Length', _, ConLenStr}} = SockFun(recv),
+ ContentLength = list_to_integer(ConLenStr),
+ {ok, http_eoh} = SockFun(recv),
+ ok = SockFun({setopts, [{packet, raw}]}),
+ {payload, ExReply} = {payload, drain_reply(SockFun, ContentLength, <<>>)},
+ ok = SockFun({setopts, [{packet, http}]}),
+ client_request(SockFun, Method, Rest).
+
+client_headers(Body, IsLastRequest) ->
+ ["Host: localhost\r\n",
+ case Body of
+ <<>> ->
+ "";
+ _ ->
+ ["Content-Type: application/octet-stream\r\n",
+ "Content-Length: ", integer_to_list(byte_size(Body)), "\r\n"]
+ end,
+ case IsLastRequest of
+ true ->
+ "Connection: close\r\n";
+ false ->
+ ""
+ end].
+
+drain_reply(_SockFun, 0, Acc) ->
+ Acc;
+drain_reply(SockFun, Length, Acc) ->
+ Sz = erlang:min(Length, 1024),
+ {ok, B} = SockFun({recv, Sz}),
+ drain_reply(SockFun, Length - Sz, <<Acc/bytes, B/bytes>>).
+
+-endif.
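
The mochiweb.erl hunk sets the pattern the rest of this patch follows: the hand-rolled test/0 entry points are deleted and each module gains a trailing EUnit section guarded by -ifdef(TEST). EUnit discovers any zero-arity function whose name ends in _test, so the whole suite runs with a one-liner; a minimal sketch, assuming the modules were compiled with the TEST macro defined (e.g. erlc -DTEST):

    %% Run every *_test() in the module, reporting each test as it runs.
    1> eunit:test(mochiweb, [verbose]).

The with_server/3 harness is also worth noting: it binds to port 0 so the OS picks a free port, reads the real port back via mochiweb_socket_server:get(Server, port), and catches the client result before stopping the server, so a failing assertion cannot leak a listener between tests.
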
diff --git a/src/mochiweb/mochiweb_acceptor.erl b/src/mochiweb/mochiweb_acceptor.erl
new file mode 100644
index 00000000..79d172c3
--- /dev/null
+++ b/src/mochiweb/mochiweb_acceptor.erl
@@ -0,0 +1,48 @@
+%% @author Bob Ippolito <bob@mochimedia.com>
+%% @copyright 2010 Mochi Media, Inc.
+
+%% @doc MochiWeb acceptor.
+
+-module(mochiweb_acceptor).
+-author('bob@mochimedia.com').
+
+-include("internal.hrl").
+
+-export([start_link/3, init/3]).
+
+start_link(Server, Listen, Loop) ->
+ proc_lib:spawn_link(?MODULE, init, [Server, Listen, Loop]).
+
+init(Server, Listen, Loop) ->
+ T1 = now(),
+ case catch mochiweb_socket:accept(Listen) of
+ {ok, Socket} ->
+ gen_server:cast(Server, {accepted, self(), timer:now_diff(now(), T1)}),
+ call_loop(Loop, Socket);
+ {error, closed} ->
+ exit(normal);
+ {error, timeout} ->
+ exit(normal);
+ {error, esslaccept} ->
+ exit(normal);
+ Other ->
+ error_logger:error_report(
+ [{application, mochiweb},
+ "Accept failed error",
+ lists:flatten(io_lib:format("~p", [Other]))]),
+ exit({error, accept_failed})
+ end.
+
+call_loop({M, F}, Socket) ->
+ M:F(Socket);
+call_loop({M, F, A}, Socket) ->
+ erlang:apply(M, F, [Socket | A]);
+call_loop(Loop, Socket) ->
+ Loop(Socket).
+
+%%
+%% Tests
+%%
+-include_lib("eunit/include/eunit.hrl").
+-ifdef(TEST).
+-endif.
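
mochiweb_acceptor factors the accept loop out of the socket server: each acceptor blocks in mochiweb_socket:accept/1, casts its accept latency back to the server, and hands the socket to the configured loop. call_loop/2 accepts three callback shapes; a hedged sketch, where my_mod, my_fun, ExtraArg, and the bound Server and Listen variables are all hypothetical:

    %% Any of these Loop terms is accepted by call_loop/2:
    mochiweb_acceptor:start_link(Server, Listen, {my_mod, my_fun}),
    mochiweb_acceptor:start_link(Server, Listen, {my_mod, my_fun, [ExtraArg]}),
    mochiweb_acceptor:start_link(Server, Listen, fun my_mod:my_fun/1).
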
diff --git a/src/mochiweb/mochiweb_app.erl b/src/mochiweb/mochiweb_app.erl
index 2b437f6c..5d67787b 100644
--- a/src/mochiweb/mochiweb_app.erl
+++ b/src/mochiweb/mochiweb_app.erl
@@ -18,3 +18,10 @@ start(_Type, _StartArgs) ->
%% @doc application stop callback for mochiweb.
stop(_State) ->
ok.
+
+%%
+%% Tests
+%%
+-include_lib("eunit/include/eunit.hrl").
+-ifdef(TEST).
+-endif.
diff --git a/src/mochiweb/mochiweb_charref.erl b/src/mochiweb/mochiweb_charref.erl
index d037d2f8..99cd5502 100644
--- a/src/mochiweb/mochiweb_charref.erl
+++ b/src/mochiweb/mochiweb_charref.erl
@@ -3,7 +3,7 @@
%% @doc Converts HTML 4 charrefs and entities to codepoints.
-module(mochiweb_charref).
--export([charref/1, test/0]).
+-export([charref/1]).
%% External API.
@@ -27,16 +27,6 @@ charref([$# | L]) ->
charref(L) ->
entity(L).
-%% @spec test() -> ok
-%% @doc Run tests for mochiweb_charref.
-test() ->
- 1234 = charref("#1234"),
- 255 = charref("#xfF"),
- 255 = charref("#XFf"),
- 38 = charref("amp"),
- undefined = charref("not_an_entity"),
- ok.
-
%% Internal API.
entity("nbsp") -> 160;
@@ -293,3 +283,26 @@ entity("rsaquo") -> 8250;
entity("euro") -> 8364;
entity(_) -> undefined.
+
+%%
+%% Tests
+%%
+-include_lib("eunit/include/eunit.hrl").
+-ifdef(TEST).
+
+exhaustive_entity_test() ->
+ T = mochiweb_cover:clause_lookup_table(?MODULE, entity),
+ [?assertEqual(V, entity(K)) || {K, V} <- T].
+
+charref_test() ->
+ 1234 = charref("#1234"),
+ 255 = charref("#xfF"),
+ 255 = charref(<<"#XFf">>),
+ 38 = charref("amp"),
+ 38 = charref(<<"amp">>),
+ undefined = charref("not_an_entity"),
+ undefined = charref("#not_an_entity"),
+ undefined = charref("#xnot_an_entity"),
+ ok.
+
+-endif.
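
exhaustive_entity_test/0 is the first user of the new mochiweb_cover helper: rather than spot-checking a few entities, it lifts every entity/1 clause into {Name, Codepoint} pairs and asserts them all. The converted charref_test also pins down that charref/1 accepts strings and binaries alike; a usage sketch:

    1> mochiweb_charref:charref("amp").
    38
    2> mochiweb_charref:charref(<<"#x26">>).
    38
    3> mochiweb_charref:charref("#xnot_an_entity").
    undefined
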
diff --git a/src/mochiweb/mochiweb_cookies.erl b/src/mochiweb/mochiweb_cookies.erl
index 61711ff0..c090b714 100644
--- a/src/mochiweb/mochiweb_cookies.erl
+++ b/src/mochiweb/mochiweb_cookies.erl
@@ -4,7 +4,7 @@
%% @doc HTTP Cookie parsing and generating (RFC 2109, RFC 2965).
-module(mochiweb_cookies).
--export([parse_cookie/1, cookie/3, cookie/2, test/0]).
+-export([parse_cookie/1, cookie/3, cookie/2]).
-define(QUOTE, $\").
@@ -130,13 +130,6 @@ parse_cookie("") ->
parse_cookie(Cookie) ->
parse_cookie(Cookie, []).
-%% @spec test() -> ok
-%% @doc Run tests for mochiweb_cookies.
-test() ->
- parse_cookie_test(),
- cookie_test(),
- ok.
-
%% Internal API
parse_cookie([], Acc) ->
@@ -198,24 +191,6 @@ skip_past_separator([$, | Rest]) ->
skip_past_separator([_ | Rest]) ->
skip_past_separator(Rest).
-parse_cookie_test() ->
- %% RFC example
- C1 = "$Version=\"1\"; Customer=\"WILE_E_COYOTE\"; $Path=\"/acme\";
- Part_Number=\"Rocket_Launcher_0001\"; $Path=\"/acme\";
- Shipping=\"FedEx\"; $Path=\"/acme\"",
- [
- {"Customer","WILE_E_COYOTE"},
- {"Part_Number","Rocket_Launcher_0001"},
- {"Shipping","FedEx"}
- ] = parse_cookie(C1),
- %% Potential edge cases
- [{"foo", "x"}] = parse_cookie("foo=\"\\x\""),
- [] = parse_cookie("="),
- [{"foo", ""}, {"bar", ""}] = parse_cookie(" foo ; bar "),
- [{"foo", ""}, {"bar", ""}] = parse_cookie("foo=;bar="),
- [{"foo", "\";"}, {"bar", ""}] = parse_cookie("foo = \"\\\";\";bar "),
- [{"foo", "\";bar"}] = parse_cookie("foo=\"\\\";bar").
-
any_to_list(V) when is_list(V) ->
V;
any_to_list(V) when is_atom(V) ->
@@ -225,6 +200,81 @@ any_to_list(V) when is_binary(V) ->
any_to_list(V) when is_integer(V) ->
integer_to_list(V).
+%%
+%% Tests
+%%
+-include_lib("eunit/include/eunit.hrl").
+-ifdef(TEST).
+
+quote_test() ->
+ %% ?assertError eunit macro is not compatible with coverage module
+ try quote(":wq")
+ catch error:{cookie_quoting_required, ":wq"} -> ok
+ end,
+ ?assertEqual(
+ "foo",
+ quote(foo)),
+ ok.
+
+parse_cookie_test() ->
+ %% RFC example
+ C1 = "$Version=\"1\"; Customer=\"WILE_E_COYOTE\"; $Path=\"/acme\";
+ Part_Number=\"Rocket_Launcher_0001\"; $Path=\"/acme\";
+ Shipping=\"FedEx\"; $Path=\"/acme\"",
+ ?assertEqual(
+ [{"Customer","WILE_E_COYOTE"},
+ {"Part_Number","Rocket_Launcher_0001"},
+ {"Shipping","FedEx"}],
+ parse_cookie(C1)),
+ %% Potential edge cases
+ ?assertEqual(
+ [{"foo", "x"}],
+ parse_cookie("foo=\"\\x\"")),
+ ?assertEqual(
+ [],
+ parse_cookie("=")),
+ ?assertEqual(
+ [{"foo", ""}, {"bar", ""}],
+ parse_cookie(" foo ; bar ")),
+ ?assertEqual(
+ [{"foo", ""}, {"bar", ""}],
+ parse_cookie("foo=;bar=")),
+ ?assertEqual(
+ [{"foo", "\";"}, {"bar", ""}],
+ parse_cookie("foo = \"\\\";\";bar ")),
+ ?assertEqual(
+ [{"foo", "\";bar"}],
+ parse_cookie("foo=\"\\\";bar")),
+ ?assertEqual(
+ [],
+ parse_cookie([])),
+ ?assertEqual(
+ [{"foo", "bar"}, {"baz", "wibble"}],
+ parse_cookie("foo=bar , baz=wibble ")),
+ ok.
+
+domain_test() ->
+ ?assertEqual(
+ {"Set-Cookie",
+ "Customer=WILE_E_COYOTE; "
+ "Version=1; "
+ "Domain=acme.com; "
+ "HttpOnly"},
+ cookie("Customer", "WILE_E_COYOTE",
+ [{http_only, true}, {domain, "acme.com"}])),
+ ok.
+
+local_time_test() ->
+ {"Set-Cookie", S} = cookie("Customer", "WILE_E_COYOTE",
+ [{max_age, 111}, {secure, true}]),
+ ?assertMatch(
+ ["Customer=WILE_E_COYOTE",
+ " Version=1",
+ " Expires=" ++ _,
+ " Max-Age=111",
+ " Secure"],
+ string:tokens(S, ";")),
+ ok.
cookie_test() ->
C1 = {"Set-Cookie",
@@ -238,8 +288,8 @@ cookie_test() ->
C1 = cookie(<<"Customer">>, <<"WILE_E_COYOTE">>, [{path, <<"/acme">>}]),
{"Set-Cookie","=NoKey; Version=1"} = cookie("", "NoKey", []),
-
- LocalTime = calendar:universal_time_to_local_time({{2007, 5, 15}, {13, 45, 33}}),
+ {"Set-Cookie","=NoKey; Version=1"} = cookie("", "NoKey"),
+ LocalTime = calendar:universal_time_to_local_time({{2007, 5, 15}, {13, 45, 33}}),
C2 = {"Set-Cookie",
"Customer=WILE_E_COYOTE; "
"Version=1; "
@@ -255,3 +305,5 @@ cookie_test() ->
C3 = cookie("Customer", "WILE_E_COYOTE",
[{max_age, 86417}, {local_time, LocalTime}]),
ok.
+
+-endif.
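
The cookie tests move to ?assertEqual/?assertMatch, which report the offending value on failure instead of a bare badmatch, and they add coverage the old test/0 lacked: a [] input and comma-separated pairs. Typical round-trip usage, as a sketch (the Expires attribute produced by max_age depends on the local clock, so it is not shown):

    1> mochiweb_cookies:parse_cookie("foo=bar , baz=wibble ").
    [{"foo","bar"},{"baz","wibble"}]
    2> mochiweb_cookies:cookie("sid", "abc", [{http_only, true}]).
    {"Set-Cookie","sid=abc; Version=1; HttpOnly"}
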
diff --git a/src/mochiweb/mochiweb_cover.erl b/src/mochiweb/mochiweb_cover.erl
new file mode 100644
index 00000000..6a14ef51
--- /dev/null
+++ b/src/mochiweb/mochiweb_cover.erl
@@ -0,0 +1,75 @@
+%% @author Bob Ippolito <bob@mochimedia.com>
+%% @copyright 2010 Mochi Media, Inc.
+
+%% @doc Workarounds for various cover deficiencies.
+-module(mochiweb_cover).
+-export([get_beam/1, get_abstract_code/1,
+ get_clauses/2, clause_lookup_table/1]).
+-export([clause_lookup_table/2]).
+
+%% Internal
+
+get_beam(Module) ->
+ {Module, Beam, _Path} = code:get_object_code(Module),
+ Beam.
+
+get_abstract_code(Beam) ->
+ {ok, {_Module,
+ [{abstract_code,
+ {raw_abstract_v1, L}}]}} = beam_lib:chunks(Beam, [abstract_code]),
+ L.
+
+get_clauses(Function, Code) ->
+ [L] = [Clauses || {function, _, FName, _, Clauses}
+ <- Code, FName =:= Function],
+ L.
+
+clause_lookup_table(Module, Function) ->
+ clause_lookup_table(
+ get_clauses(Function,
+ get_abstract_code(get_beam(Module)))).
+
+clause_lookup_table(Clauses) ->
+ lists:foldr(fun clause_fold/2, [], Clauses).
+
+clause_fold({clause, _,
+ [InTerm],
+ _Guards=[],
+ [OutTerm]},
+ Acc) ->
+ try [{erl_parse:normalise(InTerm), erl_parse:normalise(OutTerm)} | Acc]
+ catch error:_ -> Acc
+ end;
+clause_fold(_, Acc) ->
+ Acc.
+
+%%
+%% Tests
+%%
+-include_lib("eunit/include/eunit.hrl").
+-ifdef(TEST).
+foo_table(a) -> b;
+foo_table("a") -> <<"b">>;
+foo_table(123) -> {4, 3, 2};
+foo_table([list]) -> [];
+foo_table([list1, list2]) -> [list1, list2, list3];
+foo_table(ignored) -> some, code, ignored;
+foo_table(Var) -> Var.
+
+foo_table_test() ->
+ T = clause_lookup_table(?MODULE, foo_table),
+ [?assertEqual(V, foo_table(K)) || {K, V} <- T].
+
+clause_lookup_table_test() ->
+ ?assertEqual(b, foo_table(a)),
+ ?assertEqual(ignored, foo_table(ignored)),
+ ?assertEqual('Var', foo_table('Var')),
+ ?assertEqual(
+ [{a, b},
+ {"a", <<"b">>},
+ {123, {4, 3, 2}},
+ {[list], []},
+ {[list1, list2], [list1, list2, list3]}],
+ clause_lookup_table(?MODULE, foo_table)).
+
+-endif.
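
clause_lookup_table/2 is the machinery behind the exhaustive tests elsewhere in this patch: it fetches the module's beam, decodes its abstract_code chunk (so the module must be compiled with debug_info), collects the clauses of the named function, and normalises every single-pattern, guard-free clause into an {Input, Output} pair. Clauses it cannot normalise are skipped, which is exactly what the foo_table/1 fixture above exercises. A minimal sketch of the idiom in another module (color/1 is a hypothetical table function):

    color(red)   -> <<"#f00">>;
    color(green) -> <<"#0f0">>;
    color(blue)  -> <<"#00f">>.

    exhaustive_color_test() ->
        T = mochiweb_cover:clause_lookup_table(?MODULE, color),
        [?assertEqual(V, color(K)) || {K, V} <- T].
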
diff --git a/src/mochiweb/mochiweb_echo.erl b/src/mochiweb/mochiweb_echo.erl
index f32d6803..6f7872b9 100644
--- a/src/mochiweb/mochiweb_echo.erl
+++ b/src/mochiweb/mochiweb_echo.erl
@@ -18,9 +18,9 @@ start() ->
{loop, {?MODULE, loop}}]).
loop(Socket) ->
- case gen_tcp:recv(Socket, 0, 30000) of
+ case mochiweb_socket:recv(Socket, 0, 30000) of
{ok, Data} ->
- case gen_tcp:send(Socket, Data) of
+ case mochiweb_socket:send(Socket, Data) of
ok ->
loop(Socket);
_ ->
@@ -29,3 +29,10 @@ loop(Socket) ->
_Other ->
exit(normal)
end.
+
+%%
+%% Tests
+%%
+-include_lib("eunit/include/eunit.hrl").
+-ifdef(TEST).
+-endif.
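
Swapping gen_tcp for mochiweb_socket is what lets the echo example, and the HTTP loop below, run unchanged over SSL: the wrapper dispatches on the socket term it is handed. Its representation is not part of this diff, but the usual shape of such a wrapper is a tagged tuple for SSL sockets; a hedged sketch of the dispatch idea, not taken from this patch:

    %% Assumed representation: plain sockets are ports, SSL sockets are
    %% wrapped as {ssl, SslSocket}.
    recv({ssl, Socket}, Length, Timeout) -> ssl:recv(Socket, Length, Timeout);
    recv(Socket, Length, Timeout) -> gen_tcp:recv(Socket, Length, Timeout).
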
diff --git a/src/mochiweb/mochiweb_headers.erl b/src/mochiweb/mochiweb_headers.erl
index d90fd679..4fce9838 100644
--- a/src/mochiweb/mochiweb_headers.erl
+++ b/src/mochiweb/mochiweb_headers.erl
@@ -10,66 +10,11 @@
-export([default/3, enter_from_list/2, default_from_list/2]).
-export([to_list/1, make/1]).
-export([from_binary/1]).
--export([test/0]).
%% @type headers().
%% @type key() = atom() | binary() | string().
%% @type value() = atom() | binary() | string() | integer().
-%% @spec test() -> ok
-%% @doc Run tests for this module.
-test() ->
- H = ?MODULE:make([{hdr, foo}, {"Hdr", "bar"}, {'Hdr', 2}]),
- [{hdr, "foo, bar, 2"}] = ?MODULE:to_list(H),
- H1 = ?MODULE:insert(taco, grande, H),
- [{hdr, "foo, bar, 2"}, {taco, "grande"}] = ?MODULE:to_list(H1),
- H2 = ?MODULE:make([{"Set-Cookie", "foo"}]),
- [{"Set-Cookie", "foo"}] = ?MODULE:to_list(H2),
- H3 = ?MODULE:insert("Set-Cookie", "bar", H2),
- [{"Set-Cookie", "foo"}, {"Set-Cookie", "bar"}] = ?MODULE:to_list(H3),
- "foo, bar" = ?MODULE:get_value("set-cookie", H3),
- {value, {"Set-Cookie", "foo, bar"}} = ?MODULE:lookup("set-cookie", H3),
- undefined = ?MODULE:get_value("shibby", H3),
- none = ?MODULE:lookup("shibby", H3),
- H4 = ?MODULE:insert("content-type",
- "application/x-www-form-urlencoded; charset=utf8",
- H3),
- "application/x-www-form-urlencoded" = ?MODULE:get_primary_value(
- "content-type", H4),
- H4 = ?MODULE:delete_any("nonexistent-header", H4),
- H3 = ?MODULE:delete_any("content-type", H4),
- HB = <<"Content-Length: 47\r\nContent-Type: text/plain\r\n\r\n">>,
- H_HB = ?MODULE:from_binary(HB),
- H_HB = ?MODULE:from_binary(binary_to_list(HB)),
- "47" = ?MODULE:get_value("Content-Length", H_HB),
- "text/plain" = ?MODULE:get_value("Content-Type", H_HB),
- L_H_HB = ?MODULE:to_list(H_HB),
- 2 = length(L_H_HB),
- true = lists:member({'Content-Length', "47"}, L_H_HB),
- true = lists:member({'Content-Type', "text/plain"}, L_H_HB),
- HL = [ <<"Content-Length: 47\r\n">>, <<"Content-Type: text/plain\r\n">> ],
- HL2 = [ "Content-Length: 47\r\n", <<"Content-Type: text/plain\r\n">> ],
- HL3 = [ <<"Content-Length: 47\r\n">>, "Content-Type: text/plain\r\n" ],
- H_HL = ?MODULE:from_binary(HL),
- H_HL = ?MODULE:from_binary(HL2),
- H_HL = ?MODULE:from_binary(HL3),
- "47" = ?MODULE:get_value("Content-Length", H_HL),
- "text/plain" = ?MODULE:get_value("Content-Type", H_HL),
- L_H_HL = ?MODULE:to_list(H_HL),
- 2 = length(L_H_HL),
- true = lists:member({'Content-Length', "47"}, L_H_HL),
- true = lists:member({'Content-Type', "text/plain"}, L_H_HL),
- [] = ?MODULE:to_list(?MODULE:from_binary(<<>>)),
- [] = ?MODULE:to_list(?MODULE:from_binary(<<"">>)),
- [] = ?MODULE:to_list(?MODULE:from_binary(<<"\r\n">>)),
- [] = ?MODULE:to_list(?MODULE:from_binary(<<"\r\n\r\n">>)),
- [] = ?MODULE:to_list(?MODULE:from_binary("")),
- [] = ?MODULE:to_list(?MODULE:from_binary([<<>>])),
- [] = ?MODULE:to_list(?MODULE:from_binary([<<"">>])),
- [] = ?MODULE:to_list(?MODULE:from_binary([<<"\r\n">>])),
- [] = ?MODULE:to_list(?MODULE:from_binary([<<"\r\n\r\n">>])),
- ok.
-
%% @spec empty() -> headers()
%% @doc Create an empty headers structure.
empty() ->
@@ -83,35 +28,34 @@ make(L) when is_list(L) ->
make(T) when is_tuple(T) ->
T.
-%% @spec from_binary(RawHttpHeader()) -> headers()
-%% @type RawHttpHeader() -> string() | binary() | [ string() | binary() ]
-%%
+%% @spec from_binary(iolist()) -> headers()
%% @doc Transforms a raw HTTP header into a mochiweb headers structure.
%%
%% The given raw HTTP header can be one of the following:
%%
-%% 1) A string or a binary representing a full HTTP header ending with
+%% 1) A string or a binary representing a full HTTP header ending with
%% double CRLF.
%% Examples:
+%% ```
%% "Content-Length: 47\r\nContent-Type: text/plain\r\n\r\n"
-%% <<"Content-Length: 47\r\nContent-Type: text/plain\r\n\r\n">>
+%% <<"Content-Length: 47\r\nContent-Type: text/plain\r\n\r\n">>'''
%%
-%% 2) A list of binaries or strings where each element represents a raw
+%% 2) A list of binaries or strings where each element represents a raw
%% HTTP header line ending with a single CRLF.
%% Examples:
-%% [ <<"Content-Length: 47\r\n">>, <<"Content-Type: text/plain\r\n">> ]
-%% [ "Content-Length: 47\r\n", "Content-Type: text/plain\r\n" ]
-%% [ "Content-Length: 47\r\n", <<"Content-Type: text/plain\r\n">> ]
+%% ```
+%% [<<"Content-Length: 47\r\n">>, <<"Content-Type: text/plain\r\n">>]
+%% ["Content-Length: 47\r\n", "Content-Type: text/plain\r\n"]
+%% ["Content-Length: 47\r\n", <<"Content-Type: text/plain\r\n">>]'''
%%
from_binary(RawHttpHeader) when is_binary(RawHttpHeader) ->
from_binary(RawHttpHeader, []);
-
from_binary(RawHttpHeaderList) ->
from_binary(list_to_binary([RawHttpHeaderList, "\r\n"])).
from_binary(RawHttpHeader, Acc) ->
case erlang:decode_packet(httph, RawHttpHeader, []) of
- { ok, {http_header, _, H, _, V}, Rest } ->
+ {ok, {http_header, _, H, _, V}, Rest} ->
from_binary(Rest, [{H, V} | Acc]);
_ ->
make(Acc)
@@ -248,4 +192,108 @@ any_to_list(V) when is_binary(V) ->
any_to_list(V) when is_integer(V) ->
integer_to_list(V).
+%%
+%% Tests.
+%%
+-include_lib("eunit/include/eunit.hrl").
+-ifdef(TEST).
+
+make_test() ->
+ Identity = make([{hdr, foo}]),
+ ?assertEqual(
+ Identity,
+ make(Identity)).
+
+enter_from_list_test() ->
+ H = make([{hdr, foo}]),
+ ?assertEqual(
+ [{baz, "wibble"}, {hdr, "foo"}],
+ to_list(enter_from_list([{baz, wibble}], H))),
+ ?assertEqual(
+ [{hdr, "bar"}],
+ to_list(enter_from_list([{hdr, bar}], H))),
+ ok.
+
+default_from_list_test() ->
+ H = make([{hdr, foo}]),
+ ?assertEqual(
+ [{baz, "wibble"}, {hdr, "foo"}],
+ to_list(default_from_list([{baz, wibble}], H))),
+ ?assertEqual(
+ [{hdr, "foo"}],
+ to_list(default_from_list([{hdr, bar}], H))),
+ ok.
+
+get_primary_value_test() ->
+ H = make([{hdr, foo}, {baz, <<"wibble;taco">>}]),
+ ?assertEqual(
+ "foo",
+ get_primary_value(hdr, H)),
+ ?assertEqual(
+ undefined,
+ get_primary_value(bar, H)),
+ ?assertEqual(
+ "wibble",
+ get_primary_value(<<"baz">>, H)),
+ ok.
+
+set_cookie_test() ->
+ H = make([{"set-cookie", foo}, {"set-cookie", bar}, {"set-cookie", baz}]),
+ ?assertEqual(
+ [{"set-cookie", "foo"}, {"set-cookie", "bar"}, {"set-cookie", "baz"}],
+ to_list(H)),
+ ok.
+
+headers_test() ->
+ H = ?MODULE:make([{hdr, foo}, {"Hdr", "bar"}, {'Hdr', 2}]),
+ [{hdr, "foo, bar, 2"}] = ?MODULE:to_list(H),
+ H1 = ?MODULE:insert(taco, grande, H),
+ [{hdr, "foo, bar, 2"}, {taco, "grande"}] = ?MODULE:to_list(H1),
+ H2 = ?MODULE:make([{"Set-Cookie", "foo"}]),
+ [{"Set-Cookie", "foo"}] = ?MODULE:to_list(H2),
+ H3 = ?MODULE:insert("Set-Cookie", "bar", H2),
+ [{"Set-Cookie", "foo"}, {"Set-Cookie", "bar"}] = ?MODULE:to_list(H3),
+ "foo, bar" = ?MODULE:get_value("set-cookie", H3),
+ {value, {"Set-Cookie", "foo, bar"}} = ?MODULE:lookup("set-cookie", H3),
+ undefined = ?MODULE:get_value("shibby", H3),
+ none = ?MODULE:lookup("shibby", H3),
+ H4 = ?MODULE:insert("content-type",
+ "application/x-www-form-urlencoded; charset=utf8",
+ H3),
+ "application/x-www-form-urlencoded" = ?MODULE:get_primary_value(
+ "content-type", H4),
+ H4 = ?MODULE:delete_any("nonexistent-header", H4),
+ H3 = ?MODULE:delete_any("content-type", H4),
+ HB = <<"Content-Length: 47\r\nContent-Type: text/plain\r\n\r\n">>,
+ H_HB = ?MODULE:from_binary(HB),
+ H_HB = ?MODULE:from_binary(binary_to_list(HB)),
+ "47" = ?MODULE:get_value("Content-Length", H_HB),
+ "text/plain" = ?MODULE:get_value("Content-Type", H_HB),
+ L_H_HB = ?MODULE:to_list(H_HB),
+ 2 = length(L_H_HB),
+ true = lists:member({'Content-Length', "47"}, L_H_HB),
+ true = lists:member({'Content-Type', "text/plain"}, L_H_HB),
+ HL = [ <<"Content-Length: 47\r\n">>, <<"Content-Type: text/plain\r\n">> ],
+ HL2 = [ "Content-Length: 47\r\n", <<"Content-Type: text/plain\r\n">> ],
+ HL3 = [ <<"Content-Length: 47\r\n">>, "Content-Type: text/plain\r\n" ],
+ H_HL = ?MODULE:from_binary(HL),
+ H_HL = ?MODULE:from_binary(HL2),
+ H_HL = ?MODULE:from_binary(HL3),
+ "47" = ?MODULE:get_value("Content-Length", H_HL),
+ "text/plain" = ?MODULE:get_value("Content-Type", H_HL),
+ L_H_HL = ?MODULE:to_list(H_HL),
+ 2 = length(L_H_HL),
+ true = lists:member({'Content-Length', "47"}, L_H_HL),
+ true = lists:member({'Content-Type', "text/plain"}, L_H_HL),
+ [] = ?MODULE:to_list(?MODULE:from_binary(<<>>)),
+ [] = ?MODULE:to_list(?MODULE:from_binary(<<"">>)),
+ [] = ?MODULE:to_list(?MODULE:from_binary(<<"\r\n">>)),
+ [] = ?MODULE:to_list(?MODULE:from_binary(<<"\r\n\r\n">>)),
+ [] = ?MODULE:to_list(?MODULE:from_binary("")),
+ [] = ?MODULE:to_list(?MODULE:from_binary([<<>>])),
+ [] = ?MODULE:to_list(?MODULE:from_binary([<<"">>])),
+ [] = ?MODULE:to_list(?MODULE:from_binary([<<"\r\n">>])),
+ [] = ?MODULE:to_list(?MODULE:from_binary([<<"\r\n\r\n">>])),
+ ok.
+-endif.
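
The new focused tests document the two merging rules worth remembering about this headers structure: repeated keys are folded into a single comma-separated value, with case-insensitive lookups throughout, except Set-Cookie, which keeps one entry per insert in to_list/1. A sketch in the match-assert style of the tests themselves:

    H = mochiweb_headers:make([{"Accept", "text/html"}, {"accept", "text/plain"}]),
    "text/html, text/plain" = mochiweb_headers:get_value("ACCEPT", H),
    [{"Set-Cookie", "a"}, {"Set-Cookie", "b"}] =
        mochiweb_headers:to_list(
            mochiweb_headers:make([{"Set-Cookie", "a"}, {"Set-Cookie", "b"}])).
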
diff --git a/src/mochiweb/mochiweb_html.erl b/src/mochiweb/mochiweb_html.erl
index 77100d50..a15c359c 100644
--- a/src/mochiweb/mochiweb_html.erl
+++ b/src/mochiweb/mochiweb_html.erl
@@ -4,9 +4,9 @@
%% @doc Loosely tokenizes and generates parse trees for HTML 4.
-module(mochiweb_html).
-export([tokens/1, parse/1, parse_tokens/1, to_tokens/1, escape/1,
- escape_attr/1, to_html/1, test/0]).
+ escape_attr/1, to_html/1]).
-% This is a macro to placate syntax highlighters..
+%% This is a macro to placate syntax highlighters.
-define(QUOTE, $\").
-define(SQUOTE, $\').
-define(ADV_COL(S, N),
@@ -35,6 +35,8 @@
-define(IS_LITERAL_SAFE(C),
((C >= $A andalso C =< $Z) orelse (C >= $a andalso C =< $z)
orelse (C >= $0 andalso C =< $9))).
+-define(PROBABLE_CLOSE(C),
+ (C =:= $> orelse ?IS_WHITESPACE(C))).
-record(decoder, {line=1,
column=1,
@@ -89,6 +91,7 @@ to_tokens(T={doctype, _}) ->
to_tokens(T={comment, _}) ->
[T];
to_tokens({Tag0, Acc}) ->
+ %% This is only allowed in sub-tags: {p, [{"class", "foo"}]}
to_tokens({Tag0, [], Acc});
to_tokens({Tag0, Attrs, Acc}) ->
Tag = to_tag(Tag0),
@@ -124,40 +127,6 @@ escape_attr(I) when is_integer(I) ->
escape_attr(F) when is_float(F) ->
escape_attr(mochinum:digits(F), []).
-%% @spec test() -> ok
-%% @doc Run tests for mochiweb_html.
-test() ->
- test_destack(),
- test_tokens(),
- test_tokens2(),
- test_parse(),
- test_parse2(),
- test_parse_tokens(),
- test_escape(),
- test_escape_attr(),
- test_to_html(),
- ok.
-
-
-%% Internal API
-
-test_to_html() ->
- Expect = <<"<html><head><title>hey!</title></head><body><p class=\"foo\">what's up<br /></p><div>sucka</div><!-- comment! --></body></html>">>,
- Expect = iolist_to_binary(
- to_html({html, [],
- [{<<"head">>, [],
- [{title, <<"hey!">>}]},
- {body, [],
- [{p, [{class, foo}], [<<"what's">>, <<" up">>, {br}]},
- {'div', <<"sucka">>},
- {comment, <<" comment! ">>}]}]})),
- Expect1 = <<"<!DOCTYPE html PUBLIC \"-//W3C//DTD XHTML 1.0 Transitional//EN\" \"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd\">">>,
- Expect1 = iolist_to_binary(
- to_html({doctype,
- [<<"html">>, <<"PUBLIC">>,
- <<"-//W3C//DTD XHTML 1.0 Transitional//EN">>,
- <<"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">>]})),
- ok.
to_html([], Acc) ->
lists:reverse(Acc);
to_html([{'=', Content} | Rest], Acc) ->
@@ -205,16 +174,6 @@ attrs_to_html([{K, V} | Rest], Acc) ->
[[<<" ">>, escape(K), <<"=\"">>,
escape_attr(V), <<"\"">>] | Acc]).
-test_escape() ->
- <<"&amp;quot;\"word &lt;&lt;up!&amp;quot;">> =
- escape(<<"&quot;\"word <<up!&quot;">>),
- ok.
-
-test_escape_attr() ->
- <<"&amp;quot;&quot;word &lt;&lt;up!&amp;quot;">> =
- escape_attr(<<"&quot;\"word <<up!&quot;">>),
- ok.
-
escape([], Acc) ->
list_to_binary(lists:reverse(Acc));
escape("<" ++ Rest, Acc) ->
@@ -257,6 +216,9 @@ to_tokens([{Tag0, [T0={'=', _C0} | R1]} | Rest], Acc) ->
to_tokens([{Tag0, [T0={comment, _C0} | R1]} | Rest], Acc) ->
%% Allow {comment, iolist()}
to_tokens([{Tag0, R1} | Rest], [T0 | Acc]);
+to_tokens([{Tag0, [T0={pi, _S0, _A0} | R1]} | Rest], Acc) ->
+ %% Allow {pi, binary(), list()}
+ to_tokens([{Tag0, R1} | Rest], [T0 | Acc]);
to_tokens([{Tag0, [{T0, A0=[{_, _} | _]} | R1]} | Rest], Acc) ->
%% Allow {p, [{"class", "foo"}]}
to_tokens([{Tag0, [{T0, A0, []} | R1]} | Rest], Acc);
@@ -290,39 +252,6 @@ to_tokens([{Tag0, [B | R1]} | Rest], Acc) when is_binary(B) ->
Tag = to_tag(Tag0),
to_tokens([{Tag, R1} | Rest], [{data, B, false} | Acc]).
-test_tokens() ->
- [{start_tag, <<"foo">>, [{<<"bar">>, <<"baz">>},
- {<<"wibble">>, <<"wibble">>},
- {<<"alice">>, <<"bob">>}], true}] =
- tokens(<<"<foo bar=baz wibble='wibble' alice=\"bob\"/>">>),
- [{start_tag, <<"foo">>, [{<<"bar">>, <<"baz">>},
- {<<"wibble">>, <<"wibble">>},
- {<<"alice">>, <<"bob">>}], true}] =
- tokens(<<"<foo bar=baz wibble='wibble' alice=bob/>">>),
- [{comment, <<"[if lt IE 7]>\n<style type=\"text/css\">\n.no_ie { display: none; }\n</style>\n<![endif]">>}] =
- tokens(<<"<!--[if lt IE 7]>\n<style type=\"text/css\">\n.no_ie { display: none; }\n</style>\n<![endif]-->">>),
- [{start_tag, <<"script">>, [{<<"type">>, <<"text/javascript">>}], false},
- {data, <<" A= B <= C ">>, false},
- {end_tag, <<"script">>}] =
- tokens(<<"<script type=\"text/javascript\"> A= B <= C </script>">>),
- [{start_tag, <<"script">>, [{<<"type">>, <<"text/javascript">>}], false},
- {data, <<" A= B <= C ">>, false},
- {end_tag, <<"script">>}] =
- tokens(<<"<script type =\"text/javascript\"> A= B <= C </script>">>),
- [{start_tag, <<"script">>, [{<<"type">>, <<"text/javascript">>}], false},
- {data, <<" A= B <= C ">>, false},
- {end_tag, <<"script">>}] =
- tokens(<<"<script type = \"text/javascript\"> A= B <= C </script>">>),
- [{start_tag, <<"script">>, [{<<"type">>, <<"text/javascript">>}], false},
- {data, <<" A= B <= C ">>, false},
- {end_tag, <<"script">>}] =
- tokens(<<"<script type= \"text/javascript\"> A= B <= C </script>">>),
- [{start_tag, <<"textarea">>, [], false},
- {data, <<"<html></body>">>, false},
- {end_tag, <<"textarea">>}] =
- tokens(<<"<textarea><html></body></textarea>">>),
- ok.
-
tokens(B, S=#decoder{offset=O}, Acc) ->
case B of
<<_:O/binary>> ->
@@ -374,7 +303,8 @@ tokenize(B, S=#decoder{offset=O}) ->
{{end_tag, Tag}, S2};
<<_:O/binary, "<", C, _/binary>> when ?IS_WHITESPACE(C) ->
%% This isn't really strict HTML
- tokenize_data(B, ?INC_COL(S));
+ {{data, Data, _Whitespace}, S1} = tokenize_data(B, ?INC_COL(S)),
+ {{data, <<$<, Data/binary>>, false}, S1};
<<_:O/binary, "<", _/binary>> ->
{Tag, S1} = tokenize_literal(B, ?INC_COL(S)),
{Attrs, S2} = tokenize_attributes(B, S1),
@@ -385,149 +315,6 @@ tokenize(B, S=#decoder{offset=O}) ->
tokenize_data(B, S)
end.
-test_parse() ->
- D0 = <<"<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.01//EN\" \"http://www.w3.org/TR/html4/strict.dtd\">
-<html>
- <head>
- <meta http-equiv=\"Content-Type\" content=\"text/html; charset=UTF-8\">
- <title>Foo</title>
- <link rel=\"stylesheet\" type=\"text/css\" href=\"/static/rel/dojo/resources/dojo.css\" media=\"screen\">
- <link rel=\"stylesheet\" type=\"text/css\" href=\"/static/foo.css\" media=\"screen\">
- <!--[if lt IE 7]>
- <style type=\"text/css\">
- .no_ie { display: none; }
- </style>
- <![endif]-->
- <link rel=\"icon\" href=\"/static/images/favicon.ico\" type=\"image/x-icon\">
- <link rel=\"shortcut icon\" href=\"/static/images/favicon.ico\" type=\"image/x-icon\">
- </head>
- <body id=\"home\" class=\"tundra\"><![CDATA[&lt;<this<!-- is -->CDATA>&gt;]]></body>
-</html>">>,
- Expect = {<<"html">>, [],
- [{<<"head">>, [],
- [{<<"meta">>,
- [{<<"http-equiv">>,<<"Content-Type">>},
- {<<"content">>,<<"text/html; charset=UTF-8">>}],
- []},
- {<<"title">>,[],[<<"Foo">>]},
- {<<"link">>,
- [{<<"rel">>,<<"stylesheet">>},
- {<<"type">>,<<"text/css">>},
- {<<"href">>,<<"/static/rel/dojo/resources/dojo.css">>},
- {<<"media">>,<<"screen">>}],
- []},
- {<<"link">>,
- [{<<"rel">>,<<"stylesheet">>},
- {<<"type">>,<<"text/css">>},
- {<<"href">>,<<"/static/foo.css">>},
- {<<"media">>,<<"screen">>}],
- []},
- {comment,<<"[if lt IE 7]>\n <style type=\"text/css\">\n .no_ie { display: none; }\n </style>\n <![endif]">>},
- {<<"link">>,
- [{<<"rel">>,<<"icon">>},
- {<<"href">>,<<"/static/images/favicon.ico">>},
- {<<"type">>,<<"image/x-icon">>}],
- []},
- {<<"link">>,
- [{<<"rel">>,<<"shortcut icon">>},
- {<<"href">>,<<"/static/images/favicon.ico">>},
- {<<"type">>,<<"image/x-icon">>}],
- []}]},
- {<<"body">>,
- [{<<"id">>,<<"home">>},
- {<<"class">>,<<"tundra">>}],
- [<<"&lt;<this<!-- is -->CDATA>&gt;">>]}]},
- Expect = parse(D0),
- ok.
-
-test_tokens2() ->
- D0 = <<"<channel><title>from __future__ import *</title><link>http://bob.pythonmac.org</link><description>Bob's Rants</description></channel>">>,
- Expect = [{start_tag,<<"channel">>,[],false},
- {start_tag,<<"title">>,[],false},
- {data,<<"from __future__ import *">>,false},
- {end_tag,<<"title">>},
- {start_tag,<<"link">>,[],true},
- {data,<<"http://bob.pythonmac.org">>,false},
- {end_tag,<<"link">>},
- {start_tag,<<"description">>,[],false},
- {data,<<"Bob's Rants">>,false},
- {end_tag,<<"description">>},
- {end_tag,<<"channel">>}],
- Expect = tokens(D0),
- ok.
-
-test_parse2() ->
- D0 = <<"<channel><title>from __future__ import *</title><link>http://bob.pythonmac.org<br>foo</link><description>Bob's Rants</description></channel>">>,
- Expect = {<<"channel">>,[],
- [{<<"title">>,[],[<<"from __future__ import *">>]},
- {<<"link">>,[],[
- <<"http://bob.pythonmac.org">>,
- {<<"br">>,[],[]},
- <<"foo">>]},
- {<<"description">>,[],[<<"Bob's Rants">>]}]},
- Expect = parse(D0),
- ok.
-
-test_parse_tokens() ->
- D0 = [{doctype,[<<"HTML">>,<<"PUBLIC">>,<<"-//W3C//DTD HTML 4.01 Transitional//EN">>]},
- {data,<<"\n">>,true},
- {start_tag,<<"html">>,[],false}],
- {<<"html">>, [], []} = parse_tokens(D0),
- D1 = D0 ++ [{end_tag, <<"html">>}],
- {<<"html">>, [], []} = parse_tokens(D1),
- D2 = D0 ++ [{start_tag, <<"body">>, [], false}],
- {<<"html">>, [], [{<<"body">>, [], []}]} = parse_tokens(D2),
- D3 = D0 ++ [{start_tag, <<"head">>, [], false},
- {end_tag, <<"head">>},
- {start_tag, <<"body">>, [], false}],
- {<<"html">>, [], [{<<"head">>, [], []}, {<<"body">>, [], []}]} = parse_tokens(D3),
- D4 = D3 ++ [{data,<<"\n">>,true},
- {start_tag,<<"div">>,[{<<"class">>,<<"a">>}],false},
- {start_tag,<<"a">>,[{<<"name">>,<<"#anchor">>}],false},
- {end_tag,<<"a">>},
- {end_tag,<<"div">>},
- {start_tag,<<"div">>,[{<<"class">>,<<"b">>}],false},
- {start_tag,<<"div">>,[{<<"class">>,<<"c">>}],false},
- {end_tag,<<"div">>},
- {end_tag,<<"div">>}],
- {<<"html">>, [],
- [{<<"head">>, [], []},
- {<<"body">>, [],
- [{<<"div">>, [{<<"class">>, <<"a">>}], [{<<"a">>, [{<<"name">>, <<"#anchor">>}], []}]},
- {<<"div">>, [{<<"class">>, <<"b">>}], [{<<"div">>, [{<<"class">>, <<"c">>}], []}]}
- ]}]} = parse_tokens(D4),
- D5 = [{start_tag,<<"html">>,[],false},
- {data,<<"\n">>,true},
- {data,<<"boo">>,false},
- {data,<<"hoo">>,false},
- {data,<<"\n">>,true},
- {end_tag,<<"html">>}],
- {<<"html">>, [], [<<"\nboohoo\n">>]} = parse_tokens(D5),
- D6 = [{start_tag,<<"html">>,[],false},
- {data,<<"\n">>,true},
- {data,<<"\n">>,true},
- {end_tag,<<"html">>}],
- {<<"html">>, [], []} = parse_tokens(D6),
- D7 = [{start_tag,<<"html">>,[],false},
- {start_tag,<<"ul">>,[],false},
- {start_tag,<<"li">>,[],false},
- {data,<<"word">>,false},
- {start_tag,<<"li">>,[],false},
- {data,<<"up">>,false},
- {end_tag,<<"li">>},
- {start_tag,<<"li">>,[],false},
- {data,<<"fdsa">>,false},
- {start_tag,<<"br">>,[],true},
- {data,<<"asdf">>,false},
- {end_tag,<<"ul">>},
- {end_tag,<<"html">>}],
- {<<"html">>, [],
- [{<<"ul">>, [],
- [{<<"li">>, [], [<<"word">>]},
- {<<"li">>, [], [<<"up">>]},
- {<<"li">>, [], [<<"fdsa">>,{<<"br">>, [], []}, <<"asdf">>]}]}]} = parse_tokens(D7),
- ok.
-
tree_data([{data, Data, Whitespace} | Rest], AllWhitespace, Acc) ->
tree_data(Rest, (Whitespace andalso AllWhitespace), [Data | Acc]);
tree_data(Rest, AllWhitespace, Acc) ->
@@ -556,7 +343,9 @@ tree(L=[{data, _Data, _Whitespace} | _], S) ->
tree(Rest, S);
{Data, false, Rest} ->
tree(Rest, append_stack_child(Data, S))
- end.
+ end;
+tree([{doctype, _} | Rest], Stack) ->
+ tree(Rest, Stack).
norm({Tag, Attrs}) ->
{norm(Tag), [{norm(K), iolist_to_binary(V)} || {K, V} <- Attrs], []};
@@ -565,21 +354,6 @@ norm(Tag) when is_binary(Tag) ->
norm(Tag) ->
list_to_binary(string:to_lower(Tag)).
-test_destack() ->
- {<<"a">>, [], []} =
- destack([{<<"a">>, [], []}]),
- {<<"a">>, [], [{<<"b">>, [], []}]} =
- destack([{<<"b">>, [], []}, {<<"a">>, [], []}]),
- {<<"a">>, [], [{<<"b">>, [], [{<<"c">>, [], []}]}]} =
- destack([{<<"c">>, [], []}, {<<"b">>, [], []}, {<<"a">>, [], []}]),
- [{<<"a">>, [], [{<<"b">>, [], [{<<"c">>, [], []}]}]}] =
- destack(<<"b">>,
- [{<<"c">>, [], []}, {<<"b">>, [], []}, {<<"a">>, [], []}]),
- [{<<"b">>, [], [{<<"c">>, [], []}]}, {<<"a">>, [], []}] =
- destack(<<"c">>,
- [{<<"c">>, [], []}, {<<"b">>, [], []},{<<"a">>, [], []}]),
- ok.
-
stack(T1={TN, _, _}, Stack=[{TN, _, _} | _Rest])
when TN =:= <<"li">> orelse TN =:= <<"option">> ->
[T1 | destack(TN, Stack)];
@@ -719,9 +493,10 @@ find_qgt(Bin, S=#decoder{offset=O}) ->
case Bin of
<<_:O/binary, "?>", _/binary>> ->
?ADV_COL(S, 2);
- <<_:O/binary, C, _/binary>> ->
- find_qgt(Bin, ?INC_CHAR(S, C));
- _ ->
+ %% tokenize_attributes takes care of this state:
+ %% <<_:O/binary, C, _/binary>> ->
+ %% find_qgt(Bin, ?INC_CHAR(S, C));
+ <<_:O/binary>> ->
S
end.
@@ -766,7 +541,7 @@ tokenize_charref(Bin, S=#decoder{offset=O}, Start) ->
<<_:Start1/binary, R:Len1/binary, _/binary>> = Bin,
R;
Unichar ->
- list_to_binary(xmerl_ucs:to_utf8(Unichar))
+ mochiutf8:codepoint_to_bytes(Unichar)
end,
{{data, Data, false}, ?INC_COL(S)};
_ ->
@@ -791,11 +566,10 @@ tokenize_doctype(Bin, S=#decoder{offset=O}, Acc) ->
tokenize_word_or_literal(Bin, S=#decoder{offset=O}) ->
case Bin of
- <<_:O/binary, C, _/binary>> when ?IS_WHITESPACE(C) ->
- {error, {whitespace, [C], S}};
<<_:O/binary, C, _/binary>> when C =:= ?QUOTE orelse C =:= ?SQUOTE ->
tokenize_word(Bin, ?INC_COL(S), C);
- _ ->
+ <<_:O/binary, C, _/binary>> when not ?IS_WHITESPACE(C) ->
+ %% Sanity check for whitespace
tokenize_literal(Bin, S, [])
end.
@@ -852,13 +626,14 @@ tokenize_script(Bin, S=#decoder{offset=O}) ->
tokenize_script(Bin, S=#decoder{offset=O}, Start) ->
case Bin of
%% Just a look-ahead, we want the end_tag separately
- <<_:O/binary, $<, $/, SS, CC, RR, II, PP, TT, _/binary>>
+ <<_:O/binary, $<, $/, SS, CC, RR, II, PP, TT, ZZ, _/binary>>
when (SS =:= $s orelse SS =:= $S) andalso
(CC =:= $c orelse CC =:= $C) andalso
(RR =:= $r orelse RR =:= $R) andalso
(II =:= $i orelse II =:= $I) andalso
(PP =:= $p orelse PP =:= $P) andalso
- (TT=:= $t orelse TT =:= $T) ->
+ (TT=:= $t orelse TT =:= $T) andalso
+ ?PROBABLE_CLOSE(ZZ) ->
Len = O - Start,
<<_:Start/binary, Raw:Len/binary, _/binary>> = Bin,
{{data, Raw, false}, S};
@@ -874,7 +649,7 @@ tokenize_textarea(Bin, S=#decoder{offset=O}) ->
tokenize_textarea(Bin, S=#decoder{offset=O}, Start) ->
case Bin of
%% Just a look-ahead, we want the end_tag separately
- <<_:O/binary, $<, $/, TT, EE, XX, TT2, AA, RR, EE2, AA2, _/binary>>
+ <<_:O/binary, $<, $/, TT, EE, XX, TT2, AA, RR, EE2, AA2, ZZ, _/binary>>
when (TT =:= $t orelse TT =:= $T) andalso
(EE =:= $e orelse EE =:= $E) andalso
(XX =:= $x orelse XX =:= $X) andalso
@@ -882,7 +657,8 @@ tokenize_textarea(Bin, S=#decoder{offset=O}, Start) ->
(AA =:= $a orelse AA =:= $A) andalso
(RR =:= $r orelse RR =:= $R) andalso
(EE2 =:= $e orelse EE2 =:= $E) andalso
- (AA2 =:= $a orelse AA2 =:= $A) ->
+ (AA2 =:= $a orelse AA2 =:= $A) andalso
+ ?PROBABLE_CLOSE(ZZ) ->
Len = O - Start,
<<_:Start/binary, Raw:Len/binary, _/binary>> = Bin,
{{data, Raw, false}, S};
@@ -891,3 +667,395 @@ tokenize_textarea(Bin, S=#decoder{offset=O}, Start) ->
<<_:Start/binary, Raw/binary>> ->
{{data, Raw, false}, S}
end.
+
+
+%%
+%% Tests
+%%
+-include_lib("eunit/include/eunit.hrl").
+-ifdef(TEST).
+
+to_html_test() ->
+ ?assertEqual(
+ <<"<html><head><title>hey!</title></head><body><p class=\"foo\">what's up<br /></p><div>sucka</div>RAW!<!-- comment! --></body></html>">>,
+ iolist_to_binary(
+ to_html({html, [],
+ [{<<"head">>, [],
+ [{title, <<"hey!">>}]},
+ {body, [],
+ [{p, [{class, foo}], [<<"what's">>, <<" up">>, {br}]},
+ {'div', <<"sucka">>},
+ {'=', <<"RAW!">>},
+ {comment, <<" comment! ">>}]}]}))),
+ ?assertEqual(
+ <<"<!DOCTYPE html PUBLIC \"-//W3C//DTD XHTML 1.0 Transitional//EN\" \"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd\">">>,
+ iolist_to_binary(
+ to_html({doctype,
+ [<<"html">>, <<"PUBLIC">>,
+ <<"-//W3C//DTD XHTML 1.0 Transitional//EN">>,
+ <<"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">>]}))),
+ ?assertEqual(
+ <<"<html><?xml:namespace prefix=\"o\" ns=\"urn:schemas-microsoft-com:office:office\"?></html>">>,
+ iolist_to_binary(
+ to_html({<<"html">>,[],
+ [{pi, <<"xml:namespace">>,
+ [{<<"prefix">>,<<"o">>},
+ {<<"ns">>,<<"urn:schemas-microsoft-com:office:office">>}]}]}))),
+ ok.
+
+escape_test() ->
+ ?assertEqual(
+ <<"&amp;quot;\"word &gt;&lt;&lt;up!&amp;quot;">>,
+ escape(<<"&quot;\"word ><<up!&quot;">>)),
+ ?assertEqual(
+ <<"&amp;quot;\"word &gt;&lt;&lt;up!&amp;quot;">>,
+ escape("&quot;\"word ><<up!&quot;")),
+ ?assertEqual(
+ <<"&amp;quot;\"word &gt;&lt;&lt;up!&amp;quot;">>,
+ escape('&quot;\"word ><<up!&quot;')),
+ ok.
+
+escape_attr_test() ->
+ ?assertEqual(
+ <<"&amp;quot;&quot;word &gt;&lt;&lt;up!&amp;quot;">>,
+ escape_attr(<<"&quot;\"word ><<up!&quot;">>)),
+ ?assertEqual(
+ <<"&amp;quot;&quot;word &gt;&lt;&lt;up!&amp;quot;">>,
+ escape_attr("&quot;\"word ><<up!&quot;")),
+ ?assertEqual(
+ <<"&amp;quot;&quot;word &gt;&lt;&lt;up!&amp;quot;">>,
+ escape_attr('&quot;\"word ><<up!&quot;')),
+ ?assertEqual(
+ <<"12345">>,
+ escape_attr(12345)),
+ ?assertEqual(
+ <<"1.5">>,
+ escape_attr(1.5)),
+ ok.
+
+tokens_test() ->
+ ?assertEqual(
+ [{start_tag, <<"foo">>, [{<<"bar">>, <<"baz">>},
+ {<<"wibble">>, <<"wibble">>},
+ {<<"alice">>, <<"bob">>}], true}],
+ tokens(<<"<foo bar=baz wibble='wibble' alice=\"bob\"/>">>)),
+ ?assertEqual(
+ [{start_tag, <<"foo">>, [{<<"bar">>, <<"baz">>},
+ {<<"wibble">>, <<"wibble">>},
+ {<<"alice">>, <<"bob">>}], true}],
+ tokens(<<"<foo bar=baz wibble='wibble' alice=bob/>">>)),
+ ?assertEqual(
+ [{comment, <<"[if lt IE 7]>\n<style type=\"text/css\">\n.no_ie { display: none; }\n</style>\n<![endif]">>}],
+ tokens(<<"<!--[if lt IE 7]>\n<style type=\"text/css\">\n.no_ie { display: none; }\n</style>\n<![endif]-->">>)),
+ ?assertEqual(
+ [{start_tag, <<"script">>, [{<<"type">>, <<"text/javascript">>}], false},
+ {data, <<" A= B <= C ">>, false},
+ {end_tag, <<"script">>}],
+ tokens(<<"<script type=\"text/javascript\"> A= B <= C </script>">>)),
+ ?assertEqual(
+ [{start_tag, <<"script">>, [{<<"type">>, <<"text/javascript">>}], false},
+ {data, <<" A= B <= C ">>, false},
+ {end_tag, <<"script">>}],
+ tokens(<<"<script type =\"text/javascript\"> A= B <= C </script>">>)),
+ ?assertEqual(
+ [{start_tag, <<"script">>, [{<<"type">>, <<"text/javascript">>}], false},
+ {data, <<" A= B <= C ">>, false},
+ {end_tag, <<"script">>}],
+ tokens(<<"<script type = \"text/javascript\"> A= B <= C </script>">>)),
+ ?assertEqual(
+ [{start_tag, <<"script">>, [{<<"type">>, <<"text/javascript">>}], false},
+ {data, <<" A= B <= C ">>, false},
+ {end_tag, <<"script">>}],
+ tokens(<<"<script type= \"text/javascript\"> A= B <= C </script>">>)),
+ ?assertEqual(
+ [{start_tag, <<"textarea">>, [], false},
+ {data, <<"<html></body>">>, false},
+ {end_tag, <<"textarea">>}],
+ tokens(<<"<textarea><html></body></textarea>">>)),
+ ?assertEqual(
+ [{start_tag, <<"textarea">>, [], false},
+ {data, <<"<html></body></textareaz>">>, false}],
+ tokens(<<"<textarea ><html></body></textareaz>">>)),
+ ?assertEqual(
+ [{pi, <<"xml:namespace">>,
+ [{<<"prefix">>,<<"o">>},
+ {<<"ns">>,<<"urn:schemas-microsoft-com:office:office">>}]}],
+ tokens(<<"<?xml:namespace prefix=\"o\" ns=\"urn:schemas-microsoft-com:office:office\"?>">>)),
+ ?assertEqual(
+ [{pi, <<"xml:namespace">>,
+ [{<<"prefix">>,<<"o">>},
+ {<<"ns">>,<<"urn:schemas-microsoft-com:office:office">>}]}],
+ tokens(<<"<?xml:namespace prefix=o ns=urn:schemas-microsoft-com:office:office \n?>">>)),
+ ?assertEqual(
+ [{pi, <<"xml:namespace">>,
+ [{<<"prefix">>,<<"o">>},
+ {<<"ns">>,<<"urn:schemas-microsoft-com:office:office">>}]}],
+ tokens(<<"<?xml:namespace prefix=o ns=urn:schemas-microsoft-com:office:office">>)),
+ ?assertEqual(
+ [{data, <<"<">>, false}],
+ tokens(<<"&lt;">>)),
+ ?assertEqual(
+ [{data, <<"not html ">>, false},
+ {data, <<"< at all">>, false}],
+ tokens(<<"not html < at all">>)),
+ ok.
+
+parse_test() ->
+ D0 = <<"<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.01//EN\" \"http://www.w3.org/TR/html4/strict.dtd\">
+<html>
+ <head>
+ <meta http-equiv=\"Content-Type\" content=\"text/html; charset=UTF-8\">
+ <title>Foo</title>
+ <link rel=\"stylesheet\" type=\"text/css\" href=\"/static/rel/dojo/resources/dojo.css\" media=\"screen\">
+ <link rel=\"stylesheet\" type=\"text/css\" href=\"/static/foo.css\" media=\"screen\">
+ <!--[if lt IE 7]>
+ <style type=\"text/css\">
+ .no_ie { display: none; }
+ </style>
+ <![endif]-->
+ <link rel=\"icon\" href=\"/static/images/favicon.ico\" type=\"image/x-icon\">
+ <link rel=\"shortcut icon\" href=\"/static/images/favicon.ico\" type=\"image/x-icon\">
+ </head>
+ <body id=\"home\" class=\"tundra\"><![CDATA[&lt;<this<!-- is -->CDATA>&gt;]]></body>
+</html>">>,
+ ?assertEqual(
+ {<<"html">>, [],
+ [{<<"head">>, [],
+ [{<<"meta">>,
+ [{<<"http-equiv">>,<<"Content-Type">>},
+ {<<"content">>,<<"text/html; charset=UTF-8">>}],
+ []},
+ {<<"title">>,[],[<<"Foo">>]},
+ {<<"link">>,
+ [{<<"rel">>,<<"stylesheet">>},
+ {<<"type">>,<<"text/css">>},
+ {<<"href">>,<<"/static/rel/dojo/resources/dojo.css">>},
+ {<<"media">>,<<"screen">>}],
+ []},
+ {<<"link">>,
+ [{<<"rel">>,<<"stylesheet">>},
+ {<<"type">>,<<"text/css">>},
+ {<<"href">>,<<"/static/foo.css">>},
+ {<<"media">>,<<"screen">>}],
+ []},
+ {comment,<<"[if lt IE 7]>\n <style type=\"text/css\">\n .no_ie { display: none; }\n </style>\n <![endif]">>},
+ {<<"link">>,
+ [{<<"rel">>,<<"icon">>},
+ {<<"href">>,<<"/static/images/favicon.ico">>},
+ {<<"type">>,<<"image/x-icon">>}],
+ []},
+ {<<"link">>,
+ [{<<"rel">>,<<"shortcut icon">>},
+ {<<"href">>,<<"/static/images/favicon.ico">>},
+ {<<"type">>,<<"image/x-icon">>}],
+ []}]},
+ {<<"body">>,
+ [{<<"id">>,<<"home">>},
+ {<<"class">>,<<"tundra">>}],
+ [<<"&lt;<this<!-- is -->CDATA>&gt;">>]}]},
+ parse(D0)),
+ ?assertEqual(
+ {<<"html">>,[],
+ [{pi, <<"xml:namespace">>,
+ [{<<"prefix">>,<<"o">>},
+ {<<"ns">>,<<"urn:schemas-microsoft-com:office:office">>}]}]},
+ parse(
+ <<"<html><?xml:namespace prefix=\"o\" ns=\"urn:schemas-microsoft-com:office:office\"?></html>">>)),
+ ?assertEqual(
+ {<<"html">>, [],
+ [{<<"dd">>, [], [<<"foo">>]},
+ {<<"dt">>, [], [<<"bar">>]}]},
+ parse(<<"<html><dd>foo<dt>bar</html>">>)),
+ %% Singleton sadness
+ ?assertEqual(
+ {<<"html">>, [],
+ [{<<"link">>, [], []},
+ <<"foo">>,
+ {<<"br">>, [], []},
+ <<"bar">>]},
+ parse(<<"<html><link>foo<br>bar</html>">>)),
+ ?assertEqual(
+ {<<"html">>, [],
+ [{<<"link">>, [], [<<"foo">>,
+ {<<"br">>, [], []},
+ <<"bar">>]}]},
+ parse(<<"<html><link>foo<br>bar</link></html>">>)),
+ ok.
+
+exhaustive_is_singleton_test() ->
+ T = mochiweb_cover:clause_lookup_table(?MODULE, is_singleton),
+ [?assertEqual(V, is_singleton(K)) || {K, V} <- T].
+
+tokenize_attributes_test() ->
+ ?assertEqual(
+ {<<"foo">>,
+ [{<<"bar">>, <<"b\"az">>},
+ {<<"wibble">>, <<"wibble">>},
+ {<<"taco", 16#c2, 16#a9>>, <<"bell">>},
+ {<<"quux">>, <<"quux">>}],
+ []},
+ parse(<<"<foo bar=\"b&quot;az\" wibble taco&copy;=bell quux">>)),
+ ok.
+
+tokens2_test() ->
+ D0 = <<"<channel><title>from __future__ import *</title><link>http://bob.pythonmac.org</link><description>Bob's Rants</description></channel>">>,
+ ?assertEqual(
+ [{start_tag,<<"channel">>,[],false},
+ {start_tag,<<"title">>,[],false},
+ {data,<<"from __future__ import *">>,false},
+ {end_tag,<<"title">>},
+ {start_tag,<<"link">>,[],true},
+ {data,<<"http://bob.pythonmac.org">>,false},
+ {end_tag,<<"link">>},
+ {start_tag,<<"description">>,[],false},
+ {data,<<"Bob's Rants">>,false},
+ {end_tag,<<"description">>},
+ {end_tag,<<"channel">>}],
+ tokens(D0)),
+ ok.
+
+to_tokens_test() ->
+ ?assertEqual(
+ [{start_tag, <<"p">>, [{class, 1}], false},
+ {end_tag, <<"p">>}],
+ to_tokens({p, [{class, 1}], []})),
+ ?assertEqual(
+ [{start_tag, <<"p">>, [], false},
+ {end_tag, <<"p">>}],
+ to_tokens({p})),
+ ?assertEqual(
+ [{'=', <<"data">>}],
+ to_tokens({'=', <<"data">>})),
+ ?assertEqual(
+ [{comment, <<"comment">>}],
+ to_tokens({comment, <<"comment">>})),
+ %% This is only allowed in sub-tags:
+ %% {p, [{"class", "foo"}]} as {p, [{"class", "foo"}], []}
+ %% On the outside it's always treated as follows:
+ %% {p, [], [{"class", "foo"}]} as {p, [], [{"class", "foo"}]}
+ ?assertEqual(
+ [{start_tag, <<"html">>, [], false},
+ {start_tag, <<"p">>, [{class, 1}], false},
+ {end_tag, <<"p">>},
+ {end_tag, <<"html">>}],
+ to_tokens({html, [{p, [{class, 1}]}]})),
+ ok.
+
+parse2_test() ->
+ D0 = <<"<channel><title>from __future__ import *</title><link>http://bob.pythonmac.org<br>foo</link><description>Bob's Rants</description></channel>">>,
+ ?assertEqual(
+ {<<"channel">>,[],
+ [{<<"title">>,[],[<<"from __future__ import *">>]},
+ {<<"link">>,[],[
+ <<"http://bob.pythonmac.org">>,
+ {<<"br">>,[],[]},
+ <<"foo">>]},
+ {<<"description">>,[],[<<"Bob's Rants">>]}]},
+ parse(D0)),
+ ok.
+
+parse_tokens_test() ->
+ D0 = [{doctype,[<<"HTML">>,<<"PUBLIC">>,<<"-//W3C//DTD HTML 4.01 Transitional//EN">>]},
+ {data,<<"\n">>,true},
+ {start_tag,<<"html">>,[],false}],
+ ?assertEqual(
+ {<<"html">>, [], []},
+ parse_tokens(D0)),
+ D1 = D0 ++ [{end_tag, <<"html">>}],
+ ?assertEqual(
+ {<<"html">>, [], []},
+ parse_tokens(D1)),
+ D2 = D0 ++ [{start_tag, <<"body">>, [], false}],
+ ?assertEqual(
+ {<<"html">>, [], [{<<"body">>, [], []}]},
+ parse_tokens(D2)),
+ D3 = D0 ++ [{start_tag, <<"head">>, [], false},
+ {end_tag, <<"head">>},
+ {start_tag, <<"body">>, [], false}],
+ ?assertEqual(
+ {<<"html">>, [], [{<<"head">>, [], []}, {<<"body">>, [], []}]},
+ parse_tokens(D3)),
+ D4 = D3 ++ [{data,<<"\n">>,true},
+ {start_tag,<<"div">>,[{<<"class">>,<<"a">>}],false},
+ {start_tag,<<"a">>,[{<<"name">>,<<"#anchor">>}],false},
+ {end_tag,<<"a">>},
+ {end_tag,<<"div">>},
+ {start_tag,<<"div">>,[{<<"class">>,<<"b">>}],false},
+ {start_tag,<<"div">>,[{<<"class">>,<<"c">>}],false},
+ {end_tag,<<"div">>},
+ {end_tag,<<"div">>}],
+ ?assertEqual(
+ {<<"html">>, [],
+ [{<<"head">>, [], []},
+ {<<"body">>, [],
+ [{<<"div">>, [{<<"class">>, <<"a">>}], [{<<"a">>, [{<<"name">>, <<"#anchor">>}], []}]},
+ {<<"div">>, [{<<"class">>, <<"b">>}], [{<<"div">>, [{<<"class">>, <<"c">>}], []}]}
+ ]}]},
+ parse_tokens(D4)),
+ D5 = [{start_tag,<<"html">>,[],false},
+ {data,<<"\n">>,true},
+ {data,<<"boo">>,false},
+ {data,<<"hoo">>,false},
+ {data,<<"\n">>,true},
+ {end_tag,<<"html">>}],
+ ?assertEqual(
+ {<<"html">>, [], [<<"\nboohoo\n">>]},
+ parse_tokens(D5)),
+ D6 = [{start_tag,<<"html">>,[],false},
+ {data,<<"\n">>,true},
+ {data,<<"\n">>,true},
+ {end_tag,<<"html">>}],
+ ?assertEqual(
+ {<<"html">>, [], []},
+ parse_tokens(D6)),
+ D7 = [{start_tag,<<"html">>,[],false},
+ {start_tag,<<"ul">>,[],false},
+ {start_tag,<<"li">>,[],false},
+ {data,<<"word">>,false},
+ {start_tag,<<"li">>,[],false},
+ {data,<<"up">>,false},
+ {end_tag,<<"li">>},
+ {start_tag,<<"li">>,[],false},
+ {data,<<"fdsa">>,false},
+ {start_tag,<<"br">>,[],true},
+ {data,<<"asdf">>,false},
+ {end_tag,<<"ul">>},
+ {end_tag,<<"html">>}],
+ ?assertEqual(
+ {<<"html">>, [],
+ [{<<"ul">>, [],
+ [{<<"li">>, [], [<<"word">>]},
+ {<<"li">>, [], [<<"up">>]},
+ {<<"li">>, [], [<<"fdsa">>,{<<"br">>, [], []}, <<"asdf">>]}]}]},
+ parse_tokens(D7)),
+ ok.
+
+destack_test() ->
+ {<<"a">>, [], []} =
+ destack([{<<"a">>, [], []}]),
+ {<<"a">>, [], [{<<"b">>, [], []}]} =
+ destack([{<<"b">>, [], []}, {<<"a">>, [], []}]),
+ {<<"a">>, [], [{<<"b">>, [], [{<<"c">>, [], []}]}]} =
+ destack([{<<"c">>, [], []}, {<<"b">>, [], []}, {<<"a">>, [], []}]),
+ [{<<"a">>, [], [{<<"b">>, [], [{<<"c">>, [], []}]}]}] =
+ destack(<<"b">>,
+ [{<<"c">>, [], []}, {<<"b">>, [], []}, {<<"a">>, [], []}]),
+ [{<<"b">>, [], [{<<"c">>, [], []}]}, {<<"a">>, [], []}] =
+ destack(<<"c">>,
+ [{<<"c">>, [], []}, {<<"b">>, [], []},{<<"a">>, [], []}]),
+ ok.
+
+doctype_test() ->
+ ?assertEqual(
+ {<<"html">>,[],[{<<"head">>,[],[]}]},
+ mochiweb_html:parse("<!DOCTYPE html PUBLIC \"-//W3C//DTD HTML 4.01 Transitional//EN\" \"http://www.w3.org/TR/html4/loose.dtd\">"
+ "<html><head></head></body></html>")),
+ %% http://code.google.com/p/mochiweb/issues/detail?id=52
+ ?assertEqual(
+ {<<"html">>,[],[{<<"head">>,[],[]}]},
+ mochiweb_html:parse("<html>"
+ "<!DOCTYPE html PUBLIC \"-//W3C//DTD HTML 4.01 Transitional//EN\" \"http://www.w3.org/TR/html4/loose.dtd\">"
+ "<head></head></body></html>")),
+ ok.
+
+-endif.
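
Beyond the test conversion, the mochiweb_html hunks change observable behaviour: a bare "<" followed by whitespace is now kept as data instead of being dropped, the </script> and </textarea> look-aheads only close on a probable tag end (?PROBABLE_CLOSE), charrefs decode through mochiutf8 rather than xmerl_ucs, processing instructions ({pi, ...}) round-trip through both tokens/1 and to_html/1, and stray doctype tokens inside the tree are skipped. The main entry points, as a sketch:

    %% parse/1 returns a single root tuple: {Tag, Attrs, Children}.
    {<<"ul">>, [],
     [{<<"li">>, [], [<<"one">>]},
      {<<"li">>, [], [<<"two">>]}]} =
        mochiweb_html:parse(<<"<ul><li>one<li>two</ul>">>),
    <<"a &lt; b">> = mochiweb_html:escape(<<"a < b">>).
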
diff --git a/src/mochiweb/mochiweb_http.erl b/src/mochiweb/mochiweb_http.erl
index f1821f40..ab0af7e8 100644
--- a/src/mochiweb/mochiweb_http.erl
+++ b/src/mochiweb/mochiweb_http.erl
@@ -8,31 +8,22 @@
-export([start/0, start/1, stop/0, stop/1]).
-export([loop/2, default_body/1]).
-export([after_response/2, reentry/1]).
+-export([parse_range_request/1, range_skip_length/2]).
--define(IDLE_TIMEOUT, 30000).
+-define(REQUEST_RECV_TIMEOUT, 300000). % timeout waiting for request line
+-define(HEADERS_RECV_TIMEOUT, 30000). % timeout waiting for headers
-define(MAX_HEADERS, 1000).
-define(DEFAULTS, [{name, ?MODULE},
{port, 8888}]).
-set_default({Prop, Value}, PropList) ->
- case proplists:is_defined(Prop, PropList) of
- true ->
- PropList;
- false ->
- [{Prop, Value} | PropList]
- end.
-
-set_defaults(Defaults, PropList) ->
- lists:foldl(fun set_default/2, PropList, Defaults).
-
parse_options(Options) ->
{loop, HttpLoop} = proplists:lookup(loop, Options),
Loop = fun (S) ->
?MODULE:loop(S, HttpLoop)
end,
Options1 = [{loop, Loop} | proplists:delete(loop, Options)],
- set_defaults(?DEFAULTS, Options1).
+ mochilists:set_defaults(?DEFAULTS, Options1).
stop() ->
mochiweb_socket_server:stop(?MODULE).
@@ -95,20 +86,26 @@ default_body(Req) ->
default_body(Req, Req:get(method), Req:get(path)).
loop(Socket, Body) ->
- inet:setopts(Socket, [{packet, http}]),
+ mochiweb_socket:setopts(Socket, [{packet, http}]),
request(Socket, Body).
request(Socket, Body) ->
- case gen_tcp:recv(Socket, 0, ?IDLE_TIMEOUT) of
+ case mochiweb_socket:recv(Socket, 0, ?REQUEST_RECV_TIMEOUT) of
{ok, {http_request, Method, Path, Version}} ->
+ mochiweb_socket:setopts(Socket, [{packet, httph}]),
headers(Socket, {Method, Path, Version}, [], Body, 0);
{error, {http_error, "\r\n"}} ->
request(Socket, Body);
{error, {http_error, "\n"}} ->
request(Socket, Body);
+ {error, closed} ->
+ mochiweb_socket:close(Socket),
+ exit(normal);
+ {error, timeout} ->
+ mochiweb_socket:close(Socket),
+ exit(normal);
_Other ->
- gen_tcp:close(Socket),
- exit(normal)
+ handle_invalid_request(Socket)
end.
reentry(Body) ->
@@ -118,35 +115,159 @@ reentry(Body) ->
headers(Socket, Request, Headers, _Body, ?MAX_HEADERS) ->
%% Too many headers sent, bad request.
- inet:setopts(Socket, [{packet, raw}]),
- Req = mochiweb:new_request({Socket, Request,
- lists:reverse(Headers)}),
- Req:respond({400, [], []}),
- gen_tcp:close(Socket),
- exit(normal);
+ mochiweb_socket:setopts(Socket, [{packet, raw}]),
+ handle_invalid_request(Socket, Request, Headers);
headers(Socket, Request, Headers, Body, HeaderCount) ->
- case gen_tcp:recv(Socket, 0, ?IDLE_TIMEOUT) of
+ case mochiweb_socket:recv(Socket, 0, ?HEADERS_RECV_TIMEOUT) of
{ok, http_eoh} ->
- inet:setopts(Socket, [{packet, raw}]),
+ mochiweb_socket:setopts(Socket, [{packet, raw}]),
Req = mochiweb:new_request({Socket, Request,
lists:reverse(Headers)}),
- Body(Req),
+ call_body(Body, Req),
?MODULE:after_response(Body, Req);
{ok, {http_header, _, Name, _, Value}} ->
headers(Socket, Request, [{Name, Value} | Headers], Body,
1 + HeaderCount);
+ {error, closed} ->
+ mochiweb_socket:close(Socket),
+ exit(normal);
_Other ->
- gen_tcp:close(Socket),
- exit(normal)
+ handle_invalid_request(Socket, Request, Headers)
end.
+call_body({M, F}, Req) ->
+ M:F(Req);
+call_body(Body, Req) ->
+ Body(Req).
+
+handle_invalid_request(Socket) ->
+ handle_invalid_request(Socket, {'GET', {abs_path, "/"}, {0,9}}, []).
+
+handle_invalid_request(Socket, Request, RevHeaders) ->
+ mochiweb_socket:setopts(Socket, [{packet, raw}]),
+ Req = mochiweb:new_request({Socket, Request,
+ lists:reverse(RevHeaders)}),
+ Req:respond({400, [], []}),
+ mochiweb_socket:close(Socket),
+ exit(normal).
+
after_response(Body, Req) ->
Socket = Req:get(socket),
case Req:should_close() of
true ->
- gen_tcp:close(Socket),
+ mochiweb_socket:close(Socket),
exit(normal);
false ->
Req:cleanup(),
?MODULE:loop(Socket, Body)
end.
+
+parse_range_request("bytes=0-") ->
+    %% a full-content range is the same as no Range header at all
+    %% (range_test() below expects undefined here)
+    undefined;
+parse_range_request(RawRange) when is_list(RawRange) ->
+ try
+ "bytes=" ++ RangeString = RawRange,
+ Ranges = string:tokens(RangeString, ","),
+ lists:map(fun ("-" ++ V) ->
+ {none, list_to_integer(V)};
+ (R) ->
+ case string:tokens(R, "-") of
+ [S1, S2] ->
+ {list_to_integer(S1), list_to_integer(S2)};
+ [S] ->
+ {list_to_integer(S), none}
+ end
+ end,
+ Ranges)
+ catch
+ _:_ ->
+ fail
+ end.
+
+range_skip_length(Spec, Size) ->
+ case Spec of
+ {none, R} when R =< Size, R >= 0 ->
+ {Size - R, R};
+ {none, _OutOfRange} ->
+ {0, Size};
+ {R, none} when R >= 0, R < Size ->
+ {R, Size - R};
+ {_OutOfRange, none} ->
+ invalid_range;
+ {Start, End} when 0 =< Start, Start =< End, End < Size ->
+ {Start, End - Start + 1};
+ {_OutOfRange, _End} ->
+ invalid_range
+ end.
+
+%%
+%% Tests
+%%
+-include_lib("eunit/include/eunit.hrl").
+-ifdef(TEST).
+
+range_test() ->
+ %% valid, single ranges
+ ?assertEqual([{20, 30}], parse_range_request("bytes=20-30")),
+ ?assertEqual([{20, none}], parse_range_request("bytes=20-")),
+ ?assertEqual([{none, 20}], parse_range_request("bytes=-20")),
+
+    %% trivial single range: full content, same as no Range header
+ ?assertEqual(undefined, parse_range_request("bytes=0-")),
+
+ %% invalid, single ranges
+ ?assertEqual(fail, parse_range_request("")),
+ ?assertEqual(fail, parse_range_request("garbage")),
+ ?assertEqual(fail, parse_range_request("bytes=-20-30")),
+
+ %% valid, multiple range
+ ?assertEqual(
+ [{20, 30}, {50, 100}, {110, 200}],
+ parse_range_request("bytes=20-30,50-100,110-200")),
+ ?assertEqual(
+ [{20, none}, {50, 100}, {none, 200}],
+ parse_range_request("bytes=20-,50-100,-200")),
+
+ %% no ranges
+ ?assertEqual([], parse_range_request("bytes=")),
+ ok.
+
+range_skip_length_test() ->
+ Body = <<"012345678901234567890123456789012345678901234567890123456789">>,
+ BodySize = byte_size(Body), %% 60
+ BodySize = 60,
+
+ %% these values assume BodySize =:= 60
+ ?assertEqual({1,9}, range_skip_length({1,9}, BodySize)), %% 1-9
+ ?assertEqual({10,10}, range_skip_length({10,19}, BodySize)), %% 10-19
+ ?assertEqual({40, 20}, range_skip_length({none, 20}, BodySize)), %% -20
+ ?assertEqual({30, 30}, range_skip_length({30, none}, BodySize)), %% 30-
+
+ %% valid edge cases for range_skip_length
+ ?assertEqual({BodySize, 0}, range_skip_length({none, 0}, BodySize)),
+ ?assertEqual({0, BodySize}, range_skip_length({none, BodySize}, BodySize)),
+ ?assertEqual({0, BodySize}, range_skip_length({0, none}, BodySize)),
+ BodySizeLess1 = BodySize - 1,
+ ?assertEqual({BodySizeLess1, 1},
+ range_skip_length({BodySize - 1, none}, BodySize)),
+
+ %% out of range, return whole thing
+ ?assertEqual({0, BodySize},
+ range_skip_length({none, BodySize + 1}, BodySize)),
+ ?assertEqual({0, BodySize},
+ range_skip_length({none, -1}, BodySize)),
+
+ %% invalid ranges
+ ?assertEqual(invalid_range,
+ range_skip_length({-1, 30}, BodySize)),
+ ?assertEqual(invalid_range,
+ range_skip_length({0, BodySize + 1}, BodySize)),
+ ?assertEqual(invalid_range,
+ range_skip_length({-1, BodySize + 1}, BodySize)),
+ ?assertEqual(invalid_range,
+ range_skip_length({BodySize, 40}, BodySize)),
+ ?assertEqual(invalid_range,
+ range_skip_length({-1, none}, BodySize)),
+ ?assertEqual(invalid_range,
+ range_skip_length({BodySize, none}, BodySize)),
+ ok.
+
+-endif.
diff --git a/src/mochiweb/mochiweb_io.erl b/src/mochiweb/mochiweb_io.erl
new file mode 100644
index 00000000..6ce57ec8
--- /dev/null
+++ b/src/mochiweb/mochiweb_io.erl
@@ -0,0 +1,46 @@
+%% @author Bob Ippolito <bob@mochimedia.com>
+%% @copyright 2007 Mochi Media, Inc.
+
+%% @doc Utilities for dealing with IO devices (open files).
+
+-module(mochiweb_io).
+-author('bob@mochimedia.com').
+
+-export([iodevice_stream/3, iodevice_stream/2]).
+-export([iodevice_foldl/4, iodevice_foldl/3]).
+-export([iodevice_size/1]).
+-define(READ_SIZE, 8192).
+
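+%% Fold F(Data, Acc) over the contents of IoDevice, reading BufferSize
+%% (?READ_SIZE by default) bytes at a time until eof.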
+iodevice_foldl(F, Acc, IoDevice) ->
+ iodevice_foldl(F, Acc, IoDevice, ?READ_SIZE).
+
+iodevice_foldl(F, Acc, IoDevice, BufferSize) ->
+ case file:read(IoDevice, BufferSize) of
+ eof ->
+ Acc;
+ {ok, Data} ->
+ iodevice_foldl(F, F(Data, Acc), IoDevice, BufferSize)
+ end.
+
+iodevice_stream(Callback, IoDevice) ->
+ iodevice_stream(Callback, IoDevice, ?READ_SIZE).
+
+iodevice_stream(Callback, IoDevice, BufferSize) ->
+ F = fun (Data, ok) -> Callback(Data) end,
+ ok = iodevice_foldl(F, ok, IoDevice, BufferSize).
+
+iodevice_size(IoDevice) ->
+ {ok, Size} = file:position(IoDevice, eof),
+ {ok, 0} = file:position(IoDevice, bof),
+ Size.
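+
+%% Usage sketch (the file name is hypothetical; any IO device opened in
+%% raw binary mode works). Note that iodevice_size/1 rewinds the device,
+%% so the subsequent stream reads from the beginning:
+%%   {ok, Fd} = file:open("data.bin", [read, raw, binary]),
+%%   Size = iodevice_size(Fd),
+%%   ok = iodevice_stream(fun (Chunk) ->
+%%     io:format("~p bytes~n", [byte_size(Chunk)])
+%%   end, Fd).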
+
+
+%%
+%% Tests
+%%
+-include_lib("eunit/include/eunit.hrl").
+-ifdef(TEST).
+
+
+
+-endif.
diff --git a/src/mochiweb/mochiweb_mime.erl b/src/mochiweb/mochiweb_mime.erl
new file mode 100644
index 00000000..5344aee7
--- /dev/null
+++ b/src/mochiweb/mochiweb_mime.erl
@@ -0,0 +1,94 @@
+%% @author Bob Ippolito <bob@mochimedia.com>
+%% @copyright 2007 Mochi Media, Inc.
+
+%% @doc Gives a good MIME type guess based on file extension.
+
+-module(mochiweb_mime).
+-author('bob@mochimedia.com').
+-export([from_extension/1]).
+
+%% @spec from_extension(S::string()) -> string() | undefined
+%% @doc Given a filename extension (e.g. ".html") return a guess for the MIME
+%% type such as "text/html". Will return the atom undefined if no good
+%% guess is available.
+from_extension(".html") ->
+ "text/html";
+from_extension(".xhtml") ->
+ "application/xhtml+xml";
+from_extension(".xml") ->
+ "application/xml";
+from_extension(".css") ->
+ "text/css";
+from_extension(".js") ->
+ "application/x-javascript";
+from_extension(".jpg") ->
+ "image/jpeg";
+from_extension(".gif") ->
+ "image/gif";
+from_extension(".png") ->
+ "image/png";
+from_extension(".swf") ->
+ "application/x-shockwave-flash";
+from_extension(".zip") ->
+ "application/zip";
+from_extension(".bz2") ->
+ "application/x-bzip2";
+from_extension(".gz") ->
+ "application/x-gzip";
+from_extension(".tar") ->
+ "application/x-tar";
+from_extension(".tgz") ->
+ "application/x-gzip";
+from_extension(".txt") ->
+ "text/plain";
+from_extension(".doc") ->
+ "application/msword";
+from_extension(".pdf") ->
+ "application/pdf";
+from_extension(".xls") ->
+ "application/vnd.ms-excel";
+from_extension(".rtf") ->
+ "application/rtf";
+from_extension(".mov") ->
+ "video/quicktime";
+from_extension(".mp3") ->
+ "audio/mpeg";
+from_extension(".z") ->
+ "application/x-compress";
+from_extension(".wav") ->
+ "audio/x-wav";
+from_extension(".ico") ->
+ "image/x-icon";
+from_extension(".bmp") ->
+ "image/bmp";
+from_extension(".m4a") ->
+ "audio/mpeg";
+from_extension(".m3u") ->
+ "audio/x-mpegurl";
+from_extension(".exe") ->
+ "application/octet-stream";
+from_extension(".csv") ->
+ "text/csv";
+from_extension(_) ->
+ undefined.
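+
+%% Usage sketch (mirrors from_extension_test/0 below):
+%%   "text/html" = mochiweb_mime:from_extension(".html"),
+%%   undefined = mochiweb_mime:from_extension(".wtf").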
+
+%%
+%% Tests
+%%
+-include_lib("eunit/include/eunit.hrl").
+-ifdef(TEST).
+
+exhaustive_from_extension_test() ->
+ T = mochiweb_cover:clause_lookup_table(?MODULE, from_extension),
+ [?assertEqual(V, from_extension(K)) || {K, V} <- T].
+
+from_extension_test() ->
+ ?assertEqual("text/html",
+ from_extension(".html")),
+ ?assertEqual(undefined,
+ from_extension("")),
+ ?assertEqual(undefined,
+ from_extension(".wtf")),
+ ok.
+
+-endif.
diff --git a/src/mochiweb/mochiweb_multipart.erl b/src/mochiweb/mochiweb_multipart.erl
index 0368a9a6..3069cf4d 100644
--- a/src/mochiweb/mochiweb_multipart.erl
+++ b/src/mochiweb/mochiweb_multipart.erl
@@ -8,17 +8,73 @@
-export([parse_form/1, parse_form/2]).
-export([parse_multipart_request/2]).
--export([test/0]).
+-export([parts_to_body/3, parts_to_multipart_body/4]).
+-export([default_file_handler/2]).
-define(CHUNKSIZE, 4096).
-record(mp, {state, boundary, length, buffer, callback, req}).
%% TODO: DOCUMENT THIS MODULE.
-
+%% @type key() = atom() | string() | binary().
+%% @type value() = atom() | iolist() | integer().
+%% @type header() = {key(), value()}.
+%% @type bodypart() = {Start::integer(), End::integer(), Body::iolist()}.
+%% @type formfile() = {Name::string(), ContentType::string(), Content::binary()}.
+%% @type request().
+%% @type file_handler() = (Filename::string(), ContentType::string()) -> file_handler_callback().
+%% @type file_handler_callback() = (binary() | eof) -> file_handler_callback() | term().
+
+%% @spec parts_to_body([bodypart()], ContentType::string(),
+%% Size::integer()) -> {[header()], iolist()}
+%% @doc Return {[header()], iolist()} representing the body for the given
+%% parts; the result may be a single part or a multipart body.
+parts_to_body([{Start, End, Body}], ContentType, Size) ->
+ HeaderList = [{"Content-Type", ContentType},
+ {"Content-Range",
+ ["bytes ",
+ mochiweb_util:make_io(Start), "-", mochiweb_util:make_io(End),
+ "/", mochiweb_util:make_io(Size)]}],
+ {HeaderList, Body};
+parts_to_body(BodyList, ContentType, Size) when is_list(BodyList) ->
+ parts_to_multipart_body(BodyList, ContentType, Size,
+ mochihex:to_hex(crypto:rand_bytes(8))).
+
+%% @spec parts_to_multipart_body([bodypart()], ContentType::string(),
+%% Size::integer(), Boundary::string()) ->
+%% {[header()], iolist()}
+%% @doc Return {[header()], iolist()} representing the body for the given
+%% parts; the result is always a multipart response.
+parts_to_multipart_body(BodyList, ContentType, Size, Boundary) ->
+ HeaderList = [{"Content-Type",
+ ["multipart/byteranges; ",
+ "boundary=", Boundary]}],
+ MultiPartBody = multipart_body(BodyList, ContentType, Boundary, Size),
+
+ {HeaderList, MultiPartBody}.
+
+%% @spec multipart_body([bodypart()], ContentType::string(),
+%% Boundary::string(), Size::integer()) -> iolist()
+%% @doc Return the representation of a multipart body for the given [bodypart()].
+multipart_body([], _ContentType, Boundary, _Size) ->
+ ["--", Boundary, "--\r\n"];
+multipart_body([{Start, End, Body} | BodyList], ContentType, Boundary, Size) ->
+ ["--", Boundary, "\r\n",
+ "Content-Type: ", ContentType, "\r\n",
+ "Content-Range: ",
+ "bytes ", mochiweb_util:make_io(Start), "-", mochiweb_util:make_io(End),
+ "/", mochiweb_util:make_io(Size), "\r\n\r\n",
+ Body, "\r\n"
+ | multipart_body(BodyList, ContentType, Boundary, Size)].
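+
+%% For example (see multipart_body_test/0 below), a single part
+%% {0, 5, <<"01234">>} with content type "text/plain", boundary
+%% "BOUNDARY" and size 10 renders as:
+%%   --BOUNDARY\r\nContent-Type: text/plain\r\n
+%%   Content-Range: bytes 0-5/10\r\n\r\n01234\r\n--BOUNDARY--\r\n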
+
+%% @spec parse_form(request()) -> [{string(), string() | formfile()}]
+%% @doc Parse a multipart form from the given request using the in-memory
+%% default_file_handler/2.
parse_form(Req) ->
parse_form(Req, fun default_file_handler/2).
+%% @spec parse_form(request(), F::file_handler()) -> [{string(), string() | term()}]
+%% @doc Parse a multipart form from the given request using the given file_handler().
parse_form(Req, FileHandler) ->
Callback = fun (Next) -> parse_form_outer(Next, FileHandler, []) end,
{_, _, Res} = parse_multipart_request(Req, Callback),
@@ -236,13 +292,38 @@ find_boundary(Prefix, Data) ->
not_found
end.
-with_socket_server(ServerFun, ClientFun) ->
- {ok, Server} = mochiweb_socket_server:start([{ip, "127.0.0.1"},
- {port, 0},
- {loop, ServerFun}]),
+%%
+%% Tests
+%%
+-include_lib("eunit/include/eunit.hrl").
+-ifdef(TEST).
+
+ssl_cert_opts() ->
+ EbinDir = filename:dirname(code:which(?MODULE)),
+ CertDir = filename:join([EbinDir, "..", "support", "test-materials"]),
+ CertFile = filename:join(CertDir, "test_ssl_cert.pem"),
+ KeyFile = filename:join(CertDir, "test_ssl_key.pem"),
+ [{certfile, CertFile}, {keyfile, KeyFile}].
+
+with_socket_server(Transport, ServerFun, ClientFun) ->
+ ServerOpts0 = [{ip, "127.0.0.1"}, {port, 0}, {loop, ServerFun}],
+ ServerOpts = case Transport of
+ plain ->
+ ServerOpts0;
+ ssl ->
+ ServerOpts0 ++ [{ssl, true}, {ssl_opts, ssl_cert_opts()}]
+ end,
+ {ok, Server} = mochiweb_socket_server:start(ServerOpts),
Port = mochiweb_socket_server:get(Server, port),
- {ok, Client} = gen_tcp:connect("127.0.0.1", Port,
- [binary, {active, false}]),
+ ClientOpts = [binary, {active, false}],
+ {ok, Client} = case Transport of
+ plain ->
+ gen_tcp:connect("127.0.0.1", Port, ClientOpts);
+ ssl ->
+ ClientOpts1 = [{ssl_imp, new} | ClientOpts],
+ {ok, SslSocket} = ssl:connect("127.0.0.1", Port, ClientOpts1),
+ {ok, {ssl, SslSocket}}
+ end,
Res = (catch ClientFun(Client)),
mochiweb_socket_server:stop(Server),
Res.
@@ -256,19 +337,30 @@ fake_request(Socket, ContentType, Length) ->
[{"content-type", ContentType},
{"content-length", Length}])).
-test_callback(Expect, [Expect | Rest]) ->
+test_callback({body, <<>>}, Rest=[body_end | _]) ->
+ %% When expecting the body_end we might get an empty binary
+ fun (Next) -> test_callback(Next, Rest) end;
+test_callback({body, Got}, [{body, Expect} | Rest]) when Got =/= Expect ->
+ %% Partial response
+ GotSize = size(Got),
+ <<Got:GotSize/binary, Expect1/binary>> = Expect,
+ fun (Next) -> test_callback(Next, [{body, Expect1} | Rest]) end;
+test_callback(Got, [Expect | Rest]) ->
+ ?assertEqual(Got, Expect),
case Rest of
[] ->
ok;
_ ->
fun (Next) -> test_callback(Next, Rest) end
- end;
-test_callback({body, Got}, [{body, Expect} | Rest]) ->
- GotSize = size(Got),
- <<Got:GotSize/binary, Expect1/binary>> = Expect,
- fun (Next) -> test_callback(Next, [{body, Expect1} | Rest]) end.
+ end.
-test_parse3() ->
+parse3_http_test() ->
+ parse3(plain).
+
+parse3_https_test() ->
+ parse3(ssl).
+
+parse3(Transport) ->
ContentType = "multipart/form-data; boundary=---------------------------7386909285754635891697677882",
BinContent = <<"-----------------------------7386909285754635891697677882\r\nContent-Disposition: form-data; name=\"hidden\"\r\n\r\nmultipart message\r\n-----------------------------7386909285754635891697677882\r\nContent-Disposition: form-data; name=\"file\"; filename=\"test_file.txt\"\r\nContent-Type: text/plain\r\n\r\nWoo multiline text file\n\nLa la la\r\n-----------------------------7386909285754635891697677882--\r\n">>,
Expect = [{headers,
@@ -285,8 +377,8 @@ test_parse3() ->
eof],
TestCallback = fun (Next) -> test_callback(Next, Expect) end,
ServerFun = fun (Socket) ->
- ok = gen_tcp:send(Socket, BinContent),
- exit(normal)
+ ok = mochiweb_socket:send(Socket, BinContent),
+ exit(normal)
end,
ClientFun = fun (Socket) ->
Req = fake_request(Socket, ContentType,
@@ -295,11 +387,16 @@ test_parse3() ->
{0, <<>>, ok} = Res,
ok
end,
- ok = with_socket_server(ServerFun, ClientFun),
+ ok = with_socket_server(Transport, ServerFun, ClientFun),
ok.
+parse2_http_test() ->
+ parse2(plain).
+
+parse2_https_test() ->
+ parse2(ssl).
-test_parse2() ->
+parse2(Transport) ->
ContentType = "multipart/form-data; boundary=---------------------------6072231407570234361599764024",
BinContent = <<"-----------------------------6072231407570234361599764024\r\nContent-Disposition: form-data; name=\"hidden\"\r\n\r\nmultipart message\r\n-----------------------------6072231407570234361599764024\r\nContent-Disposition: form-data; name=\"file\"; filename=\"\"\r\nContent-Type: application/octet-stream\r\n\r\n\r\n-----------------------------6072231407570234361599764024--\r\n">>,
Expect = [{headers,
@@ -316,8 +413,8 @@ test_parse2() ->
eof],
TestCallback = fun (Next) -> test_callback(Next, Expect) end,
ServerFun = fun (Socket) ->
- ok = gen_tcp:send(Socket, BinContent),
- exit(normal)
+ ok = mochiweb_socket:send(Socket, BinContent),
+ exit(normal)
end,
ClientFun = fun (Socket) ->
Req = fake_request(Socket, ContentType,
@@ -326,10 +423,16 @@ test_parse2() ->
{0, <<>>, ok} = Res,
ok
end,
- ok = with_socket_server(ServerFun, ClientFun),
+ ok = with_socket_server(Transport, ServerFun, ClientFun),
ok.
-test_parse_form() ->
+parse_form_http_test() ->
+ do_parse_form(plain).
+
+parse_form_https_test() ->
+ do_parse_form(ssl).
+
+do_parse_form(Transport) ->
ContentType = "multipart/form-data; boundary=AaB03x",
"AaB03x" = get_boundary(ContentType),
Content = mochiweb_util:join(
@@ -347,8 +450,8 @@ test_parse_form() ->
""], "\r\n"),
BinContent = iolist_to_binary(Content),
ServerFun = fun (Socket) ->
- ok = gen_tcp:send(Socket, BinContent),
- exit(normal)
+ ok = mochiweb_socket:send(Socket, BinContent),
+ exit(normal)
end,
ClientFun = fun (Socket) ->
Req = fake_request(Socket, ContentType,
@@ -360,10 +463,16 @@ test_parse_form() ->
}] = Res,
ok
end,
- ok = with_socket_server(ServerFun, ClientFun),
+ ok = with_socket_server(Transport, ServerFun, ClientFun),
ok.
-test_parse() ->
+parse_http_test() ->
+ do_parse(plain).
+
+parse_https_test() ->
+ do_parse(ssl).
+
+do_parse(Transport) ->
ContentType = "multipart/form-data; boundary=AaB03x",
"AaB03x" = get_boundary(ContentType),
Content = mochiweb_util:join(
@@ -394,8 +503,113 @@ test_parse() ->
eof],
TestCallback = fun (Next) -> test_callback(Next, Expect) end,
ServerFun = fun (Socket) ->
- ok = gen_tcp:send(Socket, BinContent),
- exit(normal)
+ ok = mochiweb_socket:send(Socket, BinContent),
+ exit(normal)
+ end,
+ ClientFun = fun (Socket) ->
+ Req = fake_request(Socket, ContentType,
+ byte_size(BinContent)),
+ Res = parse_multipart_request(Req, TestCallback),
+ {0, <<>>, ok} = Res,
+ ok
+ end,
+ ok = with_socket_server(Transport, ServerFun, ClientFun),
+ ok.
+
+parse_partial_body_boundary_http_test() ->
+ parse_partial_body_boundary(plain).
+
+parse_partial_body_boundary_https_test() ->
+ parse_partial_body_boundary(ssl).
+
+parse_partial_body_boundary(Transport) ->
+ Boundary = string:copies("$", 2048),
+ ContentType = "multipart/form-data; boundary=" ++ Boundary,
+ ?assertEqual(Boundary, get_boundary(ContentType)),
+ Content = mochiweb_util:join(
+ ["--" ++ Boundary,
+ "Content-Disposition: form-data; name=\"submit-name\"",
+ "",
+ "Larry",
+ "--" ++ Boundary,
+ "Content-Disposition: form-data; name=\"files\";"
+ ++ "filename=\"file1.txt\"",
+ "Content-Type: text/plain",
+ "",
+ "... contents of file1.txt ...",
+ "--" ++ Boundary ++ "--",
+ ""], "\r\n"),
+ BinContent = iolist_to_binary(Content),
+ Expect = [{headers,
+ [{"content-disposition",
+ {"form-data", [{"name", "submit-name"}]}}]},
+ {body, <<"Larry">>},
+ body_end,
+ {headers,
+ [{"content-disposition",
+ {"form-data", [{"name", "files"}, {"filename", "file1.txt"}]}},
+ {"content-type", {"text/plain", []}}
+ ]},
+ {body, <<"... contents of file1.txt ...">>},
+ body_end,
+ eof],
+ TestCallback = fun (Next) -> test_callback(Next, Expect) end,
+ ServerFun = fun (Socket) ->
+ ok = mochiweb_socket:send(Socket, BinContent),
+ exit(normal)
+ end,
+ ClientFun = fun (Socket) ->
+ Req = fake_request(Socket, ContentType,
+ byte_size(BinContent)),
+ Res = parse_multipart_request(Req, TestCallback),
+ {0, <<>>, ok} = Res,
+ ok
+ end,
+ ok = with_socket_server(Transport, ServerFun, ClientFun),
+ ok.
+
+parse_large_header_http_test() ->
+ parse_large_header(plain).
+
+parse_large_header_https_test() ->
+ parse_large_header(ssl).
+
+parse_large_header(Transport) ->
+ ContentType = "multipart/form-data; boundary=AaB03x",
+ "AaB03x" = get_boundary(ContentType),
+ Content = mochiweb_util:join(
+ ["--AaB03x",
+ "Content-Disposition: form-data; name=\"submit-name\"",
+ "",
+ "Larry",
+ "--AaB03x",
+ "Content-Disposition: form-data; name=\"files\";"
+ ++ "filename=\"file1.txt\"",
+ "Content-Type: text/plain",
+ "x-large-header: " ++ string:copies("%", 4096),
+ "",
+ "... contents of file1.txt ...",
+ "--AaB03x--",
+ ""], "\r\n"),
+ BinContent = iolist_to_binary(Content),
+ Expect = [{headers,
+ [{"content-disposition",
+ {"form-data", [{"name", "submit-name"}]}}]},
+ {body, <<"Larry">>},
+ body_end,
+ {headers,
+ [{"content-disposition",
+ {"form-data", [{"name", "files"}, {"filename", "file1.txt"}]}},
+ {"content-type", {"text/plain", []}},
+ {"x-large-header", {string:copies("%", 4096), []}}
+ ]},
+ {body, <<"... contents of file1.txt ...">>},
+ body_end,
+ eof],
+ TestCallback = fun (Next) -> test_callback(Next, Expect) end,
+ ServerFun = fun (Socket) ->
+ ok = mochiweb_socket:send(Socket, BinContent),
+ exit(normal)
end,
ClientFun = fun (Socket) ->
Req = fake_request(Socket, ContentType,
@@ -404,10 +618,10 @@ test_parse() ->
{0, <<>>, ok} = Res,
ok
end,
- ok = with_socket_server(ServerFun, ClientFun),
+ ok = with_socket_server(Transport, ServerFun, ClientFun),
ok.
-test_find_boundary() ->
+find_boundary_test() ->
B = <<"\r\n--X">>,
{next_boundary, 0, 7} = find_boundary(B, <<"\r\n--X\r\nRest">>),
{next_boundary, 1, 7} = find_boundary(B, <<"!\r\n--X\r\nRest">>),
@@ -422,9 +636,10 @@ test_find_boundary() ->
45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,
49,54,48,51,55,52,53,52,51,53,49>>,
{maybe, 30} = find_boundary(P, B0),
+ not_found = find_boundary(B, <<"\r\n--XJOPKE">>),
ok.
-test_find_in_binary() ->
+find_in_binary_test() ->
{exact, 0} = find_in_binary(<<"foo">>, <<"foobarbaz">>),
{exact, 1} = find_in_binary(<<"oo">>, <<"foobarbaz">>),
{exact, 8} = find_in_binary(<<"z">>, <<"foobarbaz">>),
@@ -435,7 +650,13 @@ test_find_in_binary() ->
{partial, 1, 3} = find_in_binary(<<"foobar">>, <<"afoo">>),
ok.
-test_flash_parse() ->
+flash_parse_http_test() ->
+ flash_parse(plain).
+
+flash_parse_https_test() ->
+ flash_parse(ssl).
+
+flash_parse(Transport) ->
ContentType = "multipart/form-data; boundary=----------ei4GI3GI3Ij5Ef1ae0KM7Ij5ei4Ij5",
"----------ei4GI3GI3Ij5Ef1ae0KM7Ij5ei4Ij5" = get_boundary(ContentType),
BinContent = <<"------------ei4GI3GI3Ij5Ef1ae0KM7Ij5ei4Ij5\r\nContent-Disposition: form-data; name=\"Filename\"\r\n\r\nhello.txt\r\n------------ei4GI3GI3Ij5Ef1ae0KM7Ij5ei4Ij5\r\nContent-Disposition: form-data; name=\"success_action_status\"\r\n\r\n201\r\n------------ei4GI3GI3Ij5Ef1ae0KM7Ij5ei4Ij5\r\nContent-Disposition: form-data; name=\"file\"; filename=\"hello.txt\"\r\nContent-Type: application/octet-stream\r\n\r\nhello\n\r\n------------ei4GI3GI3Ij5Ef1ae0KM7Ij5ei4Ij5\r\nContent-Disposition: form-data; name=\"Upload\"\r\n\r\nSubmit Query\r\n------------ei4GI3GI3Ij5Ef1ae0KM7Ij5ei4Ij5--">>,
@@ -463,8 +684,8 @@ test_flash_parse() ->
eof],
TestCallback = fun (Next) -> test_callback(Next, Expect) end,
ServerFun = fun (Socket) ->
- ok = gen_tcp:send(Socket, BinContent),
- exit(normal)
+ ok = mochiweb_socket:send(Socket, BinContent),
+ exit(normal)
end,
ClientFun = fun (Socket) ->
Req = fake_request(Socket, ContentType,
@@ -473,10 +694,16 @@ test_flash_parse() ->
{0, <<>>, ok} = Res,
ok
end,
- ok = with_socket_server(ServerFun, ClientFun),
+ ok = with_socket_server(Transport, ServerFun, ClientFun),
ok.
-test_flash_parse2() ->
+flash_parse2_http_test() ->
+ flash_parse2(plain).
+
+flash_parse2_https_test() ->
+ flash_parse2(ssl).
+
+flash_parse2(Transport) ->
ContentType = "multipart/form-data; boundary=----------ei4GI3GI3Ij5Ef1ae0KM7Ij5ei4Ij5",
"----------ei4GI3GI3Ij5Ef1ae0KM7Ij5ei4Ij5" = get_boundary(ContentType),
Chunk = iolist_to_binary(string:copies("%", 4096)),
@@ -505,8 +732,8 @@ test_flash_parse2() ->
eof],
TestCallback = fun (Next) -> test_callback(Next, Expect) end,
ServerFun = fun (Socket) ->
- ok = gen_tcp:send(Socket, BinContent),
- exit(normal)
+ ok = mochiweb_socket:send(Socket, BinContent),
+ exit(normal)
end,
ClientFun = fun (Socket) ->
Req = fake_request(Socket, ContentType,
@@ -515,16 +742,83 @@ test_flash_parse2() ->
{0, <<>>, ok} = Res,
ok
end,
- ok = with_socket_server(ServerFun, ClientFun),
+ ok = with_socket_server(Transport, ServerFun, ClientFun),
+ ok.
+
+parse_headers_test() ->
+ ?assertEqual([], parse_headers(<<>>)).
+
+flash_multipart_hack_test() ->
+ Buffer = <<"prefix-">>,
+ Prefix = <<"prefix">>,
+ State = #mp{length=0, buffer=Buffer, boundary=Prefix},
+ ?assertEqual(State,
+ flash_multipart_hack(State)).
+
+parts_to_body_single_test() ->
+ {HL, B} = parts_to_body([{0, 5, <<"01234">>}],
+ "text/plain",
+ 10),
+ [{"Content-Range", Range},
+ {"Content-Type", Type}] = lists:sort(HL),
+ ?assertEqual(
+ <<"bytes 0-5/10">>,
+ iolist_to_binary(Range)),
+ ?assertEqual(
+ <<"text/plain">>,
+ iolist_to_binary(Type)),
+ ?assertEqual(
+ <<"01234">>,
+ iolist_to_binary(B)),
+ ok.
+
+parts_to_body_multi_test() ->
+ {[{"Content-Type", Type}],
+ _B} = parts_to_body([{0, 5, <<"01234">>}, {5, 10, <<"56789">>}],
+ "text/plain",
+ 10),
+ ?assertMatch(
+ <<"multipart/byteranges; boundary=", _/binary>>,
+ iolist_to_binary(Type)),
ok.
-test() ->
- test_find_in_binary(),
- test_find_boundary(),
- test_parse(),
- test_parse2(),
- test_parse3(),
- test_parse_form(),
- test_flash_parse(),
- test_flash_parse2(),
+parts_to_multipart_body_test() ->
+ {[{"Content-Type", V}], B} = parts_to_multipart_body(
+ [{0, 5, <<"01234">>}, {5, 10, <<"56789">>}],
+ "text/plain",
+ 10,
+ "BOUNDARY"),
+ MB = multipart_body(
+ [{0, 5, <<"01234">>}, {5, 10, <<"56789">>}],
+ "text/plain",
+ "BOUNDARY",
+ 10),
+ ?assertEqual(
+ <<"multipart/byteranges; boundary=BOUNDARY">>,
+ iolist_to_binary(V)),
+ ?assertEqual(
+ iolist_to_binary(MB),
+ iolist_to_binary(B)),
ok.
+
+multipart_body_test() ->
+ ?assertEqual(
+ <<"--BOUNDARY--\r\n">>,
+ iolist_to_binary(multipart_body([], "text/plain", "BOUNDARY", 0))),
+ ?assertEqual(
+ <<"--BOUNDARY\r\n"
+ "Content-Type: text/plain\r\n"
+ "Content-Range: bytes 0-5/10\r\n\r\n"
+ "01234\r\n"
+ "--BOUNDARY\r\n"
+ "Content-Type: text/plain\r\n"
+ "Content-Range: bytes 5-10/10\r\n\r\n"
+ "56789\r\n"
+ "--BOUNDARY--\r\n">>,
+ iolist_to_binary(multipart_body([{0, 5, <<"01234">>}, {5, 10, <<"56789">>}],
+ "text/plain",
+ "BOUNDARY",
+ 10))),
+ ok.
+
+-endif.
diff --git a/src/mochiweb/mochiweb_request.erl b/src/mochiweb/mochiweb_request.erl
index fc296f40..ffe4e9eb 100644
--- a/src/mochiweb/mochiweb_request.erl
+++ b/src/mochiweb/mochiweb_request.erl
@@ -7,9 +7,9 @@
-author('bob@mochimedia.com').
-include_lib("kernel/include/file.hrl").
+-include("internal.hrl").
-define(QUIP, "Any of you quaids got a smint?").
--define(READ_SIZE, 8192).
-export([get_header_value/1, get_primary_header_value/1, get/1, dump/0]).
-export([send/1, recv/1, recv/2, recv_body/0, recv_body/1, stream_body/3]).
@@ -20,7 +20,8 @@
-export([should_close/0, cleanup/0]).
-export([parse_cookie/0, get_cookie_value/1]).
-export([serve_file/2, serve_file/3]).
--export([test/0]).
+-export([accepted_encodings/1]).
+-export([accepts_content_type/1]).
-define(SAVE_QS, mochiweb_request_qs).
-define(SAVE_PATH, mochiweb_request_path).
@@ -39,8 +40,8 @@
%% @type response(). A mochiweb_response parameterized module instance.
%% @type ioheaders() = headers() | [{key(), value()}].
-% 5 minute default idle timeout
--define(IDLE_TIMEOUT, 300000).
+% 10 second default idle timeout
+-define(IDLE_TIMEOUT, 10000).
% Maximum recv_body() length of 1MB
-define(MAX_RECV_BODY, (1024*1024)).
@@ -53,12 +54,23 @@ get_header_value(K) ->
get_primary_header_value(K) ->
mochiweb_headers:get_primary_value(K, Headers).
-%% @type field() = socket | method | raw_path | version | headers | peer | path | body_length | range
+%% @type field() = socket | scheme | method | raw_path | version | headers | peer | path | body_length | range
%% @spec get(field()) -> term()
-%% @doc Return the internal representation of the given field.
+%% @doc Return the internal representation of the given field. If
+%% <code>socket</code> is requested on an HTTPS connection, then
+%% an ssl socket will be returned as <code>{ssl, SslSocket}</code>.
+%% You can use <code>SslSocket</code> with the <code>ssl</code>
+%% application, e.g.: <code>ssl:peercert(SslSocket)</code>.
get(socket) ->
Socket;
+get(scheme) ->
+ case mochiweb_socket:type(Socket) of
+ plain ->
+ http;
+ ssl ->
+ https
+ end;
get(method) ->
Method;
get(raw_path) ->
@@ -68,7 +80,7 @@ get(version) ->
get(headers) ->
Headers;
get(peer) ->
- case inet:peername(Socket) of
+ case mochiweb_socket:peername(Socket) of
{ok, {Addr={10, _, _, _}, _Port}} ->
case get_header_value("x-forwarded-for") of
undefined ->
@@ -84,7 +96,9 @@ get(peer) ->
string:strip(lists:last(string:tokens(Hosts, ",")))
end;
{ok, {Addr, _Port}} ->
- inet_parse:ntoa(Addr)
+ inet_parse:ntoa(Addr);
+ {error, enotconn} ->
+ exit(normal)
end;
get(path) ->
case erlang:get(?SAVE_PATH) of
@@ -97,13 +111,20 @@ get(path) ->
Cached
end;
get(body_length) ->
- erlang:get(?SAVE_BODY_LENGTH);
+ case erlang:get(?SAVE_BODY_LENGTH) of
+ undefined ->
+ BodyLength = body_length(),
+ put(?SAVE_BODY_LENGTH, {cached, BodyLength}),
+ BodyLength;
+ {cached, Cached} ->
+ Cached
+ end;
get(range) ->
case get_header_value(range) of
undefined ->
undefined;
RawRange ->
- parse_range_request(RawRange)
+ mochiweb_http:parse_range_request(RawRange)
end.
%% @spec dump() -> {mochiweb_request, [{atom(), term()}]}
@@ -118,7 +139,7 @@ dump() ->
%% @spec send(iodata()) -> ok
%% @doc Send data over the socket.
send(Data) ->
- case gen_tcp:send(Socket, Data) of
+ case mochiweb_socket:send(Socket, Data) of
ok ->
ok;
_ ->
@@ -135,7 +156,7 @@ recv(Length) ->
%% @doc Receive Length bytes from the client as a binary, with the given
%% Timeout in msec.
recv(Length, Timeout) ->
- case gen_tcp:recv(Socket, Length, Timeout) of
+ case mochiweb_socket:recv(Socket, Length, Timeout) of
{ok, Data} ->
put(?SAVE_RECV, true),
Data;
@@ -171,20 +192,24 @@ recv_body() ->
%% @doc Receive the body of the HTTP request (defined by Content-Length).
%% Will receive up to MaxBody bytes.
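+%% The result is cached, so repeated calls return the same binary.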
recv_body(MaxBody) ->
- % we could use a sane constant for max chunk size
- Body = stream_body(?MAX_RECV_BODY, fun
- ({0, _ChunkedFooter}, {_LengthAcc, BinAcc}) ->
- iolist_to_binary(lists:reverse(BinAcc));
- ({Length, Bin}, {LengthAcc, BinAcc}) ->
- NewLength = Length + LengthAcc,
- if NewLength > MaxBody ->
- exit({body_too_large, chunked});
- true ->
- {NewLength, [Bin | BinAcc]}
- end
- end, {0, []}, MaxBody),
- put(?SAVE_BODY, Body),
- Body.
+ case erlang:get(?SAVE_BODY) of
+ undefined ->
+ % we could use a sane constant for max chunk size
+ Body = stream_body(?MAX_RECV_BODY, fun
+ ({0, _ChunkedFooter}, {_LengthAcc, BinAcc}) ->
+ iolist_to_binary(lists:reverse(BinAcc));
+ ({Length, Bin}, {LengthAcc, BinAcc}) ->
+ NewLength = Length + LengthAcc,
+ if NewLength > MaxBody ->
+ exit({body_too_large, chunked});
+ true ->
+ {NewLength, [Bin | BinAcc]}
+ end
+ end, {0, []}, MaxBody),
+ put(?SAVE_BODY, Body),
+ Body;
+ Cached -> Cached
+ end.
stream_body(MaxChunkSize, ChunkFun, FunState) ->
stream_body(MaxChunkSize, ChunkFun, FunState, undefined).
@@ -241,7 +266,7 @@ start_response({Code, ResponseHeaders}) ->
%% ResponseHeaders.
start_raw_response({Code, ResponseHeaders}) ->
F = fun ({K, V}, Acc) ->
- [make_io(K), <<": ">>, V, <<"\r\n">> | Acc]
+ [mochiweb_util:make_io(K), <<": ">>, V, <<"\r\n">> | Acc]
end,
End = lists:foldl(F, [<<"\r\n">>],
mochiweb_headers:to_list(ResponseHeaders)),
@@ -265,13 +290,13 @@ start_response_length({Code, ResponseHeaders, Length}) ->
%% will be set by the Body length, and the server will insert header
%% defaults.
respond({Code, ResponseHeaders, {file, IoDevice}}) ->
- Length = iodevice_size(IoDevice),
+ Length = mochiweb_io:iodevice_size(IoDevice),
Response = start_response_length({Code, ResponseHeaders, Length}),
case Method of
'HEAD' ->
ok;
_ ->
- iodevice_stream(IoDevice)
+ mochiweb_io:iodevice_stream(fun send/1, IoDevice)
end,
Response;
respond({Code, ResponseHeaders, chunked}) ->
@@ -326,8 +351,12 @@ ok({ContentType, Body}) ->
ok({ContentType, ResponseHeaders, Body}) ->
HResponse = mochiweb_headers:make(ResponseHeaders),
case THIS:get(range) of
- X when X =:= undefined; X =:= fail ->
- HResponse1 = mochiweb_headers:enter("Content-Type", ContentType, HResponse),
+ X when (X =:= undefined orelse X =:= fail) orelse Body =:= chunked ->
+ %% http://code.google.com/p/mochiweb/issues/detail?id=54
+ %% Range header not supported when chunked, return 200 and provide
+ %% full response.
+ HResponse1 = mochiweb_headers:enter("Content-Type", ContentType,
+ HResponse),
respond({200, HResponse1, Body});
Ranges ->
{PartList, Size} = range_parts(Body, Ranges),
@@ -340,7 +369,7 @@ ok({ContentType, ResponseHeaders, Body}) ->
respond({200, HResponse1, Body});
PartList ->
{RangeHeaders, RangeBody} =
- parts_to_body(PartList, ContentType, Size),
+ mochiweb_multipart:parts_to_body(PartList, ContentType, Size),
HResponse1 = mochiweb_headers:enter_from_list(
[{"Accept-Ranges", "bytes"} |
RangeHeaders],
@@ -457,26 +486,23 @@ stream_chunked_body(MaxChunkSize, Fun, FunState) ->
stream_unchunked_body(0, Fun, FunState) ->
Fun({0, <<>>}, FunState);
stream_unchunked_body(Length, Fun, FunState) when Length > 0 ->
- Bin = recv(0),
- BinSize = byte_size(Bin),
- if BinSize > Length ->
- <<OurBody:Length/binary, Extra/binary>> = Bin,
- gen_tcp:unrecv(Socket, Extra),
- NewState = Fun({Length, OurBody}, FunState),
- stream_unchunked_body(0, Fun, NewState);
- true ->
- NewState = Fun({BinSize, Bin}, FunState),
- stream_unchunked_body(Length - BinSize, Fun, NewState)
- end.
-
+ PktSize = case Length > ?RECBUF_SIZE of
+ true ->
+ ?RECBUF_SIZE;
+ false ->
+ Length
+ end,
+ Bin = recv(PktSize),
+ NewState = Fun({PktSize, Bin}, FunState),
+ stream_unchunked_body(Length - PktSize, Fun, NewState).
%% @spec read_chunk_length() -> integer()
%% @doc Read the length of the next HTTP chunk.
read_chunk_length() ->
- inet:setopts(Socket, [{packet, line}]),
- case gen_tcp:recv(Socket, 0, ?IDLE_TIMEOUT) of
+ mochiweb_socket:setopts(Socket, [{packet, line}]),
+ case mochiweb_socket:recv(Socket, 0, ?IDLE_TIMEOUT) of
{ok, Header} ->
- inet:setopts(Socket, [{packet, raw}]),
+ mochiweb_socket:setopts(Socket, [{packet, raw}]),
Splitter = fun (C) ->
C =/= $\r andalso C =/= $\n andalso C =/= $
end,
@@ -490,9 +516,9 @@ read_chunk_length() ->
%% @doc Read in an HTTP chunk of the given length. If Length is 0, then read the
%% HTTP footers (as a list of binaries, since they're nominal).
read_chunk(0) ->
- inet:setopts(Socket, [{packet, line}]),
+ mochiweb_socket:setopts(Socket, [{packet, line}]),
F = fun (F1, Acc) ->
- case gen_tcp:recv(Socket, 0, ?IDLE_TIMEOUT) of
+ case mochiweb_socket:recv(Socket, 0, ?IDLE_TIMEOUT) of
{ok, <<"\r\n">>} ->
Acc;
{ok, Footer} ->
@@ -502,10 +528,11 @@ read_chunk(0) ->
end
end,
Footers = F(F, []),
- inet:setopts(Socket, [{packet, raw}]),
+ mochiweb_socket:setopts(Socket, [{packet, raw}]),
+ put(?SAVE_RECV, true),
Footers;
read_chunk(Length) ->
- case gen_tcp:recv(Socket, 2 + Length, ?IDLE_TIMEOUT) of
+ case mochiweb_socket:recv(Socket, 2 + Length, ?IDLE_TIMEOUT) of
{ok, <<Chunk:Length/binary, "\r\n">>} ->
Chunk;
_ ->
@@ -600,13 +627,6 @@ server_headers() ->
[{"Server", "MochiWeb/1.0 (" ++ ?QUIP ++ ")"},
{"Date", httpd_util:rfc1123_date()}].
-make_io(Atom) when is_atom(Atom) ->
- atom_to_list(Atom);
-make_io(Integer) when is_integer(Integer) ->
- integer_to_list(Integer);
-make_io(Io) when is_list(Io); is_binary(Io) ->
- Io.
-
make_code(X) when is_integer(X) ->
[integer_to_list(X), [" " | httpd_util:reason_phrase(X)]];
make_code(Io) when is_list(Io); is_binary(Io) ->
@@ -617,56 +637,10 @@ make_version({1, 0}) ->
make_version(_) ->
<<"HTTP/1.1 ">>.
-iodevice_stream(IoDevice) ->
- case file:read(IoDevice, ?READ_SIZE) of
- eof ->
- ok;
- {ok, Data} ->
- ok = send(Data),
- iodevice_stream(IoDevice)
- end.
-
-
-parts_to_body([{Start, End, Body}], ContentType, Size) ->
- %% return body for a range reponse with a single body
- HeaderList = [{"Content-Type", ContentType},
- {"Content-Range",
- ["bytes ",
- make_io(Start), "-", make_io(End),
- "/", make_io(Size)]}],
- {HeaderList, Body};
-parts_to_body(BodyList, ContentType, Size) when is_list(BodyList) ->
- %% return
- %% header Content-Type: multipart/byteranges; boundary=441934886133bdee4
- %% and multipart body
- Boundary = mochihex:to_hex(crypto:rand_bytes(8)),
- HeaderList = [{"Content-Type",
- ["multipart/byteranges; ",
- "boundary=", Boundary]}],
- MultiPartBody = multipart_body(BodyList, ContentType, Boundary, Size),
-
- {HeaderList, MultiPartBody}.
-
-multipart_body([], _ContentType, Boundary, _Size) ->
- ["--", Boundary, "--\r\n"];
-multipart_body([{Start, End, Body} | BodyList], ContentType, Boundary, Size) ->
- ["--", Boundary, "\r\n",
- "Content-Type: ", ContentType, "\r\n",
- "Content-Range: ",
- "bytes ", make_io(Start), "-", make_io(End),
- "/", make_io(Size), "\r\n\r\n",
- Body, "\r\n"
- | multipart_body(BodyList, ContentType, Boundary, Size)].
-
-iodevice_size(IoDevice) ->
- {ok, Size} = file:position(IoDevice, eof),
- {ok, 0} = file:position(IoDevice, bof),
- Size.
-
range_parts({file, IoDevice}, Ranges) ->
- Size = iodevice_size(IoDevice),
+ Size = mochiweb_io:iodevice_size(IoDevice),
F = fun (Spec, Acc) ->
- case range_skip_length(Spec, Size) of
+ case mochiweb_http:range_skip_length(Spec, Size) of
invalid_range ->
Acc;
V ->
@@ -684,7 +658,7 @@ range_parts(Body0, Ranges) ->
Body = iolist_to_binary(Body0),
Size = size(Body),
F = fun(Spec, Acc) ->
- case range_skip_length(Spec, Size) of
+ case mochiweb_http:range_skip_length(Spec, Size) of
invalid_range ->
Acc;
{Skip, Length} ->
@@ -694,133 +668,101 @@ range_parts(Body0, Ranges) ->
end,
{lists:foldr(F, [], Ranges), Size}.
-range_skip_length(Spec, Size) ->
- case Spec of
- {none, R} when R =< Size, R >= 0 ->
- {Size - R, R};
- {none, _OutOfRange} ->
- {0, Size};
- {R, none} when R >= 0, R < Size ->
- {R, Size - R};
- {_OutOfRange, none} ->
- invalid_range;
- {Start, End} when 0 =< Start, Start =< End, End < Size ->
- {Start, End - Start + 1};
- {_OutOfRange, _End} ->
- invalid_range
+%% @spec accepted_encodings([encoding()]) -> [encoding()] | bad_accept_encoding_value
+%% @type encoding() = string().
+%%
+%% @doc Returns the list of encodings accepted by a request. Encodings that
+%% are not supported by the server are excluded from the result. The list
+%% is computed from the "Accept-Encoding" header and its elements are
+%% ordered in descending order of their Q values.
+%%
+%% Section 14.3 of RFC 2616 (HTTP/1.1) describes the "Accept-Encoding"
+%% header and the process of determining which server-supported encodings
+%% may be used to encode the body of the request's response.
+%%
+%% Examples
+%%
+%% 1) For a missing "Accept-Encoding" header:
+%% accepted_encodings(["gzip", "identity"]) -> ["identity"]
+%%
+%% 2) For an "Accept-Encoding" header with value "gzip, deflate":
+%% accepted_encodings(["gzip", "identity"]) -> ["gzip", "identity"]
+%%
+%% 3) For an "Accept-Encoding" header with value "gzip;q=0.5, deflate":
+%% accepted_encodings(["gzip", "deflate", "identity"]) ->
+%% ["deflate", "gzip", "identity"]
+%%
+accepted_encodings(SupportedEncodings) ->
+ AcceptEncodingHeader = case get_header_value("Accept-Encoding") of
+ undefined ->
+ "";
+ Value ->
+ Value
+ end,
+ case mochiweb_util:parse_qvalues(AcceptEncodingHeader) of
+ invalid_qvalue_string ->
+ bad_accept_encoding_value;
+ QList ->
+ mochiweb_util:pick_accepted_encodings(
+ QList, SupportedEncodings, "identity"
+ )
end.
-parse_range_request(RawRange) when is_list(RawRange) ->
- try
- "bytes=" ++ RangeString = RawRange,
- Ranges = string:tokens(RangeString, ","),
- lists:map(fun ("-" ++ V) ->
- {none, list_to_integer(V)};
- (R) ->
- case string:tokens(R, "-") of
- [S1, S2] ->
- {list_to_integer(S1), list_to_integer(S2)};
- [S] ->
- {list_to_integer(S), none}
- end
- end,
- Ranges)
- catch
- _:_ ->
- fail
+%% @spec accepts_content_type(string() | binary()) -> boolean() | bad_accept_header
+%%
+%% @doc Determines whether a request accepts a given media type by analyzing its
+%% "Accept" header.
+%%
+%% Examples
+%%
+%% 1) For a missing "Accept" header:
+%% accepts_content_type("application/json") -> true
+%%
+%% 2) For an "Accept" header with value "text/plain, application/*":
+%% accepts_content_type("application/json") -> true
+%%
+%% 3) For an "Accept" header with value "text/plain, */*; q=0.0":
+%% accepts_content_type("application/json") -> false
+%%
+%% 4) For an "Accept" header with value "text/plain; q=0.5, */*; q=0.1":
+%% accepts_content_type("application/json") -> true
+%%
+%% 5) For an "Accept" header with value "text/*; q=0.0, */*":
+%% accepts_content_type("text/plain") -> false
+%%
+accepts_content_type(ContentType) when is_binary(ContentType) ->
+ accepts_content_type(binary_to_list(ContentType));
+accepts_content_type(ContentType1) ->
+ ContentType = re:replace(ContentType1, "\\s", "", [global, {return, list}]),
+ AcceptHeader = case get_header_value("Accept") of
+ undefined ->
+ "*/*";
+ Value ->
+ Value
+ end,
+ case mochiweb_util:parse_qvalues(AcceptHeader) of
+ invalid_qvalue_string ->
+ bad_accept_header;
+ QList ->
+ [MainType, _SubType] = string:tokens(ContentType, "/"),
+ SuperType = MainType ++ "/*",
+ lists:any(
+ fun({"*/*", Q}) when Q > 0.0 ->
+ true;
+ ({Type, Q}) when Q > 0.0 ->
+ Type =:= ContentType orelse Type =:= SuperType;
+ (_) ->
+ false
+ end,
+ QList
+ ) andalso
+ (not lists:member({ContentType, 0.0}, QList)) andalso
+ (not lists:member({SuperType, 0.0}, QList))
end.
-
-test() ->
- ok = test_range(),
- ok.
-
-test_range() ->
- %% valid, single ranges
- io:format("Testing parse_range_request with valid single ranges~n"),
- io:format("1"),
- [{20, 30}] = parse_range_request("bytes=20-30"),
- io:format("2"),
- [{20, none}] = parse_range_request("bytes=20-"),
- io:format("3"),
- [{none, 20}] = parse_range_request("bytes=-20"),
- io:format(".. ok ~n"),
-
- %% invalid, single ranges
- io:format("Testing parse_range_request with invalid ranges~n"),
- io:format("1"),
- fail = parse_range_request(""),
- io:format("2"),
- fail = parse_range_request("garbage"),
- io:format("3"),
- fail = parse_range_request("bytes=-20-30"),
- io:format(".. ok ~n"),
-
- %% valid, multiple range
- io:format("Testing parse_range_request with valid multiple ranges~n"),
- io:format("1"),
- [{20, 30}, {50, 100}, {110, 200}] =
- parse_range_request("bytes=20-30,50-100,110-200"),
- io:format("2"),
- [{20, none}, {50, 100}, {none, 200}] =
- parse_range_request("bytes=20-,50-100,-200"),
- io:format(".. ok~n"),
-
- %% no ranges
- io:format("Testing out parse_range_request with no ranges~n"),
- io:format("1"),
- [] = parse_range_request("bytes="),
- io:format(".. ok~n"),
-
- Body = <<"012345678901234567890123456789012345678901234567890123456789">>,
- BodySize = byte_size(Body), %% 60
- BodySize = 60,
-
- %% these values assume BodySize =:= 60
- io:format("Testing out range_skip_length on valid ranges~n"),
- io:format("1"),
- {1,9} = range_skip_length({1,9}, BodySize), %% 1-9
- io:format("2"),
- {10,10} = range_skip_length({10,19}, BodySize), %% 10-19
- io:format("3"),
- {40, 20} = range_skip_length({none, 20}, BodySize), %% -20
- io:format("4"),
- {30, 30} = range_skip_length({30, none}, BodySize), %% 30-
- io:format(".. ok ~n"),
-
- %% valid edge cases for range_skip_length
- io:format("Testing out range_skip_length on valid edge case ranges~n"),
- io:format("1"),
- {BodySize, 0} = range_skip_length({none, 0}, BodySize),
- io:format("2"),
- {0, BodySize} = range_skip_length({none, BodySize}, BodySize),
- io:format("3"),
- {0, BodySize} = range_skip_length({0, none}, BodySize),
- BodySizeLess1 = BodySize - 1,
- io:format("4"),
- {BodySizeLess1, 1} = range_skip_length({BodySize - 1, none}, BodySize),
-
- %% out of range, return whole thing
- io:format("5"),
- {0, BodySize} = range_skip_length({none, BodySize + 1}, BodySize),
- io:format("6"),
- {0, BodySize} = range_skip_length({none, -1}, BodySize),
- io:format(".. ok ~n"),
-
- %% invalid ranges
- io:format("Testing out range_skip_length on invalid ranges~n"),
- io:format("1"),
- invalid_range = range_skip_length({-1, 30}, BodySize),
- io:format("2"),
- invalid_range = range_skip_length({0, BodySize + 1}, BodySize),
- io:format("3"),
- invalid_range = range_skip_length({-1, BodySize + 1}, BodySize),
- io:format("4"),
- invalid_range = range_skip_length({BodySize, 40}, BodySize),
- io:format("5"),
- invalid_range = range_skip_length({-1, none}, BodySize),
- io:format("6"),
- invalid_range = range_skip_length({BodySize, none}, BodySize),
- io:format(".. ok ~n"),
- ok.
-
+%%
+%% Tests
+%%
+-include_lib("eunit/include/eunit.hrl").
+-ifdef(TEST).
+-endif.
diff --git a/src/mochiweb/mochiweb_response.erl b/src/mochiweb/mochiweb_response.erl
index 6285c4c4..ab8ee61c 100644
--- a/src/mochiweb/mochiweb_response.erl
+++ b/src/mochiweb/mochiweb_response.erl
@@ -54,3 +54,11 @@ write_chunk(Data) ->
_ ->
send(Data)
end.
+
+
+%%
+%% Tests
+%%
+-include_lib("eunit/include/eunit.hrl").
+-ifdef(TEST).
+-endif.
diff --git a/src/mochiweb/mochiweb_skel.erl b/src/mochiweb/mochiweb_skel.erl
index 36b48be5..76eefa60 100644
--- a/src/mochiweb/mochiweb_skel.erl
+++ b/src/mochiweb/mochiweb_skel.erl
@@ -14,10 +14,11 @@ skelcopy(DestDir, Name) ->
N + 1
end,
skelcopy(src(), DestDir, Name, LDst),
+ DestLink = filename:join([DestDir, Name, "deps", "mochiweb-src"]),
+ ok = filelib:ensure_dir(DestLink),
ok = file:make_symlink(
- filename:join(filename:dirname(code:which(?MODULE)), ".."),
- filename:join([DestDir, Name, "deps", "mochiweb-src"])).
-
+ filename:join(filename:dirname(code:which(?MODULE)), ".."),
+ DestLink).
%% Internal API
@@ -37,17 +38,22 @@ skelcopy(Src, DestDir, Name, LDst) ->
EDst = lists:nthtail(LDst, Dir),
ok = ensuredir(Dir),
ok = file:write_file_info(Dir, #file_info{mode=Mode}),
- {ok, Files} = file:list_dir(Src),
- io:format("~s/~n", [EDst]),
- lists:foreach(fun ("." ++ _) -> ok;
- (F) ->
- skelcopy(filename:join(Src, F),
- Dir,
- Name,
- LDst)
- end,
- Files),
- ok;
+ case filename:basename(Src) of
+ "ebin" ->
+ ok;
+ _ ->
+ {ok, Files} = file:list_dir(Src),
+ io:format("~s/~n", [EDst]),
+ lists:foreach(fun ("." ++ _) -> ok;
+ (F) ->
+ skelcopy(filename:join(Src, F),
+ Dir,
+ Name,
+ LDst)
+ end,
+ Files),
+ ok
+ end;
{ok, #file_info{type=regular, mode=Mode}} ->
OutFile = filename:join(DestDir, Dest),
{ok, B} = file:read_file(Src),
@@ -71,3 +77,10 @@ ensuredir(Dir) ->
E ->
E
end.
+
+%%
+%% Tests
+%%
+-include_lib("eunit/include/eunit.hrl").
+-ifdef(TEST).
+-endif.
diff --git a/src/mochiweb/mochiweb_socket.erl b/src/mochiweb/mochiweb_socket.erl
new file mode 100644
index 00000000..76b018c8
--- /dev/null
+++ b/src/mochiweb/mochiweb_socket.erl
@@ -0,0 +1,84 @@
+%% @copyright 2010 Mochi Media, Inc.
+
+%% @doc MochiWeb socket - wrapper for plain and ssl sockets.
+
+-module(mochiweb_socket).
+
+-export([listen/4, accept/1, recv/3, send/2, close/1, port/1, peername/1,
+ setopts/2, type/1]).
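+
+%% Each function accepts either a plain gen_tcp/inet socket or an
+%% {ssl, SslSocket} tuple, and dispatches to the gen_tcp/inet or ssl
+%% module accordingly.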
+
+-define(ACCEPT_TIMEOUT, 2000).
+
+listen(Ssl, Port, Opts, SslOpts) ->
+ case Ssl of
+ true ->
+ case ssl:listen(Port, Opts ++ SslOpts) of
+ {ok, ListenSocket} ->
+ {ok, {ssl, ListenSocket}};
+ {error, _} = Err ->
+ Err
+ end;
+ false ->
+ gen_tcp:listen(Port, Opts)
+ end.
+
+accept({ssl, ListenSocket}) ->
+ % There's a bug in ssl:transport_accept/2 at the moment, which is the
+ % reason for the try...catch block. Should be fixed in OTP R14.
+ try ssl:transport_accept(ListenSocket) of
+ {ok, Socket} ->
+ case ssl:ssl_accept(Socket) of
+ ok ->
+ {ok, {ssl, Socket}};
+ {error, _} = Err ->
+ Err
+ end;
+ {error, _} = Err ->
+ Err
+ catch
+ error:{badmatch, {error, Reason}} ->
+ {error, Reason}
+ end;
+accept(ListenSocket) ->
+ gen_tcp:accept(ListenSocket, ?ACCEPT_TIMEOUT).
+
+recv({ssl, Socket}, Length, Timeout) ->
+ ssl:recv(Socket, Length, Timeout);
+recv(Socket, Length, Timeout) ->
+ gen_tcp:recv(Socket, Length, Timeout).
+
+send({ssl, Socket}, Data) ->
+ ssl:send(Socket, Data);
+send(Socket, Data) ->
+ gen_tcp:send(Socket, Data).
+
+close({ssl, Socket}) ->
+ ssl:close(Socket);
+close(Socket) ->
+ gen_tcp:close(Socket).
+
+port({ssl, Socket}) ->
+ case ssl:sockname(Socket) of
+ {ok, {_, Port}} ->
+ {ok, Port};
+ {error, _} = Err ->
+ Err
+ end;
+port(Socket) ->
+ inet:port(Socket).
+
+peername({ssl, Socket}) ->
+ ssl:peername(Socket);
+peername(Socket) ->
+ inet:peername(Socket).
+
+setopts({ssl, Socket}, Opts) ->
+ ssl:setopts(Socket, Opts);
+setopts(Socket, Opts) ->
+ inet:setopts(Socket, Opts).
+
+type({ssl, _}) ->
+ ssl;
+type(_) ->
+ plain.
+
diff --git a/src/mochiweb/mochiweb_socket_server.erl b/src/mochiweb/mochiweb_socket_server.erl
index 7aafe290..1aae09ac 100644
--- a/src/mochiweb/mochiweb_socket_server.erl
+++ b/src/mochiweb/mochiweb_socket_server.erl
@@ -7,22 +7,28 @@
-author('bob@mochimedia.com').
-behaviour(gen_server).
+-include("internal.hrl").
+
-export([start/1, stop/1]).
-export([init/1, handle_call/3, handle_cast/2, terminate/2, code_change/3,
handle_info/2]).
-export([get/2]).
--export([acceptor_loop/1]).
-
-record(mochiweb_socket_server,
{port,
loop,
name=undefined,
+ %% NOTE: This is currently ignored.
max=2048,
ip=any,
listen=null,
- acceptor=null,
- backlog=128}).
+ nodelay=false,
+ backlog=128,
+ active_sockets=0,
+ acceptor_pool_size=16,
+ ssl=false,
+ ssl_opts=[{ssl_imp, new}],
+ acceptor_pool=sets:new()}).
start(State=#mochiweb_socket_server{}) ->
start_server(State);
@@ -54,6 +60,8 @@ parse_options([], State) ->
parse_options([{name, L} | Rest], State) when is_list(L) ->
Name = {local, list_to_atom(L)},
parse_options(Rest, State#mochiweb_socket_server{name=Name});
+parse_options([{name, A} | Rest], State) when A =:= undefined ->
+ parse_options(Rest, State#mochiweb_socket_server{name=A});
parse_options([{name, A} | Rest], State) when is_atom(A) ->
Name = {local, A},
parse_options(Rest, State#mochiweb_socket_server{name=Name});
@@ -79,16 +87,32 @@ parse_options([{loop, Loop} | Rest], State) ->
parse_options(Rest, State#mochiweb_socket_server{loop=Loop});
parse_options([{backlog, Backlog} | Rest], State) ->
parse_options(Rest, State#mochiweb_socket_server{backlog=Backlog});
+parse_options([{nodelay, NoDelay} | Rest], State) ->
+ parse_options(Rest, State#mochiweb_socket_server{nodelay=NoDelay});
+parse_options([{acceptor_pool_size, Max} | Rest], State) ->
+ MaxInt = ensure_int(Max),
+ parse_options(Rest,
+ State#mochiweb_socket_server{acceptor_pool_size=MaxInt});
parse_options([{max, Max} | Rest], State) ->
- MaxInt = case Max of
- Max when is_list(Max) ->
- list_to_integer(Max);
- Max when is_integer(Max) ->
- Max
- end,
- parse_options(Rest, State#mochiweb_socket_server{max=MaxInt}).
-
-start_server(State=#mochiweb_socket_server{name=Name}) ->
+ error_logger:info_report([{warning, "TODO: max is currently unsupported"},
+ {max, Max}]),
+ MaxInt = ensure_int(Max),
+ parse_options(Rest, State#mochiweb_socket_server{max=MaxInt});
+parse_options([{ssl, Ssl} | Rest], State) when is_boolean(Ssl) ->
+ parse_options(Rest, State#mochiweb_socket_server{ssl=Ssl});
+parse_options([{ssl_opts, SslOpts} | Rest], State) when is_list(SslOpts) ->
+ SslOpts1 = [{ssl_imp, new} | proplists:delete(ssl_imp, SslOpts)],
+ parse_options(Rest, State#mochiweb_socket_server{ssl_opts=SslOpts1}).
+
+start_server(State=#mochiweb_socket_server{ssl=Ssl, name=Name}) ->
+ case Ssl of
+ true ->
+ application:start(crypto),
+ application:start(public_key),
+ application:start(ssl);
+ false ->
+ void
+ end,
case Name of
undefined ->
gen_server:start_link(?MODULE, State, []);
@@ -96,6 +120,11 @@ start_server(State=#mochiweb_socket_server{name=Name}) ->
gen_server:start_link(Name, ?MODULE, State, [])
end.
+ensure_int(N) when is_integer(N) ->
+ N;
+ensure_int(S) when is_list(S) ->
+ list_to_integer(S).
+
ipv6_supported() ->
case (catch inet:getaddr("localhost", inet6)) of
{ok, _Addr} ->
@@ -104,15 +133,15 @@ ipv6_supported() ->
false
end.
-init(State=#mochiweb_socket_server{ip=Ip, port=Port, backlog=Backlog}) ->
+init(State=#mochiweb_socket_server{ip=Ip, port=Port, backlog=Backlog, nodelay=NoDelay}) ->
process_flag(trap_exit, true),
BaseOpts = [binary,
{reuseaddr, true},
{packet, 0},
{backlog, Backlog},
- {recbuf, 8192},
+ {recbuf, ?RECBUF_SIZE},
{active, false},
- {nodelay, true}],
+ {nodelay, NoDelay}],
Opts = case Ip of
any ->
case ipv6_supported() of % IPv4, and IPv6 if supported
@@ -124,7 +153,7 @@ init(State=#mochiweb_socket_server{ip=Ip, port=Port, backlog=Backlog}) ->
{_, _, _, _, _, _, _, _} -> % IPv6
[inet6, {ip, Ip} | BaseOpts]
end,
- case gen_tcp_listen(Port, Opts, State) of
+ case listen(Port, Opts, State) of
{stop, eacces} ->
case Port < 1024 of
true ->
@@ -132,7 +161,7 @@ init(State=#mochiweb_socket_server{ip=Ip, port=Port, backlog=Backlog}) ->
{ok, _} ->
case fdsrv:bind_socket(tcp, Port) of
{ok, Fd} ->
- gen_tcp_listen(Port, [{fd, Fd} | Opts], State);
+ listen(Port, [{fd, Fd} | Opts], State);
_ ->
{stop, fdsrv_bind_failed}
end;
@@ -146,47 +175,33 @@ init(State=#mochiweb_socket_server{ip=Ip, port=Port, backlog=Backlog}) ->
Other
end.
-gen_tcp_listen(Port, Opts, State) ->
- case gen_tcp:listen(Port, Opts) of
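+%% Spawn acceptor_pool_size acceptor processes, each linked to the
+%% server and blocking in accept on the shared listen socket.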
+new_acceptor_pool(Listen,
+ State=#mochiweb_socket_server{acceptor_pool=Pool,
+ acceptor_pool_size=Size,
+ loop=Loop}) ->
+ F = fun (_, S) ->
+ Pid = mochiweb_acceptor:start_link(self(), Listen, Loop),
+ sets:add_element(Pid, S)
+ end,
+ Pool1 = lists:foldl(F, Pool, lists:seq(1, Size)),
+ State#mochiweb_socket_server{acceptor_pool=Pool1}.
+
+listen(Port, Opts, State=#mochiweb_socket_server{ssl=Ssl, ssl_opts=SslOpts}) ->
+ case mochiweb_socket:listen(Ssl, Port, Opts, SslOpts) of
{ok, Listen} ->
- {ok, ListenPort} = inet:port(Listen),
- {ok, new_acceptor(State#mochiweb_socket_server{listen=Listen,
- port=ListenPort})};
+ {ok, ListenPort} = mochiweb_socket:port(Listen),
+ {ok, new_acceptor_pool(
+ Listen,
+ State#mochiweb_socket_server{listen=Listen,
+ port=ListenPort})};
{error, Reason} ->
{stop, Reason}
end.
-new_acceptor(State=#mochiweb_socket_server{max=0}) ->
- io:format("Not accepting new connections~n"),
- State#mochiweb_socket_server{acceptor=null};
-new_acceptor(State=#mochiweb_socket_server{listen=Listen,loop=Loop}) ->
- Pid = proc_lib:spawn_link(?MODULE, acceptor_loop,
- [{self(), Listen, Loop}]),
- State#mochiweb_socket_server{acceptor=Pid}.
-
-call_loop({M, F}, Socket) ->
- M:F(Socket);
-call_loop(Loop, Socket) ->
- Loop(Socket).
-
-acceptor_loop({Server, Listen, Loop}) ->
- case catch gen_tcp:accept(Listen) of
- {ok, Socket} ->
- gen_server:cast(Server, {accepted, self()}),
- call_loop(Loop, Socket);
- {error, closed} ->
- exit({error, closed});
- Other ->
- error_logger:error_report(
- [{application, mochiweb},
- "Accept failed error",
- lists:flatten(io_lib:format("~p", [Other]))]),
- exit({error, accept_failed})
- end.
-
-
do_get(port, #mochiweb_socket_server{port=Port}) ->
- Port.
+ Port;
+do_get(active_sockets, #mochiweb_socket_server{active_sockets=ActiveSockets}) ->
+ ActiveSockets.
handle_call({get, Property}, _From, State) ->
Res = do_get(Property, State),
@@ -195,16 +210,15 @@ handle_call(_Message, _From, State) ->
Res = error,
{reply, Res, State}.
-handle_cast({accepted, Pid},
- State=#mochiweb_socket_server{acceptor=Pid, max=Max}) ->
- % io:format("accepted ~p~n", [Pid]),
- State1 = State#mochiweb_socket_server{max=Max - 1},
- {noreply, new_acceptor(State1)};
+handle_cast({accepted, Pid, _Timing},
+ State=#mochiweb_socket_server{active_sockets=ActiveSockets}) ->
+ State1 = State#mochiweb_socket_server{active_sockets=1 + ActiveSockets},
+ {noreply, recycle_acceptor(Pid, State1)};
handle_cast(stop, State) ->
{stop, normal, State}.
terminate(_Reason, #mochiweb_socket_server{listen=Listen, port=Port}) ->
- gen_tcp:close(Listen),
+ mochiweb_socket:close(Listen),
case Port < 1024 of
true ->
catch fdsrv:stop(),
@@ -216,33 +230,43 @@ terminate(_Reason, #mochiweb_socket_server{listen=Listen, port=Port}) ->
code_change(_OldVsn, State, _Extra) ->
State.
-handle_info({'EXIT', Pid, normal},
- State=#mochiweb_socket_server{acceptor=Pid}) ->
- % io:format("normal acceptor down~n"),
- {noreply, new_acceptor(State)};
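+%% If the process that exited was one of the acceptors, replace it with
+%% a fresh one; otherwise it was a connection process, so decrement the
+%% active socket count.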
+recycle_acceptor(Pid, State=#mochiweb_socket_server{
+ acceptor_pool=Pool,
+ listen=Listen,
+ loop=Loop,
+ active_sockets=ActiveSockets}) ->
+ case sets:is_element(Pid, Pool) of
+ true ->
+ Acceptor = mochiweb_acceptor:start_link(self(), Listen, Loop),
+ Pool1 = sets:add_element(Acceptor, sets:del_element(Pid, Pool)),
+ State#mochiweb_socket_server{acceptor_pool=Pool1};
+ false ->
+ State#mochiweb_socket_server{active_sockets=ActiveSockets - 1}
+ end.
+
+handle_info({'EXIT', Pid, normal}, State) ->
+ {noreply, recycle_acceptor(Pid, State)};
handle_info({'EXIT', Pid, Reason},
- State=#mochiweb_socket_server{acceptor=Pid}) ->
- error_logger:error_report({?MODULE, ?LINE,
- {acceptor_error, Reason}}),
- timer:sleep(100),
- {noreply, new_acceptor(State)};
-handle_info({'EXIT', _LoopPid, Reason},
- State=#mochiweb_socket_server{acceptor=Pid, max=Max}) ->
- case Reason of
- normal ->
- ok;
- _ ->
+ State=#mochiweb_socket_server{acceptor_pool=Pool}) ->
+ case sets:is_element(Pid, Pool) of
+ true ->
+ %% If there was an unexpected error accepting, log and sleep.
error_logger:error_report({?MODULE, ?LINE,
- {child_error, Reason}})
+ {acceptor_error, Reason}}),
+ timer:sleep(100);
+ false ->
+ ok
end,
- State1 = State#mochiweb_socket_server{max=Max + 1},
- State2 = case Pid of
- null ->
- new_acceptor(State1);
- _ ->
- State1
- end,
- {noreply, State2};
+ {noreply, recycle_acceptor(Pid, State)};
handle_info(Info, State) ->
error_logger:info_report([{'INFO', Info}, {'State', State}]),
{noreply, State}.
+
+
+
+%%
+%% Tests
+%%
+-include_lib("eunit/include/eunit.hrl").
+-ifdef(TEST).
+-endif.
diff --git a/src/mochiweb/mochiweb_sup.erl b/src/mochiweb/mochiweb_sup.erl
index 5cb525b5..af7df9b3 100644
--- a/src/mochiweb/mochiweb_sup.erl
+++ b/src/mochiweb/mochiweb_sup.erl
@@ -32,3 +32,10 @@ upgrade() ->
init([]) ->
Processes = [],
{ok, {{one_for_one, 10, 10}, Processes}}.
+
+%%
+%% Tests
+%%
+-include_lib("eunit/include/eunit.hrl").
+-ifdef(TEST).
+-endif.
diff --git a/src/mochiweb/mochiweb_util.erl b/src/mochiweb/mochiweb_util.erl
index 73cacea4..62ff0d06 100644
--- a/src/mochiweb/mochiweb_util.erl
+++ b/src/mochiweb/mochiweb_util.erl
@@ -9,10 +9,11 @@
-export([path_split/1]).
-export([urlsplit/1, urlsplit_path/1, urlunsplit/1, urlunsplit_path/1]).
-export([guess_mime/1, parse_header/1]).
--export([shell_quote/1, cmd/1, cmd_string/1, cmd_port/2]).
+-export([shell_quote/1, cmd/1, cmd_string/1, cmd_port/2, cmd_status/1]).
-export([record_to_proplist/2, record_to_proplist/3]).
-export([safe_relative_path/1, partition/2]).
--export([test/0]).
+-export([parse_qvalues/1, pick_accepted_encodings/3]).
+-export([make_io/1]).
-define(PERCENT, 37). % $\%
-define(FULLSTOP, 46). % $\.
@@ -114,11 +115,32 @@ cmd(Argv) ->
%% @spec cmd_string([string()]) -> string()
%% @doc Create a shell quoted command string from a list of arguments.
cmd_string(Argv) ->
- join([shell_quote(X) || X <- Argv], " ").
+ string:join([shell_quote(X) || X <- Argv], " ").
+
+%% @spec cmd_status([string()]) -> {ExitStatus::integer(), Stdout::binary()}
+%% @doc Accumulate the output and exit status from the given application,
+%% which is spawned with cmd_port/2.
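+%%
+%% Usage sketch (assumes a POSIX `echo` on the PATH):
+%%   {0, <<"hello\n">>} = cmd_status(["echo", "hello"]).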
+cmd_status(Argv) ->
+ Port = cmd_port(Argv, [exit_status, stderr_to_stdout,
+ use_stdio, binary]),
+ try cmd_loop(Port, [])
+ after catch port_close(Port)
+ end.
+
+%% @spec cmd_loop(port(), list()) -> {ExitStatus::integer(), Stdout::binary()}
+%% @doc Accumulate the output and exit status from a port.
+cmd_loop(Port, Acc) ->
+ receive
+ {Port, {exit_status, Status}} ->
+ {Status, iolist_to_binary(lists:reverse(Acc))};
+ {Port, {data, Data}} ->
+ cmd_loop(Port, [Data | Acc])
+ end.
-%% @spec join([string()], Separator) -> string()
-%% @doc Join a list of strings together with the given separator
-%% string or char.
+%% @spec join([iolist()], iolist()) -> iolist()
+%% @doc Join a list of strings or binaries together with the given separator
+%% string, char, or binary. The output is flattened, but may be an
+%% iolist() instead of a string() if any of the inputs are binary().
join([], _Separator) ->
[];
join([S], _Separator) ->
@@ -159,10 +181,11 @@ quote_plus([C | Rest], Acc) ->
%% @spec urlencode([{Key, Value}]) -> string()
%% @doc URL encode the property list.
urlencode(Props) ->
- RevPairs = lists:foldl(fun ({K, V}, Acc) ->
- [[quote_plus(K), $=, quote_plus(V)] | Acc]
- end, [], Props),
- lists:flatten(revjoin(RevPairs, $&, [])).
+ Pairs = lists:foldr(
+ fun ({K, V}, Acc) ->
+ [quote_plus(K) ++ "=" ++ quote_plus(V) | Acc]
+ end, [], Props),
+ string:join(Pairs, "&").
%% @spec parse_qs(string() | binary()) -> [{Key, Value}]
%% @doc Parse a query string or application/x-www-form-urlencoded.
@@ -233,20 +256,31 @@ urlsplit(Url) ->
{Scheme, Netloc, Path, Query, Fragment}.
urlsplit_scheme(Url) ->
- urlsplit_scheme(Url, []).
+ case urlsplit_scheme(Url, []) of
+ no_scheme ->
+ {"", Url};
+ Res ->
+ Res
+ end.
-urlsplit_scheme([], Acc) ->
- {"", lists:reverse(Acc)};
-urlsplit_scheme(":" ++ Rest, Acc) ->
+urlsplit_scheme([C | Rest], Acc) when ((C >= $a andalso C =< $z) orelse
+ (C >= $A andalso C =< $Z) orelse
+ (C >= $0 andalso C =< $9) orelse
+ C =:= $+ orelse C =:= $- orelse
+ C =:= $.) ->
+ urlsplit_scheme(Rest, [C | Acc]);
+urlsplit_scheme([$: | Rest], Acc=[_ | _]) ->
{string:to_lower(lists:reverse(Acc)), Rest};
-urlsplit_scheme([C | Rest], Acc) ->
- urlsplit_scheme(Rest, [C | Acc]).
+urlsplit_scheme(_Rest, _Acc) ->
+ no_scheme.
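+
+%% With this change a colon inside a path no longer turns the prefix into a
+%% scheme (a sketch; the same case is asserted in urlsplit_test below):
+%%
+%%   {"", "", "/wiki/Category:Fruit", "", ""} =
+%%       mochiweb_util:urlsplit("/wiki/Category:Fruit").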
urlsplit_netloc("//" ++ Rest) ->
urlsplit_netloc(Rest, []);
urlsplit_netloc(Path) ->
{"", Path}.
+urlsplit_netloc("", Acc) ->
+ {lists:reverse(Acc), ""};
urlsplit_netloc(Rest=[C | _], Acc) when C =:= $/; C =:= $?; C =:= $# ->
{lists:reverse(Acc), Rest};
urlsplit_netloc([C | Rest], Acc) ->
@@ -311,67 +345,11 @@ urlsplit_query([C | Rest], Acc) ->
%% @spec guess_mime(string()) -> string()
%% @doc Guess the mime type of a file by the extension of its filename.
guess_mime(File) ->
- case filename:extension(File) of
- ".html" ->
- "text/html";
- ".xhtml" ->
- "application/xhtml+xml";
- ".xml" ->
- "application/xml";
- ".css" ->
- "text/css";
- ".js" ->
- "application/x-javascript";
- ".jpg" ->
- "image/jpeg";
- ".gif" ->
- "image/gif";
- ".png" ->
- "image/png";
- ".swf" ->
- "application/x-shockwave-flash";
- ".zip" ->
- "application/zip";
- ".bz2" ->
- "application/x-bzip2";
- ".gz" ->
- "application/x-gzip";
- ".tar" ->
- "application/x-tar";
- ".tgz" ->
- "application/x-gzip";
- ".txt" ->
+ case mochiweb_mime:from_extension(filename:extension(File)) of
+ undefined ->
"text/plain";
- ".doc" ->
- "application/msword";
- ".pdf" ->
- "application/pdf";
- ".xls" ->
- "application/vnd.ms-excel";
- ".rtf" ->
- "application/rtf";
- ".mov" ->
- "video/quicktime";
- ".mp3" ->
- "audio/mpeg";
- ".z" ->
- "application/x-compress";
- ".wav" ->
- "audio/x-wav";
- ".ico" ->
- "image/x-icon";
- ".bmp" ->
- "image/bmp";
- ".m4a" ->
- "audio/mpeg";
- ".m3u" ->
- "audio/x-mpegurl";
- ".exe" ->
- "application/octet-stream";
- ".csv" ->
- "text/csv";
- _ ->
- "text/plain"
+ Mime ->
+ Mime
end.
%% @spec parse_header(string()) -> {Type, [{K, V}]}
@@ -435,44 +413,267 @@ shell_quote([C | Rest], Acc) when C =:= $\" orelse C =:= $\` orelse
shell_quote([C | Rest], Acc) ->
shell_quote(Rest, [C | Acc]).
-test() ->
- test_join(),
- test_quote_plus(),
- test_unquote(),
- test_urlencode(),
- test_parse_qs(),
- test_urlsplit_path(),
- test_urlunsplit_path(),
- test_urlsplit(),
- test_urlunsplit(),
- test_path_split(),
- test_guess_mime(),
- test_parse_header(),
- test_shell_quote(),
- test_cmd(),
- test_cmd_string(),
- test_partition(),
- test_safe_relative_path(),
+%% @spec parse_qvalues(string()) -> [qvalue()] | invalid_qvalue_string
+%% @type qvalue() = {media_type() | encoding(), float()}.
+%% @type media_type() = string().
+%% @type encoding() = string().
+%%
+%% @doc Parses a list (given as a string) of elements with Q values associated
+%% with them. Elements are separated by commas and each element is separated
+%% from its Q value by a semicolon. Q values are optional; when missing,
+%% the value of an element defaults to 1.0. A Q value is always in the
+%% range [0.0, 1.0]. A Q value list is used, for example, as the value of the
+%% HTTP "Accept" and "Accept-Encoding" headers.
+%%
+%% Q values are described in section 3.9 of RFC 2616 (HTTP 1.1).
+%%
+%% Example:
+%%
+%% parse_qvalues("gzip; q=0.5, deflate, identity;q=0.0") ->
+%% [{"gzip", 0.5}, {"deflate", 1.0}, {"identity", 0.0}]
+%%
+parse_qvalues(QValuesStr) ->
+ try
+ lists:map(
+ fun(Pair) ->
+ [Type | Params] = string:tokens(Pair, ";"),
+ NormParams = normalize_media_params(Params),
+ {Q, NonQParams} = extract_q(NormParams),
+ {string:join([string:strip(Type) | NonQParams], ";"), Q}
+ end,
+ string:tokens(string:to_lower(QValuesStr), ",")
+ )
+ catch
+ _Type:_Error ->
+ invalid_qvalue_string
+ end.
+
+normalize_media_params(Params) ->
+ {ok, Re} = re:compile("\\s"),
+ normalize_media_params(Re, Params, []).
+
+normalize_media_params(_Re, [], Acc) ->
+ lists:reverse(Acc);
+normalize_media_params(Re, [Param | Rest], Acc) ->
+ NormParam = re:replace(Param, Re, "", [global, {return, list}]),
+ normalize_media_params(Re, Rest, [NormParam | Acc]).
+
+extract_q(NormParams) ->
+ {ok, KVRe} = re:compile("^([^=]+)=([^=]+)$"),
+ {ok, QRe} = re:compile("^((?:0|1)(?:\\.\\d{1,3})?)$"),
+ extract_q(KVRe, QRe, NormParams, []).
+
+extract_q(_KVRe, _QRe, [], Acc) ->
+ {1.0, lists:reverse(Acc)};
+extract_q(KVRe, QRe, [Param | Rest], Acc) ->
+ case re:run(Param, KVRe, [{capture, [1, 2], list}]) of
+ {match, [Name, Value]} ->
+ case Name of
+ "q" ->
+ {match, [Q]} = re:run(Value, QRe, [{capture, [1], list}]),
+ QVal = case Q of
+ "0" ->
+ 0.0;
+ "1" ->
+ 1.0;
+ Else ->
+ list_to_float(Else)
+ end,
+ case QVal < 0.0 orelse QVal > 1.0 of
+ false ->
+ {QVal, lists:reverse(Acc) ++ Rest}
+ end;
+ _ ->
+ extract_q(KVRe, QRe, Rest, [Param | Acc])
+ end
+ end.
+
+%% @spec pick_accepted_encodings([qvalue()], [encoding()], encoding()) ->
+%% [encoding()]
+%%
+%% @doc Determines which encodings specified in the given Q values list are
+%% valid according to a list of supported encodings and a default encoding.
+%%
+%% The returned list of encodings is sorted in descending order according to
+%% the Q values of the given list. The last element of this list is the given
+%% default encoding, unless this encoding is explicitly or implicitly
+%% marked with a Q value of 0.0 in the given Q values list.
+%% Note: encodings with the same Q value are kept in the same order as
+%% found in the input Q values list.
+%%
+%% This encoding picking process is described in section 14.3 of
+%% RFC 2616 (HTTP 1.1).
+%%
+%% Example:
+%%
+%% pick_accepted_encodings(
+%% [{"gzip", 0.5}, {"deflate", 1.0}],
+%% ["gzip", "identity"],
+%% "identity"
+%% ) ->
+%% ["gzip", "identity"]
+%%
+pick_accepted_encodings(AcceptedEncs, SupportedEncs, DefaultEnc) ->
+ SortedQList = lists:reverse(
+ lists:sort(fun({_, Q1}, {_, Q2}) -> Q1 < Q2 end, AcceptedEncs)
+ ),
+ {Accepted, Refused} = lists:foldr(
+ fun({E, Q}, {A, R}) ->
+ case Q > 0.0 of
+ true ->
+ {[E | A], R};
+ false ->
+ {A, [E | R]}
+ end
+ end,
+ {[], []},
+ SortedQList
+ ),
+ Refused1 = lists:foldr(
+ fun(Enc, Acc) ->
+ case Enc of
+ "*" ->
+ lists:subtract(SupportedEncs, Accepted) ++ Acc;
+ _ ->
+ [Enc | Acc]
+ end
+ end,
+ [],
+ Refused
+ ),
+ Accepted1 = lists:foldr(
+ fun(Enc, Acc) ->
+ case Enc of
+ "*" ->
+ lists:subtract(SupportedEncs, Accepted ++ Refused1) ++ Acc;
+ _ ->
+ [Enc | Acc]
+ end
+ end,
+ [],
+ Accepted
+ ),
+ Accepted2 = case lists:member(DefaultEnc, Accepted1) of
+ true ->
+ Accepted1;
+ false ->
+ Accepted1 ++ [DefaultEnc]
+ end,
+ [E || E <- Accepted2, lists:member(E, SupportedEncs),
+ not lists:member(E, Refused1)].
+
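+%% @spec make_io(atom() | integer() | iolist()) -> iolist()
+%% @doc Convert an atom or an integer to its textual form; lists and
+%%      binaries are passed through unchanged.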
+make_io(Atom) when is_atom(Atom) ->
+ atom_to_list(Atom);
+make_io(Integer) when is_integer(Integer) ->
+ integer_to_list(Integer);
+make_io(Io) when is_list(Io); is_binary(Io) ->
+ Io.
+
+%%
+%% Tests
+%%
+-include_lib("eunit/include/eunit.hrl").
+-ifdef(TEST).
+
+make_io_test() ->
+ ?assertEqual(
+ <<"atom">>,
+ iolist_to_binary(make_io(atom))),
+ ?assertEqual(
+ <<"20">>,
+ iolist_to_binary(make_io(20))),
+ ?assertEqual(
+ <<"list">>,
+ iolist_to_binary(make_io("list"))),
+ ?assertEqual(
+ <<"binary">>,
+ iolist_to_binary(make_io(<<"binary">>))),
+ ok.
+
+-record(test_record, {field1=f1, field2=f2}).
+record_to_proplist_test() ->
+ ?assertEqual(
+ [{'__record', test_record},
+ {field1, f1},
+ {field2, f2}],
+ record_to_proplist(#test_record{}, record_info(fields, test_record))),
+ ?assertEqual(
+ [{'typekey', test_record},
+ {field1, f1},
+ {field2, f2}],
+ record_to_proplist(#test_record{},
+ record_info(fields, test_record),
+ typekey)),
ok.
-test_shell_quote() ->
- "\"foo \\$bar\\\"\\`' baz\"" = shell_quote("foo $bar\"`' baz"),
+shell_quote_test() ->
+ ?assertEqual(
+ "\"foo \\$bar\\\"\\`' baz\"",
+ shell_quote("foo $bar\"`' baz")),
+ ok.
+
+cmd_port_test_spool(Port, Acc) ->
+ receive
+ {Port, eof} ->
+ Acc;
+ {Port, {data, {eol, Data}}} ->
+ cmd_port_test_spool(Port, ["\n", Data | Acc]);
+ {Port, Unknown} ->
+ throw({unknown, Unknown})
+ after 100 ->
+ throw(timeout)
+ end.
+
+cmd_port_test() ->
+ Port = cmd_port(["echo", "$bling$ `word`!"],
+ [eof, stream, {line, 4096}]),
+ Res = try lists:append(lists:reverse(cmd_port_test_spool(Port, [])))
+ after catch port_close(Port)
+ end,
+ self() ! {Port, wtf},
+ try cmd_port_test_spool(Port, [])
+ catch throw:{unknown, wtf} -> ok
+ end,
+ try cmd_port_test_spool(Port, [])
+ catch throw:timeout -> ok
+ end,
+ ?assertEqual(
+ "$bling$ `word`!\n",
+ Res).
+
+cmd_test() ->
+ ?assertEqual(
+ "$bling$ `word`!\n",
+ cmd(["echo", "$bling$ `word`!"])),
ok.
-test_cmd() ->
- "$bling$ `word`!\n" = cmd(["echo", "$bling$ `word`!"]),
+cmd_string_test() ->
+ ?assertEqual(
+ "\"echo\" \"\\$bling\\$ \\`word\\`!\"",
+ cmd_string(["echo", "$bling$ `word`!"])),
ok.
-test_cmd_string() ->
- "\"echo\" \"\\$bling\\$ \\`word\\`!\"" = cmd_string(["echo", "$bling$ `word`!"]),
+cmd_status_test() ->
+ ?assertEqual(
+ {0, <<"$bling$ `word`!\n">>},
+ cmd_status(["echo", "$bling$ `word`!"])),
ok.
-test_parse_header() ->
- {"multipart/form-data", [{"boundary", "AaB03x"}]} =
- parse_header("multipart/form-data; boundary=AaB03x"),
+
+parse_header_test() ->
+ ?assertEqual(
+ {"multipart/form-data", [{"boundary", "AaB03x"}]},
+ parse_header("multipart/form-data; boundary=AaB03x")),
+ %% This tests (currently) intentionally broken behavior
+ ?assertEqual(
+ {"multipart/form-data",
+ [{"b", ""},
+ {"cgi", "is"},
+ {"broken", "true\"e"}]},
+ parse_header("multipart/form-data;b=;cgi=\"i\\s;broken=true\"e;=z;z")),
ok.
-test_guess_mime() ->
+guess_mime_test() ->
"text/plain" = guess_mime(""),
"text/plain" = guess_mime(".text"),
"application/zip" = guess_mime(".zip"),
@@ -481,19 +682,22 @@ test_guess_mime() ->
"application/xhtml+xml" = guess_mime("x.xhtml"),
ok.
-test_path_split() ->
+path_split_test() ->
{"", "foo/bar"} = path_split("/foo/bar"),
{"foo", "bar"} = path_split("foo/bar"),
{"bar", ""} = path_split("bar"),
ok.
-test_urlsplit() ->
+urlsplit_test() ->
{"", "", "/foo", "", "bar?baz"} = urlsplit("/foo#bar?baz"),
{"http", "host:port", "/foo", "", "bar?baz"} =
urlsplit("http://host:port/foo#bar?baz"),
+ {"http", "host", "", "", ""} = urlsplit("http://host"),
+ {"", "", "/wiki/Category:Fruit", "", ""} =
+ urlsplit("/wiki/Category:Fruit"),
ok.
-test_urlsplit_path() ->
+urlsplit_path_test() ->
{"/foo/bar", "", ""} = urlsplit_path("/foo/bar"),
{"/foo", "baz", ""} = urlsplit_path("/foo?baz"),
{"/foo", "", "bar?baz"} = urlsplit_path("/foo#bar?baz"),
@@ -502,13 +706,13 @@ test_urlsplit_path() ->
{"/foo", "bar?baz", "baz"} = urlsplit_path("/foo?bar?baz#baz"),
ok.
-test_urlunsplit() ->
+urlunsplit_test() ->
"/foo#bar?baz" = urlunsplit({"", "", "/foo", "", "bar?baz"}),
"http://host:port/foo#bar?baz" =
urlunsplit({"http", "host:port", "/foo", "", "bar?baz"}),
ok.
-test_urlunsplit_path() ->
+urlunsplit_path_test() ->
"/foo/bar" = urlunsplit_path({"/foo/bar", "", ""}),
"/foo?baz" = urlunsplit_path({"/foo", "baz", ""}),
"/foo#bar?baz" = urlunsplit_path({"/foo", "", "bar?baz"}),
@@ -517,16 +721,28 @@ test_urlunsplit_path() ->
"/foo?bar?baz#baz" = urlunsplit_path({"/foo", "bar?baz", "baz"}),
ok.
-test_join() ->
- "foo,bar,baz" = join(["foo", "bar", "baz"], $,),
- "foo,bar,baz" = join(["foo", "bar", "baz"], ","),
- "foo bar" = join([["foo", " bar"]], ","),
- "foo bar,baz" = join([["foo", " bar"], "baz"], ","),
- "foo" = join(["foo"], ","),
- "foobarbaz" = join(["foo", "bar", "baz"], ""),
+join_test() ->
+ ?assertEqual("foo,bar,baz",
+ join(["foo", "bar", "baz"], $,)),
+ ?assertEqual("foo,bar,baz",
+ join(["foo", "bar", "baz"], ",")),
+ ?assertEqual("foo bar",
+ join([["foo", " bar"]], ",")),
+ ?assertEqual("foo bar,baz",
+ join([["foo", " bar"], "baz"], ",")),
+ ?assertEqual("foo",
+ join(["foo"], ",")),
+ ?assertEqual("foobarbaz",
+ join(["foo", "bar", "baz"], "")),
+ ?assertEqual("foo" ++ [<<>>] ++ "bar" ++ [<<>>] ++ "baz",
+ join(["foo", "bar", "baz"], <<>>)),
+ ?assertEqual("foobar" ++ [<<"baz">>],
+ join(["foo", "bar", <<"baz">>], "")),
+ ?assertEqual("",
+ join([], "any")),
ok.
-test_quote_plus() ->
+quote_plus_test() ->
"foo" = quote_plus(foo),
"1" = quote_plus(1),
"1.1" = quote_plus(1.1),
@@ -535,26 +751,45 @@ test_quote_plus() ->
"foo%0A" = quote_plus("foo\n"),
"foo%0A" = quote_plus("foo\n"),
"foo%3B%26%3D" = quote_plus("foo;&="),
+ "foo%3B%26%3D" = quote_plus(<<"foo;&=">>),
ok.
-test_unquote() ->
- "foo bar" = unquote("foo+bar"),
- "foo bar" = unquote("foo%20bar"),
- "foo\r\n" = unquote("foo%0D%0A"),
+unquote_test() ->
+ ?assertEqual("foo bar",
+ unquote("foo+bar")),
+ ?assertEqual("foo bar",
+ unquote("foo%20bar")),
+ ?assertEqual("foo\r\n",
+ unquote("foo%0D%0A")),
+ ?assertEqual("foo\r\n",
+ unquote(<<"foo%0D%0A">>)),
ok.
-test_urlencode() ->
+urlencode_test() ->
"foo=bar&baz=wibble+%0D%0A&z=1" = urlencode([{foo, "bar"},
{"baz", "wibble \r\n"},
{z, 1}]),
ok.
-test_parse_qs() ->
- [{"foo", "bar"}, {"baz", "wibble \r\n"}, {"z", "1"}] =
- parse_qs("foo=bar&baz=wibble+%0D%0A&z=1"),
+parse_qs_test() ->
+ ?assertEqual(
+ [{"foo", "bar"}, {"baz", "wibble \r\n"}, {"z", "1"}],
+ parse_qs("foo=bar&baz=wibble+%0D%0a&z=1")),
+ ?assertEqual(
+ [{"", "bar"}, {"baz", "wibble \r\n"}, {"z", ""}],
+ parse_qs("=bar&baz=wibble+%0D%0a&z=")),
+ ?assertEqual(
+ [{"foo", "bar"}, {"baz", "wibble \r\n"}, {"z", "1"}],
+ parse_qs(<<"foo=bar&baz=wibble+%0D%0a&z=1">>)),
+ ?assertEqual(
+ [],
+ parse_qs("")),
+ ?assertEqual(
+ [{"foo", ""}, {"bar", ""}, {"baz", ""}],
+ parse_qs("foo;bar&baz")),
ok.
-test_partition() ->
+partition_test() ->
{"foo", "", ""} = partition("foo", "/"),
{"foo", "/", "bar"} = partition("foo/bar", "/"),
{"foo", "/", ""} = partition("foo/", "/"),
@@ -562,7 +797,7 @@ test_partition() ->
{"f", "oo/ba", "r"} = partition("foo/bar", "oo/ba"),
ok.
-test_safe_relative_path() ->
+safe_relative_path_test() ->
"foo" = safe_relative_path("foo"),
"foo/" = safe_relative_path("foo/"),
"foo" = safe_relative_path("foo/bar/.."),
@@ -575,3 +810,164 @@ test_safe_relative_path() ->
undefined = safe_relative_path("foo/../.."),
undefined = safe_relative_path("foo//"),
ok.
+
+parse_qvalues_test() ->
+ [] = parse_qvalues(""),
+ [{"identity", 0.0}] = parse_qvalues("identity;q=0"),
+ [{"identity", 0.0}] = parse_qvalues("identity ;q=0"),
+ [{"identity", 0.0}] = parse_qvalues(" identity; q =0 "),
+ [{"identity", 0.0}] = parse_qvalues("identity ; q = 0"),
+ [{"identity", 0.0}] = parse_qvalues("identity ; q= 0.0"),
+ [{"gzip", 1.0}, {"deflate", 1.0}, {"identity", 0.0}] = parse_qvalues(
+ "gzip,deflate,identity;q=0.0"
+ ),
+ [{"deflate", 1.0}, {"gzip", 1.0}, {"identity", 0.0}] = parse_qvalues(
+ "deflate,gzip,identity;q=0.0"
+ ),
+ [{"gzip", 1.0}, {"deflate", 1.0}, {"gzip", 1.0}, {"identity", 0.0}] =
+ parse_qvalues("gzip,deflate,gzip,identity;q=0"),
+ [{"gzip", 1.0}, {"deflate", 1.0}, {"identity", 0.0}] = parse_qvalues(
+ "gzip, deflate , identity; q=0.0"
+ ),
+ [{"gzip", 1.0}, {"deflate", 1.0}, {"identity", 0.0}] = parse_qvalues(
+ "gzip; q=1, deflate;q=1.0, identity;q=0.0"
+ ),
+ [{"gzip", 0.5}, {"deflate", 1.0}, {"identity", 0.0}] = parse_qvalues(
+ "gzip; q=0.5, deflate;q=1.0, identity;q=0"
+ ),
+ [{"gzip", 0.5}, {"deflate", 1.0}, {"identity", 0.0}] = parse_qvalues(
+ "gzip; q=0.5, deflate , identity;q=0.0"
+ ),
+ [{"gzip", 0.5}, {"deflate", 0.8}, {"identity", 0.0}] = parse_qvalues(
+ "gzip; q=0.5, deflate;q=0.8, identity;q=0.0"
+ ),
+ [{"gzip", 0.5}, {"deflate", 1.0}, {"identity", 1.0}] = parse_qvalues(
+ "gzip; q=0.5,deflate,identity"
+ ),
+ [{"gzip", 0.5}, {"deflate", 1.0}, {"identity", 1.0}, {"identity", 1.0}] =
+ parse_qvalues("gzip; q=0.5,deflate,identity, identity "),
+ [{"text/html;level=1", 1.0}, {"text/plain", 0.5}] =
+ parse_qvalues("text/html;level=1, text/plain;q=0.5"),
+ [{"text/html;level=1", 0.3}, {"text/plain", 1.0}] =
+ parse_qvalues("text/html;level=1;q=0.3, text/plain"),
+ [{"text/html;level=1", 0.3}, {"text/plain", 1.0}] =
+ parse_qvalues("text/html; level = 1; q = 0.3, text/plain"),
+ [{"text/html;level=1", 0.3}, {"text/plain", 1.0}] =
+ parse_qvalues("text/html;q=0.3;level=1, text/plain"),
+ invalid_qvalue_string = parse_qvalues("gzip; q=1.1, deflate"),
+ invalid_qvalue_string = parse_qvalues("gzip; q=0.5, deflate;q=2"),
+ invalid_qvalue_string = parse_qvalues("gzip, deflate;q=AB"),
+ invalid_qvalue_string = parse_qvalues("gzip; q=2.1, deflate"),
+ invalid_qvalue_string = parse_qvalues("gzip; q=0.1234, deflate"),
+ invalid_qvalue_string = parse_qvalues("text/html;level=1;q=0.3, text/html;level"),
+ ok.
+
+pick_accepted_encodings_test() ->
+ ["identity"] = pick_accepted_encodings(
+ [],
+ ["gzip", "identity"],
+ "identity"
+ ),
+ ["gzip", "identity"] = pick_accepted_encodings(
+ [{"gzip", 1.0}],
+ ["gzip", "identity"],
+ "identity"
+ ),
+ ["identity"] = pick_accepted_encodings(
+ [{"gzip", 0.0}],
+ ["gzip", "identity"],
+ "identity"
+ ),
+ ["gzip", "identity"] = pick_accepted_encodings(
+ [{"gzip", 1.0}, {"deflate", 1.0}],
+ ["gzip", "identity"],
+ "identity"
+ ),
+ ["gzip", "identity"] = pick_accepted_encodings(
+ [{"gzip", 0.5}, {"deflate", 1.0}],
+ ["gzip", "identity"],
+ "identity"
+ ),
+ ["identity"] = pick_accepted_encodings(
+ [{"gzip", 0.0}, {"deflate", 0.0}],
+ ["gzip", "identity"],
+ "identity"
+ ),
+ ["gzip"] = pick_accepted_encodings(
+ [{"gzip", 1.0}, {"deflate", 1.0}, {"identity", 0.0}],
+ ["gzip", "identity"],
+ "identity"
+ ),
+ ["gzip", "deflate", "identity"] = pick_accepted_encodings(
+ [{"gzip", 1.0}, {"deflate", 1.0}],
+ ["gzip", "deflate", "identity"],
+ "identity"
+ ),
+ ["gzip", "deflate"] = pick_accepted_encodings(
+ [{"gzip", 1.0}, {"deflate", 1.0}, {"identity", 0.0}],
+ ["gzip", "deflate", "identity"],
+ "identity"
+ ),
+ ["deflate", "gzip", "identity"] = pick_accepted_encodings(
+ [{"gzip", 0.2}, {"deflate", 1.0}],
+ ["gzip", "deflate", "identity"],
+ "identity"
+ ),
+ ["deflate", "deflate", "gzip", "identity"] = pick_accepted_encodings(
+ [{"gzip", 0.2}, {"deflate", 1.0}, {"deflate", 1.0}],
+ ["gzip", "deflate", "identity"],
+ "identity"
+ ),
+ ["deflate", "gzip", "gzip", "identity"] = pick_accepted_encodings(
+ [{"gzip", 0.2}, {"deflate", 1.0}, {"gzip", 1.0}],
+ ["gzip", "deflate", "identity"],
+ "identity"
+ ),
+ ["gzip", "deflate", "gzip", "identity"] = pick_accepted_encodings(
+ [{"gzip", 0.2}, {"deflate", 0.9}, {"gzip", 1.0}],
+ ["gzip", "deflate", "identity"],
+ "identity"
+ ),
+ [] = pick_accepted_encodings(
+ [{"*", 0.0}],
+ ["gzip", "deflate", "identity"],
+ "identity"
+ ),
+ ["gzip", "deflate", "identity"] = pick_accepted_encodings(
+ [{"*", 1.0}],
+ ["gzip", "deflate", "identity"],
+ "identity"
+ ),
+ ["gzip", "deflate", "identity"] = pick_accepted_encodings(
+ [{"*", 0.6}],
+ ["gzip", "deflate", "identity"],
+ "identity"
+ ),
+ ["gzip"] = pick_accepted_encodings(
+ [{"gzip", 1.0}, {"*", 0.0}],
+ ["gzip", "deflate", "identity"],
+ "identity"
+ ),
+ ["gzip", "deflate"] = pick_accepted_encodings(
+ [{"gzip", 1.0}, {"deflate", 0.6}, {"*", 0.0}],
+ ["gzip", "deflate", "identity"],
+ "identity"
+ ),
+ ["deflate", "gzip"] = pick_accepted_encodings(
+ [{"gzip", 0.5}, {"deflate", 1.0}, {"*", 0.0}],
+ ["gzip", "deflate", "identity"],
+ "identity"
+ ),
+ ["gzip", "identity"] = pick_accepted_encodings(
+ [{"deflate", 0.0}, {"*", 1.0}],
+ ["gzip", "deflate", "identity"],
+ "identity"
+ ),
+ ["gzip", "identity"] = pick_accepted_encodings(
+ [{"*", 1.0}, {"deflate", 0.0}],
+ ["gzip", "deflate", "identity"],
+ "identity"
+ ),
+ ok.
+
+-endif.
diff --git a/src/mochiweb/reloader.erl b/src/mochiweb/reloader.erl
index 6835f8f9..c0f5de88 100644
--- a/src/mochiweb/reloader.erl
+++ b/src/mochiweb/reloader.erl
@@ -13,7 +13,9 @@
-export([start/0, start_link/0]).
-export([stop/0]).
-export([init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2, code_change/3]).
-
+-export([all_changed/0]).
+-export([is_changed/1]).
+-export([reload_modules/1]).
-record(state, {last, tref}).
%% External API
@@ -74,8 +76,37 @@ terminate(_Reason, State) ->
code_change(_Vsn, State, _Extra) ->
{ok, State}.
+%% @spec reload_modules([atom()]) -> [{module, atom()} | {error, term()}]
+%% @doc code:purge/1 and code:load_file/1 the given list of modules in order,
+%% returning the results of code:load_file/1.
+reload_modules(Modules) ->
+ [begin code:purge(M), code:load_file(M) end || M <- Modules].
+
+%% @spec all_changed() -> [atom()]
+%% @doc Return a list of beam modules that have changed.
+all_changed() ->
+ [M || {M, Fn} <- code:all_loaded(), is_list(Fn), is_changed(M)].
+
+%% @spec is_changed(atom()) -> boolean()
+%% @doc Returns true if the loaded module is a beam with a vsn attribute
+%% that does not match the on-disk beam file; returns false otherwise.
+is_changed(M) ->
+ try
+ module_vsn(M:module_info()) =/= module_vsn(code:get_object_code(M))
+ catch _:_ ->
+ false
+ end.
+
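+%% A minimal shell sketch of how these exports compose (the printed value
+%% is illustrative: one code:load_file/1 result per changed module):
+%%
+%%   1> reloader:reload_modules(reloader:all_changed()).
+%%   [{module, mochiweb_util}]
+%%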
%% Internal API
+module_vsn({M, Beam, _Fn}) ->
+ {ok, {M, Vsn}} = beam_lib:version(Beam),
+ Vsn;
+module_vsn(L) when is_list(L) ->
+ {_, Attrs} = lists:keyfind(attributes, 1, L),
+ {_, Vsn} = lists:keyfind(vsn, 1, Attrs),
+ Vsn.
+
doit(From, To) ->
[case file:read_file_info(Filename) of
{ok, #file_info{mtime = Mtime}} when Mtime >= From, Mtime < To ->
@@ -121,3 +152,10 @@ reload(Module) ->
stamp() ->
erlang:localtime().
+
+%%
+%% Tests
+%%
+-include_lib("eunit/include/eunit.hrl").
+-ifdef(TEST).
+-endif.
diff --git a/test/etap/010-file-basics.t b/test/etap/010-file-basics.t
index 09b2f2b1..ed71f5e8 100755
--- a/test/etap/010-file-basics.t
+++ b/test/etap/010-file-basics.t
@@ -16,7 +16,7 @@ filename() -> test_util:build_file("test/etap/temp.010").
main(_) ->
test_util:init_code_path(),
- etap:plan(16),
+ etap:plan(19),
case (catch test()) of
ok ->
etap:end_tests();
@@ -67,11 +67,25 @@ test() ->
{ok, BinPos} = couch_file:append_binary(Fd, <<131,100,0,3,102,111,111>>),
etap:is({ok, foo}, couch_file:pread_term(Fd, BinPos),
"Reading a term from a written binary term representation succeeds."),
+
+ BigBin = list_to_binary(lists:duplicate(100000, 0)),
+ {ok, BigBinPos} = couch_file:append_binary(Fd, BigBin),
+ etap:is({ok, BigBin}, couch_file:pread_binary(Fd, BigBinPos),
+ "Reading a large term from a written representation succeeds."),
+
+ ok = couch_file:write_header(Fd, hello),
+ etap:is({ok, hello}, couch_file:read_header(Fd),
+ "Reading a header succeeds."),
+
+ {ok, BigBinPos2} = couch_file:append_binary(Fd, BigBin),
+ etap:is({ok, BigBin}, couch_file:pread_binary(Fd, BigBinPos2),
+ "Reading a large term from a written representation succeeds 2."),
% append_binary == append_iolist?
% Possible bug in pread_iolist or iolist() -> append_binary
{ok, IOLPos} = couch_file:append_binary(Fd, ["foo", $m, <<"bam">>]),
- etap:is({ok, [<<"foombam">>]}, couch_file:pread_iolist(Fd, IOLPos),
+ {ok, IoList} = couch_file:pread_iolist(Fd, IOLPos),
+ etap:is(<<"foombam">>, iolist_to_binary(IoList),
"Reading an results in a binary form of the written iolist()"),
% XXX: How does one test fsync?
diff --git a/test/etap/021-btree-reductions.t b/test/etap/021-btree-reductions.t
index 3e19c767..331e49af 100755
--- a/test/etap/021-btree-reductions.t
+++ b/test/etap/021-btree-reductions.t
@@ -106,7 +106,7 @@ test()->
(_) -> false
end,
couch_btree:fold_reduce(Btree2, FoldFun, [], [{dir, fwd}, {key_group_fun, GroupFun}, {start_key, SK1}, {end_key, EK1}]),
- "Reducing foward over first half works with a startkey and endkey."
+ "Reducing forward over first half works with a startkey and endkey."
),
etap:fun_is(
@@ -115,7 +115,7 @@ test()->
(_) -> false
end,
couch_btree:fold_reduce(Btree2, FoldFun, [], [{dir, fwd}, {key_group_fun, GroupFun}, {start_key, SK2}, {end_key, EK2}]),
- "Reducing foward over second half works with second startkey and endkey"
+ "Reducing forward over second half works with second startkey and endkey"
),
etap:fun_is(
diff --git a/test/etap/030-doc-from-json.t b/test/etap/030-doc-from-json.t
index dc3327aa..c4ef649a 100755
--- a/test/etap/030-doc-from-json.t
+++ b/test/etap/030-doc-from-json.t
@@ -17,7 +17,11 @@
%% XXX: Figure out how to -include("couch_db.hrl")
-record(doc, {id= <<"">>, revs={0, []}, body={[]},
atts=[], deleted=false, meta=[]}).
--record(att, {name, type, len, md5= <<>>, revpos=0, data}).
+-record(att, {name, type, att_len, disk_len, md5= <<>>, revpos=0, data,
+ encoding=identity}).
+
+default_config() ->
+ test_util:build_file("etc/couchdb/default_dev.ini").
main(_) ->
test_util:init_code_path(),
@@ -32,6 +36,8 @@ main(_) ->
ok.
test() ->
+ couch_config:start_link([default_config()]),
+ couch_config:set("attachments", "compression_level", "0"),
ok = test_from_json_success(),
ok = test_from_json_errors(),
ok.
@@ -85,13 +91,17 @@ test_from_json_success() ->
name = <<"my_attachment.fu">>,
data = stub,
type = <<"application/awesome">>,
- len = 45
+ att_len = 45,
+ disk_len = 45,
+ revpos = nil
},
#att{
name = <<"noahs_private_key.gpg">>,
data = <<"I have a pet fish!">>,
type = <<"application/pgp-signature">>,
- len = 18
+ att_len = 18,
+ disk_len = 18,
+ revpos = 0
}
]},
"Attachments are parsed correctly."
diff --git a/test/etap/031-doc-to-json.t b/test/etap/031-doc-to-json.t
index 4e7a175f..605a6d00 100755
--- a/test/etap/031-doc-to-json.t
+++ b/test/etap/031-doc-to-json.t
@@ -17,7 +17,11 @@
%% XXX: Figure out how to -include("couch_db.hrl")
-record(doc, {id= <<"">>, revs={0, []}, body={[]},
atts=[], deleted=false, meta=[]}).
--record(att, {name, type, len, md5= <<>>, revpos=0, data}).
+-record(att, {name, type, att_len, disk_len, md5= <<>>, revpos=0, data,
+ encoding=identity}).
+
+default_config() ->
+ test_util:build_file("etc/couchdb/default_dev.ini").
main(_) ->
test_util:init_code_path(),
@@ -32,6 +36,8 @@ main(_) ->
ok.
test() ->
+ couch_config:start_link([default_config()]),
+ couch_config:set("attachments", "compression_level", "0"),
ok = test_to_json_success(),
ok.
@@ -72,8 +78,8 @@ test_to_json_success() ->
},
{
#doc{deleted=true, body={[{<<"foo">>, <<"bar">>}]}},
- {[{<<"_id">>, <<>>}, {<<"_deleted">>, true}]},
- "Deleted docs drop body members."
+ {[{<<"_id">>, <<>>}, {<<"foo">>, <<"bar">>}, {<<"_deleted">>, true}]},
+ "Deleted docs no longer drop body members."
},
{
#doc{meta=[
@@ -116,14 +122,16 @@ test_to_json_success() ->
type = <<"xml/sucks">>,
data = fun() -> ok end,
revpos = 1,
- len = 400
+ att_len = 400,
+ disk_len = 400
},
#att{
name = <<"fast.json">>,
type = <<"json/ftw">>,
data = <<"{\"so\": \"there!\"}">>,
revpos = 1,
- len = 16
+ att_len = 16,
+ disk_len = 16
}
]},
{[
@@ -153,7 +161,8 @@ test_to_json_success() ->
type = <<"text/plain">>,
data = fun() -> <<"diet pepsi">> end,
revpos = 1,
- len = 10
+ att_len = 10,
+ disk_len = 10
},
#att{
name = <<"food.now">>,
diff --git a/test/etap/040-util.t b/test/etap/040-util.t
index 6d6da2c1..8f80db87 100755
--- a/test/etap/040-util.t
+++ b/test/etap/040-util.t
@@ -17,7 +17,7 @@ main(_) ->
test_util:init_code_path(),
application:start(crypto),
- etap:plan(11),
+ etap:plan(14),
case (catch test()) of
ok ->
etap:end_tests();
@@ -35,29 +35,6 @@ test() ->
etap:is(foobarbaz, couch_util:to_existing_atom("foobarbaz"),
"A list of atoms is one munged atom."),
- % terminate_linked
- Self = self(),
-
- spawn(fun() ->
- SecondSelf = self(),
- ChildPid = spawn_link(fun() ->
- SecondSelf ! {child, started},
- receive shutdown -> ok end
- end),
- PidUp = receive
- {child, started} -> ok
- after 1000 ->
- {error, timeout}
- end,
- etap:is(ok, PidUp, "Started a linked process."),
- couch_util:terminate_linked(normal),
- Self ! {pid, ChildPid}
- end),
- receive
- {pid, Pid} ->
- etap:ok(not is_process_alive(Pid), "Linked process was killed.")
- end,
-
% implode
etap:is([1, 38, 2, 38, 3], couch_util:implode([1,2,3],"&"),
"use & as separator in list."),
@@ -88,4 +65,16 @@ test() ->
etap:ok(not couch_util:should_flush(),
"Checking to flush invokes GC."),
+ % verify
+ etap:is(true, couch_util:verify("It4Vooya", "It4Vooya"),
+ "String comparison."),
+ etap:is(false, couch_util:verify("It4VooyaX", "It4Vooya"),
+ "String comparison (unequal lengths)."),
+ etap:is(true, couch_util:verify(<<"ahBase3r">>, <<"ahBase3r">>),
+ "Binary comparison."),
+ etap:is(false, couch_util:verify(<<"ahBase3rX">>, <<"ahBase3r">>),
+ "Binary comparison (unequal lengths)."),
+ etap:is(false, couch_util:verify(nil, <<"ahBase3r">>),
+ "Binary comparison with atom."),
+
ok.
diff --git a/test/etap/050-stream.t b/test/etap/050-stream.t
index 9324916c..d30b524a 100755
--- a/test/etap/050-stream.t
+++ b/test/etap/050-stream.t
@@ -42,8 +42,8 @@ test() ->
etap:is(ok, couch_stream:write(Stream, <<>>),
"Writing an empty binary does nothing."),
- {Ptrs, Length, _} = couch_stream:close(Stream),
- etap:is(Ptrs, [0], "Close returns the file pointers."),
+ {Ptrs, Length, _, _, _} = couch_stream:close(Stream),
+ etap:is(Ptrs, [{0, 8}], "Close returns the file pointers."),
etap:is(Length, 8, "Close also returns the number of bytes written."),
etap:is(<<"foodfoob">>, read_all(Fd, Ptrs), "Returned pointers are valid."),
@@ -58,8 +58,8 @@ test() ->
etap:is(ok, couch_stream:write(Stream2, ZeroBits),
"Successfully wrote 80 0 bits."),
- {Ptrs2, Length2, _} = couch_stream:close(Stream2),
- etap:is(Ptrs2, [ExpPtr], "Closing stream returns the file pointers."),
+ {Ptrs2, Length2, _, _, _} = couch_stream:close(Stream2),
+ etap:is(Ptrs2, [{ExpPtr, 20}], "Closing stream returns the file pointers."),
etap:is(Length2, 20, "Length written is 160 bytes."),
AllBits = iolist_to_binary([OneBits,ZeroBits]),
@@ -73,14 +73,14 @@ test() ->
couch_stream:write(Stream3, Data),
[Data | Acc]
end, [], lists:seq(1, 1024)),
- {Ptrs3, Length3, _} = couch_stream:close(Stream3),
+ {Ptrs3, Length3, _, _, _} = couch_stream:close(Stream3),
% 4095 = 4096 - (4096 rem 5), the last 5-byte write before exceeding threshold
% + 5 puts us over the threshold
% + 4 bytes for the term_to_binary adding a length header
% + 1 byte every 4K for tail append headers
SecondPtr = ExpPtr2 + 4095 + 5 + 4 + 1,
- etap:is(Ptrs3, [ExpPtr2, SecondPtr], "Pointers every 4K bytes."),
+ etap:is(Ptrs3, [{ExpPtr2, 4100}, {SecondPtr, 1020}], "Pointers every 4K bytes."),
etap:is(Length3, 5120, "Wrote the expected 5K bytes."),
couch_file:close(Fd),
diff --git a/test/etap/060-kt-merging.t b/test/etap/060-kt-merging.t
index d6b13d6d..5a8571ac 100755
--- a/test/etap/060-kt-merging.t
+++ b/test/etap/060-kt-merging.t
@@ -15,7 +15,7 @@
main(_) ->
test_util:init_code_path(),
- etap:plan(16),
+ etap:plan(12),
case (catch test()) of
ok ->
etap:end_tests();
@@ -26,113 +26,88 @@ main(_) ->
ok.
test() ->
- EmptyTree = [],
- One = [{0, {"1","foo",[]}}],
+ One = {0, {"1","foo",[]}},
TwoSibs = [{0, {"1","foo",[]}},
{0, {"2","foo",[]}}],
- OneChild = [{0, {"1","foo",[{"1a", "bar", []}]}}],
- TwoChild = [{0, {"1","foo", [{"1a", "bar", [{"1aa", "bar", []}]}]}}],
- TwoChildSibs = [{0, {"1","foo", [{"1a", "bar", []},
- {"1b", "bar", []}]}}],
- TwoChildSibs2 = [{0, {"1","foo", [{"1a", "bar", []},
- {"1b", "bar", [{"1bb", "boo", []}]}]}}],
- Stemmed1b = [{1, {"1a", "bar", []}}],
- Stemmed1a = [{1, {"1a", "bar", [{"1aa", "bar", []}]}}],
- Stemmed1aa = [{2, {"1aa", "bar", []}}],
- Stemmed1bb = [{2, {"1bb", "boo", []}}],
-
- etap:is(
- {EmptyTree, no_conflicts},
- couch_key_tree:merge(EmptyTree, EmptyTree),
- "Merging two empty trees yields an empty tree."
- ),
-
- etap:is(
- {One, no_conflicts},
- couch_key_tree:merge(EmptyTree, One),
+ OneChild = {0, {"1","foo",[{"1a", "bar", []}]}},
+ TwoChild = {0, {"1","foo", [{"1a", "bar", [{"1aa", "bar", []}]}]}},
+ TwoChildSibs = {0, {"1","foo", [{"1a", "bar", []},
+ {"1b", "bar", []}]}},
+ TwoChildSibs2 = {0, {"1","foo", [{"1a", "bar", []},
+ {"1b", "bar", [{"1bb", "boo", []}]}]}},
+ Stemmed1b = {1, {"1a", "bar", []}},
+ Stemmed1a = {1, {"1a", "bar", [{"1aa", "bar", []}]}},
+ Stemmed1aa = {2, {"1aa", "bar", []}},
+ Stemmed1bb = {2, {"1bb", "boo", []}},
+
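+ %% merge/2 now takes the accumulated revision tree plus a single
+ %% {Depth, PathTree} element instead of two trees, hence the changed
+ %% shapes above. A sketch of the call convention, mirroring the first
+ %% assertion below:
+ %%
+ %% {[One], no_conflicts} = couch_key_tree:merge([], One),
+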
+ etap:is(
+ {[One], no_conflicts},
+ couch_key_tree:merge([], One),
"The empty tree is the identity for merge."
),
etap:is(
- {One, no_conflicts},
- couch_key_tree:merge(One, EmptyTree),
- "Merging is commutative."
- ),
-
- etap:is(
{TwoSibs, no_conflicts},
- couch_key_tree:merge(One, TwoSibs),
+ couch_key_tree:merge(TwoSibs, One),
"Merging a prefix of a tree with the tree yields the tree."
),
etap:is(
- {One, no_conflicts},
- couch_key_tree:merge(One, One),
+ {[One], no_conflicts},
+ couch_key_tree:merge([One], One),
"Merging is reflexive."
),
etap:is(
- {TwoChild, no_conflicts},
- couch_key_tree:merge(TwoChild, TwoChild),
+ {[TwoChild], no_conflicts},
+ couch_key_tree:merge([TwoChild], TwoChild),
"Merging two children is still reflexive."
),
etap:is(
- {TwoChildSibs, no_conflicts},
- couch_key_tree:merge(TwoChildSibs, TwoChildSibs),
+ {[TwoChildSibs], no_conflicts},
+ couch_key_tree:merge([TwoChildSibs], TwoChildSibs),
"Merging a tree to itself is itself."),
etap:is(
- {TwoChildSibs, no_conflicts},
- couch_key_tree:merge(TwoChildSibs, Stemmed1b),
+ {[TwoChildSibs], no_conflicts},
+ couch_key_tree:merge([TwoChildSibs], Stemmed1b),
"Merging a tree with a stem."
),
etap:is(
- {TwoChildSibs, no_conflicts},
- couch_key_tree:merge(Stemmed1b, TwoChildSibs),
- "Merging in the opposite direction."
- ),
-
- etap:is(
- {TwoChildSibs2, no_conflicts},
- couch_key_tree:merge(TwoChildSibs2, Stemmed1bb),
+ {[TwoChildSibs2], no_conflicts},
+ couch_key_tree:merge([TwoChildSibs2], Stemmed1bb),
"Merging a stem at a deeper level."
),
etap:is(
- {TwoChildSibs2, no_conflicts},
- couch_key_tree:merge(Stemmed1bb, TwoChildSibs2),
- "Merging a deeper level in opposite order."
- ),
-
- etap:is(
- {TwoChild, no_conflicts},
- couch_key_tree:merge(TwoChild, Stemmed1aa),
+ {[TwoChild], no_conflicts},
+ couch_key_tree:merge([TwoChild], Stemmed1aa),
"Merging a single tree with a deeper stem."
),
etap:is(
- {TwoChild, no_conflicts},
- couch_key_tree:merge(TwoChild, Stemmed1a),
+ {[TwoChild], no_conflicts},
+ couch_key_tree:merge([TwoChild], Stemmed1a),
"Merging a larger stem."
),
etap:is(
- {Stemmed1a, no_conflicts},
- couch_key_tree:merge(Stemmed1a, Stemmed1aa),
+ {[Stemmed1a], no_conflicts},
+ couch_key_tree:merge([Stemmed1a], Stemmed1aa),
"More merging."
),
- Expect1 = OneChild ++ Stemmed1aa,
+ Expect1 = [OneChild, Stemmed1aa],
etap:is(
{Expect1, conflicts},
- couch_key_tree:merge(OneChild, Stemmed1aa),
+ couch_key_tree:merge([OneChild], Stemmed1aa),
"Merging should create conflicts."
),
etap:is(
- {TwoChild, no_conflicts},
+ {[TwoChild], no_conflicts},
couch_key_tree:merge(Expect1, TwoChild),
"Merge should have no conflicts."
),
diff --git a/test/etap/070-couch-db.t b/test/etap/070-couch-db.t
index bf20dc0a..4b14aba6 100755
--- a/test/etap/070-couch-db.t
+++ b/test/etap/070-couch-db.t
@@ -41,6 +41,7 @@ test() ->
etap:ok(not lists:member(<<"etap-test-db">>, AllDbs2),
"Database was deleted."),
+ gen_server:call(couch_server, {set_max_dbs_open, 3}),
MkDbName = fun(Int) -> list_to_binary("lru-" ++ integer_to_list(Int)) end,
lists:foreach(fun(Int) ->
@@ -51,24 +52,24 @@ test() ->
end,
{ok, Db} = couch_db:create(MkDbName(Int), []),
ok = couch_db:close(Db)
- end, lists:seq(1, 200)),
+ end, lists:seq(1, 6)),
{ok, AllDbs3} = couch_server:all_databases(),
NumCreated = lists:foldl(fun(Int, Acc) ->
true = lists:member(MkDbName(Int), AllDbs3),
Acc+1
- end, 0, lists:seq(1, 200)),
- etap:is(200, NumCreated, "Created all databases."),
+ end, 0, lists:seq(1, 6)),
+ etap:is(6, NumCreated, "Created all databases."),
lists:foreach(fun(Int) ->
ok = couch_server:delete(MkDbName(Int), [])
- end, lists:seq(1, 200)),
+ end, lists:seq(1, 6)),
{ok, AllDbs4} = couch_server:all_databases(),
NumDeleted = lists:foldl(fun(Int, Acc) ->
false = lists:member(MkDbName(Int), AllDbs4),
Acc+1
- end, 0, lists:seq(1, 200)),
- etap:is(200, NumDeleted, "Deleted all databases."),
+ end, 0, lists:seq(1, 6)),
+ etap:is(6, NumDeleted, "Deleted all databases."),
ok.
diff --git a/test/etap/090-task-status.t b/test/etap/090-task-status.t
index b6ebbe4c..b278de7f 100755
--- a/test/etap/090-task-status.t
+++ b/test/etap/090-task-status.t
@@ -29,7 +29,7 @@ check_status(Pid,ListPropLists) ->
From = list_to_binary(pid_to_list(Pid)),
Element = lists:foldl(
fun(PropList,Acc) ->
- case proplists:get_value(pid,PropList) of
+ case couch_util:get_value(pid,PropList) of
From ->
[PropList | Acc];
_ ->
@@ -38,7 +38,7 @@ check_status(Pid,ListPropLists) ->
end,
[], ListPropLists
),
- proplists:get_value(status,hd(Element)).
+ couch_util:get_value(status,hd(Element)).
loop() ->
receive
diff --git a/test/etap/100-ref-counter.t b/test/etap/100-ref-counter.t
index 6f18d828..8f996d04 100755
--- a/test/etap/100-ref-counter.t
+++ b/test/etap/100-ref-counter.t
@@ -27,17 +27,14 @@ main(_) ->
loop() ->
receive
- {ping, From} ->
- From ! pong
+ close -> ok
end.
wait() ->
receive
- _ ->
- ok
- after
- 1000 ->
- throw(timeout_error)
+ {'DOWN', _, _, _, _} -> ok
+ after 1000 ->
+ throw(timeout_error)
end.
test() ->
@@ -94,11 +91,23 @@ test() ->
"Sanity checking that the Pid was re-added."
),
- ChildPid1 ! {ping, self()},
+ erlang:monitor(process, ChildPid1),
+ ChildPid1 ! close,
wait(),
- etap:is(
- couch_ref_counter:count(RefCtr),
- 1,
+
+ CheckFun = fun
+ (Iter, nil) ->
+ case couch_ref_counter:count(RefCtr) of
+ 1 -> Iter;
+ _ -> nil
+ end;
+ (_, Acc) ->
+ Acc
+ end,
+ Result = lists:foldl(CheckFun, nil, lists:seq(1, 10000)),
+ etap:isnt(
+ Result,
+ nil,
"The referer count was decremented automatically on process exit."
),
diff --git a/test/etap/110-replication-httpc.t b/test/etap/110-replication-httpc.t
index 492732bc..529239c5 100755
--- a/test/etap/110-replication-httpc.t
+++ b/test/etap/110-replication-httpc.t
@@ -19,7 +19,7 @@
auth = [],
resource = "",
headers = [
- {"User-Agent", "CouchDb/"++couch_server:get_version()},
+ {"User-Agent", "CouchDB/"++couch_server:get_version()},
{"Accept", "application/json"},
{"Accept-Encoding", "gzip"}
],
@@ -107,8 +107,8 @@ test_put() ->
method = put
},
{Resp} = couch_rep_httpc:request(Req),
- etap:ok(proplists:get_value(<<"ok">>, Resp), "ok:true on upload"),
- etap:is(<<"test_put">>, proplists:get_value(<<"id">>, Resp), "id is correct").
+ etap:ok(couch_util:get_value(<<"ok">>, Resp), "ok:true on upload"),
+ etap:is(<<"test_put">>, couch_util:get_value(<<"id">>, Resp), "id is correct").
test_qs() ->
Req = #http_db{
diff --git a/test/etap/111-replication-changes-feed.t b/test/etap/111-replication-changes-feed.t
index b03c1ac7..778b99dd 100755
--- a/test/etap/111-replication-changes-feed.t
+++ b/test/etap/111-replication-changes-feed.t
@@ -22,7 +22,7 @@
auth = [],
resource = "",
headers = [
- {"User-Agent", "CouchDb/"++couch_server:get_version()},
+ {"User-Agent", "CouchDB/"++couch_server:get_version()},
{"Accept", "application/json"},
{"Accept-Encoding", "gzip"}
],
@@ -153,11 +153,11 @@ test_deleted_conflicts(Type) ->
{ExpectProps} = generate_conflict(),
%% delete the conflict revision
- Id = proplists:get_value(<<"id">>, ExpectProps),
- [Win, {[{<<"rev">>, Lose}]}] = proplists:get_value(<<"changes">>, ExpectProps),
+ Id = couch_util:get_value(<<"id">>, ExpectProps),
+ [Win, {[{<<"rev">>, Lose}]}] = couch_util:get_value(<<"changes">>, ExpectProps),
Doc = couch_doc:from_json_obj({[
{<<"_id">>, Id},
- {<<"_rev">>, couch_doc:rev_to_str(Lose)},
+ {<<"_rev">>, Lose},
{<<"_deleted">>, true}
]}),
Db = get_db(),
@@ -167,7 +167,7 @@ test_deleted_conflicts(Type) ->
Expect = {[
{<<"seq">>, get_update_seq()},
{<<"id">>, Id},
- {<<"changes">>, [Win, {[{<<"rev">>, Rev}]}]}
+ {<<"changes">>, [Win, {[{<<"rev">>, couch_doc:rev_to_str(Rev)}]}]}
]},
{ok, Pid} = start_changes_feed(Type, Since, false),
@@ -210,7 +210,7 @@ generate_change(Id, EJson) ->
{[
{<<"seq">>, get_update_seq()},
{<<"id">>, Id},
- {<<"changes">>, [{[{<<"rev">>, Rev}]}]}
+ {<<"changes">>, [{[{<<"rev">>, couch_doc:rev_to_str(Rev)}]}]}
]}.
generate_conflict() ->
@@ -220,9 +220,9 @@ generate_conflict() ->
Doc2 = (couch_doc:from_json_obj({[<<"foo">>, <<"baz">>]}))#doc{id = Id},
{ok, Rev1} = couch_db:update_doc(Db, Doc1, [full_commit]),
{ok, Rev2} = couch_db:update_doc(Db, Doc2, [full_commit, all_or_nothing]),
-
+
%% relies on undocumented CouchDB conflict winner algo and revision sorting!
- RevList = [{[{<<"rev">>, R}]} || R
+ RevList = [{[{<<"rev">>, couch_doc:rev_to_str(R)}]} || R
<- lists:sort(fun(A,B) -> B<A end, [Rev1,Rev2])],
{[
{<<"seq">>, get_update_seq()},
diff --git a/test/etap/112-replication-missing-revs.t b/test/etap/112-replication-missing-revs.t
index 8aabfd37..71971088 100755
--- a/test/etap/112-replication-missing-revs.t
+++ b/test/etap/112-replication-missing-revs.t
@@ -23,7 +23,7 @@
auth = [],
resource = "",
headers = [
- {"User-Agent", "CouchDb/"++couch_server:get_version()},
+ {"User-Agent", "CouchDB/"++couch_server:get_version()},
{"Accept", "application/json"},
{"Accept-Encoding", "gzip"}
],
@@ -188,8 +188,21 @@ start_changes_feed(remote, Since, Continuous) ->
Db = #http_db{url = "http://127.0.0.1:5984/etap-test-source/"},
couch_rep_changes_feed:start_link(self(), Db, Since, Props).
+couch_rep_pid(Db) ->
+ spawn(fun() -> couch_rep_pid_loop(Db) end).
+
+couch_rep_pid_loop(Db) ->
+ receive
+ {'$gen_call', From, get_target_db} ->
+ gen_server:reply(From, {ok, Db})
+ end,
+ couch_rep_pid_loop(Db).
+
start_missing_revs(local, Changes) ->
- couch_rep_missing_revs:start_link(self(), get_db(target), Changes, []);
+ TargetDb = get_db(target),
+ MainPid = couch_rep_pid(TargetDb),
+ couch_rep_missing_revs:start_link(MainPid, TargetDb, Changes, []);
start_missing_revs(remote, Changes) ->
- Db = #http_db{url = "http://127.0.0.1:5984/etap-test-target/"},
- couch_rep_missing_revs:start_link(self(), Db, Changes, []).
+ TargetDb = #http_db{url = "http://127.0.0.1:5984/etap-test-target/"},
+ MainPid = couch_rep_pid(TargetDb),
+ couch_rep_missing_revs:start_link(MainPid, TargetDb, Changes, []).
diff --git a/test/etap/113-replication-attachment-comp.t b/test/etap/113-replication-attachment-comp.t
new file mode 100755
index 00000000..19c48fc6
--- /dev/null
+++ b/test/etap/113-replication-attachment-comp.t
@@ -0,0 +1,317 @@
+#!/usr/bin/env escript
+%% -*- erlang -*-
+
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+% http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+-record(user_ctx, {
+ name = null,
+ roles = [],
+ handler
+}).
+
+default_config() ->
+ test_util:build_file("etc/couchdb/default_dev.ini").
+
+test_db_a_name() ->
+ <<"couch_test_rep_att_comp_a">>.
+
+test_db_b_name() ->
+ <<"couch_test_rep_att_comp_b">>.
+
+main(_) ->
+ test_util:init_code_path(),
+ etap:plan(45),
+ case (catch test()) of
+ ok ->
+ etap:end_tests();
+ Other ->
+ etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
+ etap:bail(Other)
+ end,
+ ok.
+
+test() ->
+ couch_server_sup:start_link([default_config()]),
+ put(addr, couch_config:get("httpd", "bind_address", "127.0.0.1")),
+ put(port, couch_config:get("httpd", "port", "5984")),
+ application:start(inets),
+ ibrowse:start(),
+ timer:sleep(1000),
+
+ %
+ % test pull replication
+ %
+
+ delete_db(test_db_a_name()),
+ delete_db(test_db_b_name()),
+ create_db(test_db_a_name()),
+ create_db(test_db_b_name()),
+
+ % enable compression
+ couch_config:set("attachments", "compression_level", "8"),
+ couch_config:set("attachments", "compressible_types", "text/*"),
+
+ % store doc with text attachment in DB A
+ put_text_att(test_db_a_name()),
+
+ % disable attachment compression
+ couch_config:set("attachments", "compression_level", "0"),
+
+ % do pull replication
+ do_pull_replication(test_db_a_name(), test_db_b_name()),
+
+ % verify that DB B has the attachment stored in compressed form
+ check_att_is_compressed(test_db_b_name()),
+ check_server_can_decompress_att(test_db_b_name()),
+ check_att_stubs(test_db_a_name(), test_db_b_name()),
+
+ %
+ % test push replication
+ %
+
+ delete_db(test_db_a_name()),
+ delete_db(test_db_b_name()),
+ create_db(test_db_a_name()),
+ create_db(test_db_b_name()),
+
+ % enable compression
+ couch_config:set("attachments", "compression_level", "8"),
+ couch_config:set("attachments", "compressible_types", "text/*"),
+
+ % store doc with text attachment in DB A
+ put_text_att(test_db_a_name()),
+
+ % disable attachment compression
+ couch_config:set("attachments", "compression_level", "0"),
+
+ % do push replication
+ do_push_replication(test_db_a_name(), test_db_b_name()),
+
+ % verify that DB B has the attachment stored in compressed form
+ check_att_is_compressed(test_db_b_name()),
+ check_server_can_decompress_att(test_db_b_name()),
+ check_att_stubs(test_db_a_name(), test_db_b_name()),
+
+ %
+ % test local replication
+ %
+
+ delete_db(test_db_a_name()),
+ delete_db(test_db_b_name()),
+ create_db(test_db_a_name()),
+ create_db(test_db_b_name()),
+
+ % enable compression
+ couch_config:set("attachments", "compression_level", "8"),
+ couch_config:set("attachments", "compressible_types", "text/*"),
+
+ % store doc with text attachment in DB A
+ put_text_att(test_db_a_name()),
+
+ % disable attachment compression
+ couch_config:set("attachments", "compression_level", "0"),
+
+ % do local-local replication
+ do_local_replication(test_db_a_name(), test_db_b_name()),
+
+ % verify that DB B has the attachment stored in compressed form
+ check_att_is_compressed(test_db_b_name()),
+ check_server_can_decompress_att(test_db_b_name()),
+ check_att_stubs(test_db_a_name(), test_db_b_name()),
+
+ timer:sleep(3000), % to avoid mochiweb socket closed exceptions
+ delete_db(test_db_a_name()),
+ delete_db(test_db_b_name()),
+ couch_server_sup:stop(),
+ ok.
+
+put_text_att(DbName) ->
+ {ok, {{_, Code, _}, _Headers, _Body}} = http:request(
+ put,
+ {db_url(DbName) ++ "/testdoc1/readme.txt", [],
+ "text/plain", test_text_data()},
+ [],
+ [{sync, true}]),
+ etap:is(Code, 201, "Created text attachment"),
+ ok.
+
+do_pull_replication(SourceDbName, TargetDbName) ->
+ RepObj = {[
+ {<<"source">>, list_to_binary(db_url(SourceDbName))},
+ {<<"target">>, TargetDbName}
+ ]},
+ {ok, {{_, Code, _}, _Headers, Body}} = http:request(
+ post,
+ {rep_url(), [],
+ "application/json", list_to_binary(couch_util:json_encode(RepObj))},
+ [],
+ [{sync, true}]),
+ etap:is(Code, 200, "Pull replication successfully triggered"),
+ Json = couch_util:json_decode(Body),
+ RepOk = couch_util:get_nested_json_value(Json, [<<"ok">>]),
+ etap:is(RepOk, true, "Pull replication completed with success"),
+ ok.
+
+do_push_replication(SourceDbName, TargetDbName) ->
+ RepObj = {[
+ {<<"source">>, SourceDbName},
+ {<<"target">>, list_to_binary(db_url(TargetDbName))}
+ ]},
+ {ok, {{_, Code, _}, _Headers, Body}} = http:request(
+ post,
+ {rep_url(), [],
+ "application/json", list_to_binary(couch_util:json_encode(RepObj))},
+ [],
+ [{sync, true}]),
+ etap:is(Code, 200, "Push replication successfully triggered"),
+ Json = couch_util:json_decode(Body),
+ RepOk = couch_util:get_nested_json_value(Json, [<<"ok">>]),
+ etap:is(RepOk, true, "Push replication completed with success"),
+ ok.
+
+do_local_replication(SourceDbName, TargetDbName) ->
+ RepObj = {[
+ {<<"source">>, SourceDbName},
+ {<<"target">>, TargetDbName}
+ ]},
+ {ok, {{_, Code, _}, _Headers, Body}} = http:request(
+ post,
+ {rep_url(), [],
+ "application/json", list_to_binary(couch_util:json_encode(RepObj))},
+ [],
+ [{sync, true}]),
+ etap:is(Code, 200, "Local replication successfully triggered"),
+ Json = couch_util:json_decode(Body),
+ RepOk = couch_util:get_nested_json_value(Json, [<<"ok">>]),
+ etap:is(RepOk, true, "Local replication completed with success"),
+ ok.
+
+check_att_is_compressed(DbName) ->
+ {ok, {{_, Code, _}, Headers, Body}} = http:request(
+ get,
+ {db_url(DbName) ++ "/testdoc1/readme.txt",
+ [{"Accept-Encoding", "gzip"}]},
+ [],
+ [{sync, true}]),
+ etap:is(Code, 200, "HTTP response code for the attachment request is 200"),
+ Gziped = lists:member({"content-encoding", "gzip"}, Headers),
+ etap:is(Gziped, true, "The attachment was received in compressed form"),
+ Uncompressed = binary_to_list(zlib:gunzip(list_to_binary(Body))),
+ etap:is(
+ Uncompressed,
+ test_text_data(),
+ "The attachment content is valid after decompression at the client side"
+ ),
+ ok.
+
+check_server_can_decompress_att(DbName) ->
+ {ok, {{_, Code, _}, Headers, Body}} = http:request(
+ get,
+ {db_url(DbName) ++ "/testdoc1/readme.txt", []},
+ [],
+ [{sync, true}]),
+ etap:is(Code, 200, "HTTP response code for the attachment request is 200"),
+ Gziped = lists:member({"content-encoding", "gzip"}, Headers),
+ etap:is(
+ Gziped, false, "The attachment was not received in compressed form"
+ ),
+ etap:is(
+ Body,
+ test_text_data(),
+ "The attachment content is valid after server decompression"
+ ),
+ ok.
+
+check_att_stubs(SourceDbName, TargetDbName) ->
+ {ok, {{_, Code1, _}, _Headers1, Body1}} = http:request(
+ get,
+ {db_url(SourceDbName) ++ "/testdoc1?att_encoding_info=true", []},
+ [],
+ [{sync, true}]),
+ etap:is(
+ Code1,
+ 200,
+ "HTTP response code is 200 for the source DB doc request"
+ ),
+ Json1 = couch_util:json_decode(Body1),
+ SourceAttStub = couch_util:get_nested_json_value(
+ Json1,
+ [<<"_attachments">>, <<"readme.txt">>]
+ ),
+ {ok, {{_, Code2, _}, _Headers2, Body2}} = http:request(
+ get,
+ {db_url(TargetDbName) ++ "/testdoc1?att_encoding_info=true", []},
+ [],
+ [{sync, true}]),
+ etap:is(
+ Code2,
+ 200,
+ "HTTP response code is 200 for the target DB doc request"
+ ),
+ Json2 = couch_util:json_decode(Body2),
+ TargetAttStub = couch_util:get_nested_json_value(
+ Json2,
+ [<<"_attachments">>, <<"readme.txt">>]
+ ),
+ IdenticalStubs = (SourceAttStub =:= TargetAttStub),
+ etap:is(IdenticalStubs, true, "Attachment stubs are identical"),
+ TargetAttStubLength = couch_util:get_nested_json_value(
+ TargetAttStub,
+ [<<"length">>]
+ ),
+ TargetAttStubEnc = couch_util:get_nested_json_value(
+ TargetAttStub,
+ [<<"encoding">>]
+ ),
+ etap:is(
+ TargetAttStubEnc,
+ <<"gzip">>,
+ "Attachment stub has encoding property set to gzip"
+ ),
+ TargetAttStubEncLength = couch_util:get_nested_json_value(
+ TargetAttStub,
+ [<<"encoded_length">>]
+ ),
+ EncLengthDefined = is_integer(TargetAttStubEncLength),
+ etap:is(
+ EncLengthDefined,
+ true,
+ "Stubs have the encoded_length field properly defined"
+ ),
+ EncLengthSmaller = (TargetAttStubEncLength < TargetAttStubLength),
+ etap:is(
+ EncLengthSmaller,
+ true,
+ "Stubs have the encoded_length field smaller than their length field"
+ ),
+ ok.
+
+admin_user_ctx() ->
+ {user_ctx, #user_ctx{roles=[<<"_admin">>]}}.
+
+create_db(DbName) ->
+ {ok, _} = couch_db:create(DbName, [admin_user_ctx()]).
+
+delete_db(DbName) ->
+ couch_server:delete(DbName, [admin_user_ctx()]).
+
+db_url(DbName) ->
+ "http://" ++ get(addr) ++ ":" ++ get(port) ++ "/" ++
+ binary_to_list(DbName).
+
+rep_url() ->
+ "http://" ++ get(addr) ++ ":" ++ get(port) ++ "/_replicate".
+
+test_text_data() ->
+ {ok, Data} = file:read_file(test_util:source_file("README")),
+ binary_to_list(Data).
diff --git a/test/etap/121-stats-aggregates.t b/test/etap/121-stats-aggregates.t
index cd6b1430..d678aa9d 100755
--- a/test/etap/121-stats-aggregates.t
+++ b/test/etap/121-stats-aggregates.t
@@ -46,13 +46,13 @@ test_all_empty() ->
etap:is(length(Aggs), 2, "There are only two aggregate types in testing."),
etap:is(
- proplists:get_value(testing, Aggs),
+ couch_util:get_value(testing, Aggs),
{[{stuff, make_agg(<<"yay description">>,
null, null, null, null, null)}]},
"{testing, stuff} is empty at start."
),
etap:is(
- proplists:get_value(number, Aggs),
+ couch_util:get_value(number, Aggs),
{[{'11', make_agg(<<"randomosity">>,
null, null, null, null, null)}]},
"{number, '11'} is empty at start."
diff --git a/test/etap/130-attachments-md5.t b/test/etap/130-attachments-md5.t
index fe6732d6..4c40f83a 100755
--- a/test/etap/130-attachments-md5.t
+++ b/test/etap/130-attachments-md5.t
@@ -103,7 +103,7 @@ test_identity_with_valid_md5() ->
"PUT /", test_db_name(), "/", docid(), "/readme.txt HTTP/1.1\r\n",
"Content-Type: text/plain\r\n",
"Content-Length: 34\r\n",
- "Content-MD5: ", base64:encode(erlang:md5(AttData)), "\r\n",
+ "Content-MD5: ", base64:encode(couch_util:md5(AttData)), "\r\n",
"\r\n",
AttData],
@@ -118,7 +118,7 @@ test_chunked_with_valid_md5_header() ->
"PUT /", test_db_name(), "/", docid(), "/readme.txt HTTP/1.1\r\n",
"Content-Type: text/plain\r\n",
"Transfer-Encoding: chunked\r\n",
- "Content-MD5: ", base64:encode(erlang:md5(AttData)), "\r\n",
+ "Content-MD5: ", base64:encode(couch_util:md5(AttData)), "\r\n",
"\r\n",
to_hex(size(Part1)), "\r\n",
Part1, "\r\n",
@@ -145,7 +145,7 @@ test_chunked_with_valid_md5_trailer() ->
to_hex(size(Part2)), "\r\n",
Part2, "\r\n",
"0\r\n",
- "Content-MD5: ", base64:encode(erlang:md5(AttData)), "\r\n",
+ "Content-MD5: ", base64:encode(couch_util:md5(AttData)), "\r\n",
"\r\n"],
{Code, Json} = do_request(Data),
@@ -227,7 +227,7 @@ get_socket() ->
do_request(Request) ->
Sock = get_socket(),
gen_tcp:send(Sock, list_to_binary(lists:flatten(Request))),
- timer:sleep(100),
+ timer:sleep(1000),
{ok, R} = gen_tcp:recv(Sock, 0),
gen_tcp:close(Sock),
[Header, Body] = re:split(R, "\r\n\r\n", [{return, binary}]),
diff --git a/test/etap/140-attachment-comp.t b/test/etap/140-attachment-comp.t
new file mode 100755
index 00000000..1a90bf0b
--- /dev/null
+++ b/test/etap/140-attachment-comp.t
@@ -0,0 +1,715 @@
+#!/usr/bin/env escript
+%% -*- erlang -*-
+
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+% http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+default_config() ->
+ test_util:build_file("etc/couchdb/default_dev.ini").
+
+test_db_name() ->
+ <<"couch_test_atts_compression">>.
+
+main(_) ->
+ test_util:init_code_path(),
+
+ etap:plan(78),
+ case (catch test()) of
+ ok ->
+ etap:end_tests();
+ Other ->
+ etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
+ etap:bail(Other)
+ end,
+ ok.
+
+test() ->
+ couch_server_sup:start_link([default_config()]),
+ put(addr, couch_config:get("httpd", "bind_address", "127.0.0.1")),
+ put(port, couch_config:get("httpd", "port", "5984")),
+ application:start(inets),
+ timer:sleep(1000),
+ couch_server:delete(test_db_name(), []),
+ couch_db:create(test_db_name(), []),
+
+ couch_config:set("attachments", "compression_level", "8"),
+ couch_config:set("attachments", "compressible_types", "text/*"),
+
+ create_1st_text_att(),
+ create_1st_png_att(),
+ create_2nd_text_att(),
+ create_2nd_png_att(),
+
+ tests_for_1st_text_att(),
+ tests_for_1st_png_att(),
+ tests_for_2nd_text_att(),
+ tests_for_2nd_png_att(),
+
+ create_already_compressed_att(db_url() ++ "/doc_comp_att", "readme.txt"),
+ test_already_compressed_att(db_url() ++ "/doc_comp_att", "readme.txt"),
+
+ test_create_already_compressed_att_with_invalid_content_encoding(
+ db_url() ++ "/doc_att_deflate",
+ "readme.txt",
+ zlib:compress(test_text_data()),
+ "deflate"
+ ),
+
+ test_create_already_compressed_att_with_invalid_content_encoding(
+ db_url() ++ "/doc_att_compress",
+ "readme.txt",
+ % Note: As of OTP R13B04, it seems there's no LZW compression
+ % (i.e. UNIX compress utility implementation) lib in OTP.
+ % However there's a simple working Erlang implementation at:
+ % http://scienceblogs.com/goodmath/2008/01/simple_lempelziv_compression_i.php
+ test_text_data(),
+ "compress"
+ ),
+
+ timer:sleep(3000), % to avoid mochiweb socket closed exceptions
+ couch_server:delete(test_db_name(), []),
+ couch_server_sup:stop(),
+ ok.
+
+db_url() ->
+ "http://" ++ get(addr) ++ ":" ++ get(port) ++ "/" ++
+ binary_to_list(test_db_name()).
+
+create_1st_text_att() ->
+ {ok, {{_, Code, _}, _Headers, _Body}} = http:request(
+ put,
+ {db_url() ++ "/testdoc1/readme.txt", [],
+ "text/plain", test_text_data()},
+ [],
+ [{sync, true}]),
+ etap:is(Code, 201, "Created text attachment using the standalone api"),
+ ok.
+
+create_1st_png_att() ->
+ {ok, {{_, Code, _}, _Headers, _Body}} = http:request(
+ put,
+ {db_url() ++ "/testdoc2/icon.png", [],
+ "image/png", test_png_data()},
+ [],
+ [{sync, true}]),
+ etap:is(Code, 201, "Created png attachment using the standalone api"),
+ ok.
+
+% create a text attachment using the non-standalone attachment api
+create_2nd_text_att() ->
+ DocJson = {[
+ {<<"_attachments">>, {[
+ {<<"readme.txt">>, {[
+ {<<"content_type">>, <<"text/plain">>},
+ {<<"data">>, base64:encode(test_text_data())}
+ ]}
+ }]}}
+ ]},
+ {ok, {{_, Code, _}, _Headers, _Body}} = http:request(
+ put,
+ {db_url() ++ "/testdoc3", [],
+ "application/json", list_to_binary(couch_util:json_encode(DocJson))},
+ [],
+ [{sync, true}]),
+ etap:is(Code, 201, "Created text attachment using the non-standalone api"),
+ ok.
+
+% create a png attachment using the non-standalone attachment api
+create_2nd_png_att() ->
+ DocJson = {[
+ {<<"_attachments">>, {[
+ {<<"icon.png">>, {[
+ {<<"content_type">>, <<"image/png">>},
+ {<<"data">>, base64:encode(test_png_data())}
+ ]}
+ }]}}
+ ]},
+ {ok, {{_, Code, _}, _Headers, _Body}} = http:request(
+ put,
+ {db_url() ++ "/testdoc4", [],
+ "application/json", list_to_binary(couch_util:json_encode(DocJson))},
+ [],
+ [{sync, true}]),
+ etap:is(Code, 201, "Created png attachment using the non-standalone api"),
+ ok.
+
+create_already_compressed_att(DocUri, AttName) ->
+ {ok, {{_, Code, _}, _Headers, _Body}} = http:request(
+ put,
+ {DocUri ++ "/" ++ AttName, [{"Content-Encoding", "gzip"}],
+ "text/plain", zlib:gzip(test_text_data())},
+ [],
+ [{sync, true}]),
+ etap:is(
+ Code,
+ 201,
+ "Created already compressed attachment using the standalone api"
+ ),
+ ok.
+
+tests_for_1st_text_att() ->
+ test_get_1st_text_att_with_accept_encoding_gzip(),
+ test_get_1st_text_att_without_accept_encoding_header(),
+ test_get_1st_text_att_with_accept_encoding_deflate(),
+ test_get_1st_text_att_with_accept_encoding_deflate_only(),
+ test_get_doc_with_1st_text_att(),
+ test_1st_text_att_stub().
+
+tests_for_1st_png_att() ->
+ test_get_1st_png_att_without_accept_encoding_header(),
+ test_get_1st_png_att_with_accept_encoding_gzip(),
+ test_get_1st_png_att_with_accept_encoding_deflate(),
+ test_get_doc_with_1st_png_att(),
+ test_1st_png_att_stub().
+
+tests_for_2nd_text_att() ->
+ test_get_2nd_text_att_with_accept_encoding_gzip(),
+ test_get_2nd_text_att_without_accept_encoding_header(),
+ test_get_doc_with_2nd_text_att(),
+ test_2nd_text_att_stub().
+
+tests_for_2nd_png_att() ->
+ test_get_2nd_png_att_without_accept_encoding_header(),
+ test_get_2nd_png_att_with_accept_encoding_gzip(),
+ test_get_doc_with_2nd_png_att(),
+ test_2nd_png_att_stub().
+
+test_get_1st_text_att_with_accept_encoding_gzip() ->
+ {ok, {{_, Code, _}, Headers, Body}} = http:request(
+ get,
+ {db_url() ++ "/testdoc1/readme.txt", [{"Accept-Encoding", "gzip"}]},
+ [],
+ [{sync, true}]),
+ etap:is(Code, 200, "HTTP response code is 200"),
+    Gzipped = lists:member({"content-encoding", "gzip"}, Headers),
+    etap:is(Gzipped, true, "received body is gzipped"),
+ Uncompressed = binary_to_list(zlib:gunzip(list_to_binary(Body))),
+ etap:is(
+ Uncompressed,
+ test_text_data(),
+ "received data for the 1st text attachment is ok"
+ ),
+ ok.
+
+test_get_1st_text_att_without_accept_encoding_header() ->
+ {ok, {{_, Code, _}, Headers, Body}} = http:request(
+ get,
+ {db_url() ++ "/testdoc1/readme.txt", []},
+ [],
+ [{sync, true}]),
+ etap:is(Code, 200, "HTTP response code is 200"),
+    Gzipped = lists:member({"content-encoding", "gzip"}, Headers),
+    etap:is(Gzipped, false, "received body is not gzipped"),
+ etap:is(
+ Body,
+ test_text_data(),
+ "received data for the 1st text attachment is ok"
+ ),
+ ok.
+
+test_get_1st_text_att_with_accept_encoding_deflate() ->
+ {ok, {{_, Code, _}, Headers, Body}} = http:request(
+ get,
+ {db_url() ++ "/testdoc1/readme.txt", [{"Accept-Encoding", "deflate"}]},
+ [],
+ [{sync, true}]),
+ etap:is(Code, 200, "HTTP response code is 200"),
+    Gzipped = lists:member({"content-encoding", "gzip"}, Headers),
+    etap:is(Gzipped, false, "received body is not gzipped"),
+ Deflated = lists:member({"content-encoding", "deflate"}, Headers),
+ etap:is(Deflated, false, "received body is not deflated"),
+ etap:is(
+ Body,
+ test_text_data(),
+ "received data for the 1st text attachment is ok"
+ ),
+ ok.
+
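+% "Accept-Encoding: deflate, *;q=0" declares that only deflate is acceptable
+% (the "*;q=0" part forbids every other coding, including identity). Since
+% this attachment is only available as gzip or identity, the server must
+% answer 406 Not Acceptable.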
+test_get_1st_text_att_with_accept_encoding_deflate_only() ->
+ {ok, {{_, Code, _}, _Headers, _Body}} = http:request(
+ get,
+ {db_url() ++ "/testdoc1/readme.txt",
+ [{"Accept-Encoding", "deflate, *;q=0"}]},
+ [],
+ [{sync, true}]),
+ etap:is(
+ Code,
+ 406,
+ "HTTP response code is 406 for an unsupported content encoding request"
+ ),
+ ok.
+
+test_get_1st_png_att_without_accept_encoding_header() ->
+ {ok, {{_, Code, _}, Headers, Body}} = http:request(
+ get,
+ {db_url() ++ "/testdoc2/icon.png", []},
+ [],
+ [{sync, true}]),
+ etap:is(Code, 200, "HTTP response code is 200"),
+    Gzipped = lists:member({"content-encoding", "gzip"}, Headers),
+    etap:is(Gzipped, false, "received body is not gzipped"),
+ etap:is(
+ Body,
+ test_png_data(),
+ "received data for the 1st png attachment is ok"
+ ),
+ ok.
+
+test_get_1st_png_att_with_accept_encoding_gzip() ->
+ {ok, {{_, Code, _}, Headers, Body}} = http:request(
+ get,
+ {db_url() ++ "/testdoc2/icon.png", [{"Accept-Encoding", "gzip"}]},
+ [],
+ [{sync, true}]),
+ etap:is(Code, 200, "HTTP response code is 200"),
+    Gzipped = lists:member({"content-encoding", "gzip"}, Headers),
+    etap:is(Gzipped, false, "received body is not gzipped"),
+ etap:is(
+ Body,
+ test_png_data(),
+ "received data for the 1st png attachment is ok"
+ ),
+ ok.
+
+test_get_1st_png_att_with_accept_encoding_deflate() ->
+ {ok, {{_, Code, _}, Headers, Body}} = http:request(
+ get,
+ {db_url() ++ "/testdoc2/icon.png", [{"Accept-Encoding", "deflate"}]},
+ [],
+ [{sync, true}]),
+ etap:is(Code, 200, "HTTP response code is 200"),
+ Deflated = lists:member({"content-encoding", "deflate"}, Headers),
+ etap:is(Deflated, false, "received body is not deflated"),
+    Gzipped = lists:member({"content-encoding", "gzip"}, Headers),
+    etap:is(Gzipped, false, "received body is not gzipped"),
+ etap:is(
+ Body,
+ test_png_data(),
+ "received data for the 1st png attachment is ok"
+ ),
+ ok.
+
+test_get_doc_with_1st_text_att() ->
+ {ok, {{_, Code, _}, _Headers, Body}} = http:request(
+ get,
+ {db_url() ++ "/testdoc1?attachments=true",
+ [{"Accept", "application/json"}]},
+ [],
+ [{sync, true}]),
+ etap:is(Code, 200, "HTTP response code is 200"),
+ Json = couch_util:json_decode(Body),
+ TextAttJson = couch_util:get_nested_json_value(
+ Json,
+ [<<"_attachments">>, <<"readme.txt">>]
+ ),
+ TextAttType = couch_util:get_nested_json_value(
+ TextAttJson,
+ [<<"content_type">>]
+ ),
+ TextAttData = couch_util:get_nested_json_value(
+ TextAttJson,
+ [<<"data">>]
+ ),
+ etap:is(
+ TextAttType,
+ <<"text/plain">>,
+ "1st text attachment has type text/plain"
+ ),
+    %% check that the attachment's data is the base64 encoding of the plain
+    %% text and not of the gzipped plain text
+ etap:is(
+ TextAttData,
+ base64:encode(test_text_data()),
+ "1st text attachment data is properly base64 encoded"
+ ),
+ ok.
+
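+% With ?att_encoding_info=true the attachment stub is expected to look
+% roughly like (sizes illustrative only):
+%   "readme.txt": {"content_type": "text/plain", "stub": true,
+%                  "length": 95, "encoding": "gzip", "encoded_length": 52}
+% where "length" is always the uncompressed size and "encoded_length" the
+% on-disk (gzipped) size.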
+test_1st_text_att_stub() ->
+ {ok, {{_, Code, _}, _Headers, Body}} = http:request(
+ get,
+ {db_url() ++ "/testdoc1?att_encoding_info=true", []},
+ [],
+ [{sync, true}]),
+ etap:is(Code, 200, "HTTP response code is 200"),
+ Json = couch_util:json_decode(Body),
+ {TextAttJson} = couch_util:get_nested_json_value(
+ Json,
+ [<<"_attachments">>, <<"readme.txt">>]
+ ),
+ TextAttLength = couch_util:get_value(<<"length">>, TextAttJson),
+ etap:is(
+ TextAttLength,
+ length(test_text_data()),
+ "1st text attachment stub length matches the uncompressed length"
+ ),
+ TextAttEncoding = couch_util:get_value(<<"encoding">>, TextAttJson),
+ etap:is(
+ TextAttEncoding,
+ <<"gzip">>,
+ "1st text attachment stub has the encoding field set to gzip"
+ ),
+ TextAttEncLength = couch_util:get_value(<<"encoded_length">>, TextAttJson),
+ etap:is(
+ TextAttEncLength,
+ iolist_size(zlib:gzip(test_text_data())),
+ "1st text attachment stub encoded_length matches the compressed length"
+ ),
+ ok.
+
+test_get_doc_with_1st_png_att() ->
+ {ok, {{_, Code, _}, _Headers, Body}} = http:request(
+ get,
+ {db_url() ++ "/testdoc2?attachments=true",
+ [{"Accept", "application/json"}]},
+ [],
+ [{sync, true}]),
+ etap:is(Code, 200, "HTTP response code is 200"),
+ Json = couch_util:json_decode(Body),
+ PngAttJson = couch_util:get_nested_json_value(
+ Json,
+ [<<"_attachments">>, <<"icon.png">>]
+ ),
+ PngAttType = couch_util:get_nested_json_value(
+ PngAttJson,
+ [<<"content_type">>]
+ ),
+ PngAttData = couch_util:get_nested_json_value(
+ PngAttJson,
+ [<<"data">>]
+ ),
+ etap:is(PngAttType, <<"image/png">>, "attachment has type image/png"),
+ etap:is(
+ PngAttData,
+ base64:encode(test_png_data()),
+ "1st png attachment data is properly base64 encoded"
+ ),
+ ok.
+
+test_1st_png_att_stub() ->
+ {ok, {{_, Code, _}, _Headers, Body}} = http:request(
+ get,
+ {db_url() ++ "/testdoc2?att_encoding_info=true", []},
+ [],
+ [{sync, true}]),
+ etap:is(Code, 200, "HTTP response code is 200"),
+ Json = couch_util:json_decode(Body),
+ {PngAttJson} = couch_util:get_nested_json_value(
+ Json,
+ [<<"_attachments">>, <<"icon.png">>]
+ ),
+ PngAttLength = couch_util:get_value(<<"length">>, PngAttJson),
+ etap:is(
+ PngAttLength,
+ length(test_png_data()),
+ "1st png attachment stub length matches the uncompressed length"
+ ),
+ PngEncoding = couch_util:get_value(<<"encoding">>, PngAttJson),
+ etap:is(
+ PngEncoding,
+ undefined,
+ "1st png attachment stub doesn't have an encoding field"
+ ),
+ PngEncLength = couch_util:get_value(<<"encoded_length">>, PngAttJson),
+ etap:is(
+ PngEncLength,
+ undefined,
+ "1st png attachment stub doesn't have an encoded_length field"
+ ),
+ ok.
+
+test_get_2nd_text_att_with_accept_encoding_gzip() ->
+ {ok, {{_, Code, _}, Headers, Body}} = http:request(
+ get,
+ {db_url() ++ "/testdoc3/readme.txt", [{"Accept-Encoding", "gzip"}]},
+ [],
+ [{sync, true}]),
+ etap:is(Code, 200, "HTTP response code is 200"),
+    Gzipped = lists:member({"content-encoding", "gzip"}, Headers),
+    etap:is(Gzipped, true, "received body is gzipped"),
+ Uncompressed = binary_to_list(zlib:gunzip(list_to_binary(Body))),
+ etap:is(
+ Uncompressed,
+ test_text_data(),
+ "received data for the 2nd text attachment is ok"
+ ),
+ ok.
+
+test_get_2nd_text_att_without_accept_encoding_header() ->
+ {ok, {{_, Code, _}, Headers, Body}} = http:request(
+ get,
+ {db_url() ++ "/testdoc3/readme.txt", []},
+ [],
+ [{sync, true}]),
+ etap:is(Code, 200, "HTTP response code is 200"),
+    Gzipped = lists:member({"content-encoding", "gzip"}, Headers),
+    etap:is(Gzipped, false, "received body is not gzipped"),
+ etap:is(
+ Body,
+ test_text_data(),
+ "received data for the 2nd text attachment is ok"
+ ),
+ ok.
+
+test_get_2nd_png_att_without_accept_encoding_header() ->
+ {ok, {{_, Code, _}, Headers, Body}} = http:request(
+ get,
+ {db_url() ++ "/testdoc4/icon.png", []},
+ [],
+ [{sync, true}]),
+ etap:is(Code, 200, "HTTP response code is 200"),
+    Gzipped = lists:member({"content-encoding", "gzip"}, Headers),
+    etap:is(Gzipped, false, "received body is not gzipped"),
+ etap:is(
+ Body,
+ test_png_data(),
+ "received data for the 2nd png attachment is ok"
+ ),
+ ok.
+
+test_get_2nd_png_att_with_accept_encoding_gzip() ->
+ {ok, {{_, Code, _}, Headers, Body}} = http:request(
+ get,
+ {db_url() ++ "/testdoc4/icon.png", [{"Accept-Encoding", "gzip"}]},
+ [],
+ [{sync, true}]),
+ etap:is(Code, 200, "HTTP response code is 200"),
+    Gzipped = lists:member({"content-encoding", "gzip"}, Headers),
+    etap:is(Gzipped, false, "received body is not gzipped"),
+ etap:is(
+ Body,
+ test_png_data(),
+ "received data for the 2nd png attachment is ok"
+ ),
+ ok.
+
+test_get_doc_with_2nd_text_att() ->
+ {ok, {{_, Code, _}, _Headers, Body}} = http:request(
+ get,
+ {db_url() ++ "/testdoc3?attachments=true",
+ [{"Accept", "application/json"}]},
+ [],
+ [{sync, true}]),
+ etap:is(Code, 200, "HTTP response code is 200"),
+ Json = couch_util:json_decode(Body),
+ TextAttJson = couch_util:get_nested_json_value(
+ Json,
+ [<<"_attachments">>, <<"readme.txt">>]
+ ),
+ TextAttType = couch_util:get_nested_json_value(
+ TextAttJson,
+ [<<"content_type">>]
+ ),
+ TextAttData = couch_util:get_nested_json_value(
+ TextAttJson,
+ [<<"data">>]
+ ),
+ etap:is(TextAttType, <<"text/plain">>, "attachment has type text/plain"),
+    %% check that the attachment's data is the base64 encoding of the plain
+    %% text and not of the gzipped plain text
+ etap:is(
+ TextAttData,
+ base64:encode(test_text_data()),
+ "2nd text attachment data is properly base64 encoded"
+ ),
+ ok.
+
+test_2nd_text_att_stub() ->
+ {ok, {{_, Code, _}, _Headers, Body}} = http:request(
+ get,
+ {db_url() ++ "/testdoc3?att_encoding_info=true", []},
+ [],
+ [{sync, true}]),
+ etap:is(Code, 200, "HTTP response code is 200"),
+ Json = couch_util:json_decode(Body),
+ {TextAttJson} = couch_util:get_nested_json_value(
+ Json,
+ [<<"_attachments">>, <<"readme.txt">>]
+ ),
+ TextAttLength = couch_util:get_value(<<"length">>, TextAttJson),
+ etap:is(
+ TextAttLength,
+ length(test_text_data()),
+ "2nd text attachment stub length matches the uncompressed length"
+ ),
+ TextAttEncoding = couch_util:get_value(<<"encoding">>, TextAttJson),
+ etap:is(
+ TextAttEncoding,
+ <<"gzip">>,
+ "2nd text attachment stub has the encoding field set to gzip"
+ ),
+ TextAttEncLength = couch_util:get_value(<<"encoded_length">>, TextAttJson),
+ etap:is(
+ TextAttEncLength,
+ iolist_size(zlib:gzip(test_text_data())),
+ "2nd text attachment stub encoded_length matches the compressed length"
+ ),
+ ok.
+
+test_get_doc_with_2nd_png_att() ->
+ {ok, {{_, Code, _}, _Headers, Body}} = http:request(
+ get,
+ {db_url() ++ "/testdoc4?attachments=true",
+ [{"Accept", "application/json"}]},
+ [],
+ [{sync, true}]),
+ etap:is(Code, 200, "HTTP response code is 200"),
+ Json = couch_util:json_decode(Body),
+ PngAttJson = couch_util:get_nested_json_value(
+ Json,
+ [<<"_attachments">>, <<"icon.png">>]
+ ),
+ PngAttType = couch_util:get_nested_json_value(
+ PngAttJson,
+ [<<"content_type">>]
+ ),
+ PngAttData = couch_util:get_nested_json_value(
+ PngAttJson,
+ [<<"data">>]
+ ),
+ etap:is(PngAttType, <<"image/png">>, "attachment has type image/png"),
+ etap:is(
+ PngAttData,
+ base64:encode(test_png_data()),
+ "2nd png attachment data is properly base64 encoded"
+ ),
+ ok.
+
+test_2nd_png_att_stub() ->
+ {ok, {{_, Code, _}, _Headers, Body}} = http:request(
+ get,
+ {db_url() ++ "/testdoc4?att_encoding_info=true", []},
+ [],
+ [{sync, true}]),
+ etap:is(Code, 200, "HTTP response code is 200"),
+ Json = couch_util:json_decode(Body),
+ {PngAttJson} = couch_util:get_nested_json_value(
+ Json,
+ [<<"_attachments">>, <<"icon.png">>]
+ ),
+ PngAttLength = couch_util:get_value(<<"length">>, PngAttJson),
+ etap:is(
+ PngAttLength,
+ length(test_png_data()),
+ "2nd png attachment stub length matches the uncompressed length"
+ ),
+ PngEncoding = couch_util:get_value(<<"encoding">>, PngAttJson),
+ etap:is(
+ PngEncoding,
+ undefined,
+ "2nd png attachment stub doesn't have an encoding field"
+ ),
+ PngEncLength = couch_util:get_value(<<"encoded_length">>, PngAttJson),
+ etap:is(
+ PngEncLength,
+ undefined,
+ "2nd png attachment stub doesn't have an encoded_length field"
+ ),
+ ok.
+
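+% For attachments uploaded with "Content-Encoding: gzip", CouchDB stores the
+% bytes exactly as received: gzip-accepting clients get them back untouched,
+% while other clients get a transparently decompressed body. Note that for
+% such attachments the stub's "length" reports the compressed size (checked
+% in test_get_already_compressed_att_stub/2 below).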
+test_already_compressed_att(DocUri, AttName) ->
+ test_get_already_compressed_att_with_accept_gzip(DocUri, AttName),
+ test_get_already_compressed_att_without_accept(DocUri, AttName),
+ test_get_already_compressed_att_stub(DocUri, AttName).
+
+test_get_already_compressed_att_with_accept_gzip(DocUri, AttName) ->
+ {ok, {{_, Code, _}, Headers, Body}} = http:request(
+ get,
+ {DocUri ++ "/" ++ AttName, [{"Accept-Encoding", "gzip"}]},
+ [],
+ [{sync, true}]),
+ etap:is(Code, 200, "HTTP response code is 200"),
+    Gzipped = lists:member({"content-encoding", "gzip"}, Headers),
+    etap:is(Gzipped, true, "received body is gzipped"),
+ etap:is(
+ iolist_to_binary(Body),
+ iolist_to_binary(zlib:gzip(test_text_data())),
+ "received data for the already compressed attachment is ok"
+ ),
+ ok.
+
+test_get_already_compressed_att_without_accept(DocUri, AttName) ->
+ {ok, {{_, Code, _}, Headers, Body}} = http:request(
+ get,
+ {DocUri ++ "/" ++ AttName, []},
+ [],
+ [{sync, true}]),
+ etap:is(Code, 200, "HTTP response code is 200"),
+    Gzipped = lists:member({"content-encoding", "gzip"}, Headers),
+    etap:is(Gzipped, false, "received body is not gzipped"),
+ etap:is(
+ iolist_to_binary(Body),
+ iolist_to_binary(test_text_data()),
+ "received data for the already compressed attachment is ok"
+ ),
+ ok.
+
+test_get_already_compressed_att_stub(DocUri, AttName) ->
+ {ok, {{_, Code, _}, _Headers, Body}} = http:request(
+ get,
+ {DocUri ++ "?att_encoding_info=true", []},
+ [],
+ [{sync, true}]),
+ etap:is(Code, 200, "HTTP response code is 200"),
+ Json = couch_util:json_decode(Body),
+ {AttJson} = couch_util:get_nested_json_value(
+ Json,
+ [<<"_attachments">>, iolist_to_binary(AttName)]
+ ),
+ AttLength = couch_util:get_value(<<"length">>, AttJson),
+ etap:is(
+ AttLength,
+ iolist_size((zlib:gzip(test_text_data()))),
+ "Already compressed attachment stub length matches the "
+ "compressed length"
+ ),
+ Encoding = couch_util:get_value(<<"encoding">>, AttJson),
+ etap:is(
+ Encoding,
+ <<"gzip">>,
+ "Already compressed attachment stub has the encoding field set to gzip"
+ ),
+ EncLength = couch_util:get_value(<<"encoded_length">>, AttJson),
+ etap:is(
+ EncLength,
+ AttLength,
+ "Already compressed attachment stub encoded_length matches the "
+ "length field value"
+ ),
+ ok.
+
+test_create_already_compressed_att_with_invalid_content_encoding(
+ DocUri, AttName, AttData, Encoding) ->
+ {ok, {{_, Code, _}, _Headers, _Body}} = http:request(
+ put,
+ {DocUri ++ "/" ++ AttName, [{"Content-Encoding", Encoding}],
+ "text/plain", AttData},
+ [],
+ [{sync, true}]),
+ etap:is(
+ Code,
+ 415,
+ "Couldn't create an already compressed attachment using the "
+ "unsupported encoding '" ++ Encoding ++ "'"
+ ),
+ ok.
+
+test_png_data() ->
+ {ok, Data} = file:read_file(
+ test_util:source_file("share/www/image/logo.png")
+ ),
+ binary_to_list(Data).
+
+test_text_data() ->
+ {ok, Data} = file:read_file(
+ test_util:source_file("README")
+ ),
+ binary_to_list(Data).
diff --git a/test/etap/150-invalid-view-seq.t b/test/etap/150-invalid-view-seq.t
new file mode 100755
index 00000000..0664c116
--- /dev/null
+++ b/test/etap/150-invalid-view-seq.t
@@ -0,0 +1,192 @@
+#!/usr/bin/env escript
+%% -*- erlang -*-
+
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+% http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+-record(user_ctx, {
+ name = null,
+ roles = [],
+ handler
+}).
+
+default_config() ->
+ test_util:build_file("etc/couchdb/default_dev.ini").
+
+test_db_name() ->
+ <<"couch_test_invalid_view_seq">>.
+
+main(_) ->
+ test_util:init_code_path(),
+
+ etap:plan(10),
+ case (catch test()) of
+ ok ->
+ etap:end_tests();
+ Other ->
+ etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
+ etap:bail(Other)
+ end,
+ ok.
+
+%% NOTE: because this test stops and restarts the server, a large but
+%% harmless stack trace is printed to stderr.
+%%
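+%% Scenario: a view index records the database update_seq it was built
+%% against. After querying the view we restore an older copy of the .couch
+%% file, so the index's recorded seq becomes larger than the database's
+%% seq. CouchDB is expected to detect the invalid seq and rebuild the
+%% index, which is why doc666 (present only in the newer file) must be
+%% gone from the view after the restore.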
+test() ->
+ couch_server_sup:start_link([default_config()]),
+ timer:sleep(1000),
+ delete_db(),
+ create_db(),
+
+ create_docs(),
+ create_design_doc(),
+
+ % make DB file backup
+ backup_db_file(),
+
+ put(addr, couch_config:get("httpd", "bind_address", "127.0.0.1")),
+ put(port, couch_config:get("httpd", "port", "5984")),
+ application:start(inets),
+
+ create_new_doc(),
+ query_view_before_restore_backup(),
+
+ % restore DB file backup after querying view
+ restore_backup_db_file(),
+
+ query_view_after_restore_backup(),
+
+ delete_db(),
+ couch_server_sup:stop(),
+ ok.
+
+admin_user_ctx() ->
+ {user_ctx, #user_ctx{roles=[<<"_admin">>]}}.
+
+create_db() ->
+ {ok, _} = couch_db:create(test_db_name(), [admin_user_ctx()]).
+
+delete_db() ->
+ couch_server:delete(test_db_name(), [admin_user_ctx()]).
+
+create_docs() ->
+ {ok, Db} = couch_db:open(test_db_name(), [admin_user_ctx()]),
+ Doc1 = couch_doc:from_json_obj({[
+ {<<"_id">>, <<"doc1">>},
+ {<<"value">>, 1}
+ ]}),
+ Doc2 = couch_doc:from_json_obj({[
+ {<<"_id">>, <<"doc2">>},
+ {<<"value">>, 2}
+ ]}),
+ Doc3 = couch_doc:from_json_obj({[
+ {<<"_id">>, <<"doc3">>},
+ {<<"value">>, 3}
+ ]}),
+ {ok, _} = couch_db:update_docs(Db, [Doc1, Doc2, Doc3]),
+ couch_db:ensure_full_commit(Db),
+ couch_db:close(Db).
+
+create_design_doc() ->
+ {ok, Db} = couch_db:open(test_db_name(), [admin_user_ctx()]),
+ DDoc = couch_doc:from_json_obj({[
+ {<<"_id">>, <<"_design/foo">>},
+ {<<"language">>, <<"javascript">>},
+ {<<"views">>, {[
+ {<<"bar">>, {[
+ {<<"map">>, <<"function(doc) { emit(doc.value, 1); }">>}
+ ]}}
+ ]}}
+ ]}),
+ {ok, _} = couch_db:update_docs(Db, [DDoc]),
+ couch_db:ensure_full_commit(Db),
+ couch_db:close(Db).
+
+backup_db_file() ->
+ DbFile = test_util:build_file("tmp/lib/" ++
+ binary_to_list(test_db_name()) ++ ".couch"),
+ {ok, _} = file:copy(DbFile, DbFile ++ ".backup"),
+ ok.
+
+create_new_doc() ->
+ {ok, Db} = couch_db:open(test_db_name(), [admin_user_ctx()]),
+ Doc666 = couch_doc:from_json_obj({[
+ {<<"_id">>, <<"doc666">>},
+ {<<"value">>, 999}
+ ]}),
+ {ok, _} = couch_db:update_docs(Db, [Doc666]),
+ couch_db:ensure_full_commit(Db),
+ couch_db:close(Db).
+
+db_url() ->
+ "http://" ++ get(addr) ++ ":" ++ get(port) ++ "/" ++
+ binary_to_list(test_db_name()).
+
+query_view_before_restore_backup() ->
+ {ok, {{_, Code, _}, _Headers, Body}} = http:request(
+ get,
+ {db_url() ++ "/_design/foo/_view/bar", []},
+ [],
+ [{sync, true}]),
+ etap:is(Code, 200, "Got view response before restoring backup."),
+ ViewJson = couch_util:json_decode(Body),
+ Rows = couch_util:get_nested_json_value(ViewJson, [<<"rows">>]),
+ HasDoc1 = has_doc("doc1", Rows),
+ HasDoc2 = has_doc("doc2", Rows),
+ HasDoc3 = has_doc("doc3", Rows),
+ HasDoc666 = has_doc("doc666", Rows),
+ etap:is(HasDoc1, true, "Before backup restore, view has doc1"),
+ etap:is(HasDoc2, true, "Before backup restore, view has doc2"),
+ etap:is(HasDoc3, true, "Before backup restore, view has doc3"),
+ etap:is(HasDoc666, true, "Before backup restore, view has doc666"),
+ ok.
+
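+% Returns true if any view row carries an "id" equal to DocId1.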
+has_doc(DocId1, Rows) ->
+ DocId = iolist_to_binary(DocId1),
+ lists:any(
+ fun({R}) -> lists:member({<<"id">>, DocId}, R) end,
+ Rows
+ ).
+
+restore_backup_db_file() ->
+ couch_server_sup:stop(),
+ timer:sleep(3000),
+ DbFile = test_util:build_file("tmp/lib/" ++
+ binary_to_list(test_db_name()) ++ ".couch"),
+ ok = file:delete(DbFile),
+ ok = file:rename(DbFile ++ ".backup", DbFile),
+ couch_server_sup:start_link([default_config()]),
+ timer:sleep(1000),
+ ok.
+
+query_view_after_restore_backup() ->
+ {ok, {{_, Code, _}, _Headers, Body}} = http:request(
+ get,
+ {db_url() ++ "/_design/foo/_view/bar", []},
+ [],
+ [{sync, true}]),
+ etap:is(Code, 200, "Got view response after restoring backup."),
+ ViewJson = couch_util:json_decode(Body),
+ Rows = couch_util:get_nested_json_value(ViewJson, [<<"rows">>]),
+ HasDoc1 = has_doc("doc1", Rows),
+ HasDoc2 = has_doc("doc2", Rows),
+ HasDoc3 = has_doc("doc3", Rows),
+ HasDoc666 = has_doc("doc666", Rows),
+ etap:is(HasDoc1, true, "After backup restore, view has doc1"),
+ etap:is(HasDoc2, true, "After backup restore, view has doc2"),
+ etap:is(HasDoc3, true, "After backup restore, view has doc3"),
+ etap:is(HasDoc666, false, "After backup restore, view does not have doc666"),
+ ok.
diff --git a/test/etap/160-vhosts.t b/test/etap/160-vhosts.t
new file mode 100755
index 00000000..f4bd5e27
--- /dev/null
+++ b/test/etap/160-vhosts.t
@@ -0,0 +1,290 @@
+#!/usr/bin/env escript
+%% -*- erlang -*-
+
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+% http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+%% XXX: Figure out how to -include("couch_rep.hrl")
+-record(http_db, {
+ url,
+ auth = [],
+ resource = "",
+ headers = [
+ {"User-Agent", "CouchDB/"++couch_server:get_version()},
+ {"Accept", "application/json"},
+ {"Accept-Encoding", "gzip"}
+ ],
+ qs = [],
+ method = get,
+ body = nil,
+ options = [
+ {response_format,binary},
+ {inactivity_timeout, 30000}
+ ],
+ retries = 10,
+ pause = 1,
+ conn = nil
+}).
+
+-record(user_ctx, {
+ name = null,
+ roles = [],
+ handler
+}).
+
+server() -> "http://127.0.0.1:5984/".
+dbname() -> "etap-test-db".
+admin_user_ctx() -> {user_ctx, #user_ctx{roles=[<<"_admin">>]}}.
+
+config_files() ->
+ lists:map(fun test_util:build_file/1, [
+ "etc/couchdb/default_dev.ini",
+ "etc/couchdb/local_dev.ini"
+ ]).
+
+main(_) ->
+ test_util:init_code_path(),
+
+ etap:plan(14),
+ case (catch test()) of
+ ok ->
+ etap:end_tests();
+ Other ->
+ etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
+ etap:bail(Other)
+ end,
+ ok.
+
+test() ->
+ couch_server_sup:start_link(config_files()),
+ ibrowse:start(),
+ crypto:start(),
+
+ timer:sleep(1000),
+ couch_server:delete(list_to_binary(dbname()), [admin_user_ctx()]),
+ {ok, Db} = couch_db:create(list_to_binary(dbname()), [admin_user_ctx()]),
+
+ Doc = couch_doc:from_json_obj({[
+ {<<"_id">>, <<"doc1">>},
+ {<<"value">>, 666}
+ ]}),
+
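+    % Design doc with a show function that echoes the requested and
+    % rewritten paths, plus a rewrite from "/" to that show; several vhost
+    % mappings below point at its _rewrite handler.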
+ Doc1 = couch_doc:from_json_obj({[
+ {<<"_id">>, <<"_design/doc1">>},
+ {<<"shows">>, {[
+ {<<"test">>, <<"function(doc, req) {
+ return { json: {
+ requested_path: '/' + req.requested_path.join('/'),
+ path: '/' + req.path.join('/')
+ }};
+}">>}
+ ]}},
+ {<<"rewrites">>, [
+ {[
+ {<<"from">>, <<"/">>},
+ {<<"to">>, <<"_show/test">>}
+ ]}
+ ]}
+ ]}),
+
+ {ok, _} = couch_db:update_docs(Db, [Doc, Doc1]),
+
+ couch_db:ensure_full_commit(Db),
+
+ %% end boilerplate, start test
+
+ ok = couch_config:set("vhosts", "example.com", "/etap-test-db", false),
+ ok = couch_config:set("vhosts", "*.example.com",
+ "/etap-test-db/_design/doc1/_rewrite", false),
+ ok = couch_config:set("vhosts", "example.com/test", "/etap-test-db", false),
+ ok = couch_config:set("vhosts", "example1.com",
+ "/etap-test-db/_design/doc1/_rewrite/", false),
+ ok = couch_config:set("vhosts",":appname.:dbname.example1.com",
+ "/:dbname/_design/:appname/_rewrite/", false),
+ ok = couch_config:set("vhosts", ":dbname.example1.com", "/:dbname", false),
+
+ ok = couch_config:set("vhosts", "*.example2.com", "/*", false),
+ ok = couch_config:set("vhosts", "*/test", "/etap-test-db", false),
+ ok = couch_config:set("vhosts", "*.example2.com/test", "/*", false),
+ ok = couch_config:set("vhosts", "*/test1",
+ "/etap-test-db/_design/doc1/_show/test", false),
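+    % These mappings exercise literal hosts, "*." host wildcards, per-path
+    % vhosts ("example.com/test", "*/test"), :variable substitution
+    % (":appname", ":dbname"), and "/*" targets that splice the matched
+    % host wildcard back into the destination path.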
+
+ test_regular_request(),
+ test_vhost_request(),
+ test_vhost_request_with_qs(),
+ test_vhost_request_with_global(),
+ test_vhost_requested_path(),
+ test_vhost_requested_path_path(),
+ test_vhost_request_wildcard(),
+ test_vhost_request_replace_var(),
+ test_vhost_request_replace_var1(),
+ test_vhost_request_replace_wildcard(),
+ test_vhost_request_path(),
+ test_vhost_request_path1(),
+ test_vhost_request_path2(),
+ test_vhost_request_path3(),
+
+ %% restart boilerplate
+ couch_db:close(Db),
+ timer:sleep(3000),
+ couch_server_sup:stop(),
+
+ ok.
+
+test_regular_request() ->
+    case ibrowse:send_req(server(), [], get, []) of
+ {ok, _, _, Body} ->
+ {[{<<"couchdb">>, <<"Welcome">>},
+ {<<"version">>,_}
+ ]} = couch_util:json_decode(Body),
+ etap:is(true, true, "should return server info");
+ _Else ->
+ etap:is(false, true, <<"ibrowse fail">>)
+ end.
+
+test_vhost_request() ->
+ case ibrowse:send_req(server(), [], get, [], [{host_header, "example.com"}]) of
+ {ok, _, _, Body} ->
+ {[{<<"db_name">>, <<"etap-test-db">>},_,_,_,_,_,_,_,_,_]}
+ = couch_util:json_decode(Body),
+ etap:is(true, true, "should return database info");
+ _Else ->
+ etap:is(false, true, <<"ibrowse fail">>)
+ end.
+
+test_vhost_request_with_qs() ->
+ Url = server() ++ "doc1?revs_info=true",
+ case ibrowse:send_req(Url, [], get, [], [{host_header, "example.com"}]) of
+ {ok, _, _, Body} ->
+ {JsonProps} = couch_util:json_decode(Body),
+ HasRevsInfo = proplists:is_defined(<<"_revs_info">>, JsonProps),
+ etap:is(HasRevsInfo, true, "should return _revs_info");
+ _Else ->
+ etap:is(false, true, <<"ibrowse fail">>)
+ end.
+
+test_vhost_request_with_global() ->
+ Url2 = server() ++ "_utils/index.html",
+ case ibrowse:send_req(Url2, [], get, [], [{host_header, "example.com"}]) of
+ {ok, _, _, Body2} ->
+ "<!DOCTYPE" ++ _Foo = Body2,
+ etap:is(true, true, "should serve /_utils even inside vhosts");
+ _Else ->
+ etap:is(false, true, <<"ibrowse fail">>)
+ end.
+
+test_vhost_requested_path() ->
+ case ibrowse:send_req(server(), [], get, [], [{host_header, "example1.com"}]) of
+ {ok, _, _, Body} ->
+ {Json} = couch_util:json_decode(Body),
+ etap:is(case proplists:get_value(<<"requested_path">>, Json) of
+ <<"/">> -> true;
+ _ -> false
+ end, true, <<"requested path in req ok">>);
+ _Else ->
+ etap:is(false, true, <<"ibrowse fail">>)
+ end.
+
+test_vhost_requested_path_path() ->
+ case ibrowse:send_req(server(), [], get, [], [{host_header, "example1.com"}]) of
+ {ok, _, _, Body} ->
+ {Json} = couch_util:json_decode(Body),
+ etap:is(case proplists:get_value(<<"path">>, Json) of
+ <<"/etap-test-db/_design/doc1/_show/test">> -> true;
+ _ -> false
+ end, true, <<"path in req ok">>);
+ _Else ->
+ etap:is(false, true, <<"ibrowse fail">>)
+ end.
+
+test_vhost_request_wildcard() ->
+ case ibrowse:send_req(server(), [], get, [], [{host_header, "test.example.com"}]) of
+ {ok, _, _, Body} ->
+ {Json} = couch_util:json_decode(Body),
+ etap:is(case proplists:get_value(<<"path">>, Json) of
+ <<"/etap-test-db/_design/doc1/_show/test">> -> true;
+ _ -> false
+ end, true, <<"wildcard ok">>);
+ _Else -> etap:is(false, true, <<"ibrowse fail">>)
+ end.
+
+test_vhost_request_replace_var() ->
+ case ibrowse:send_req(server(), [], get, [], [{host_header,"etap-test-db.example1.com"}]) of
+ {ok, _, _, Body} ->
+ {[{<<"db_name">>, <<"etap-test-db">>},_,_,_,_,_,_,_,_,_]}
+ = couch_util:json_decode(Body),
+ etap:is(true, true, "should return database info");
+ _Else -> etap:is(false, true, <<"ibrowse fail">>)
+ end.
+
+test_vhost_request_replace_var1() ->
+ case ibrowse:send_req(server(), [], get, [], [{host_header, "doc1.etap-test-db.example1.com"}]) of
+ {ok, _, _, Body} ->
+ {Json} = couch_util:json_decode(Body),
+ etap:is(case proplists:get_value(<<"path">>, Json) of
+ <<"/etap-test-db/_design/doc1/_show/test">> -> true;
+ _ -> false
+ end, true, <<"wildcard ok">>);
+ _Else -> etap:is(false, true, <<"ibrowse fail">>)
+ end.
+
+test_vhost_request_replace_wildcard() ->
+ case ibrowse:send_req(server(), [], get, [], [{host_header,"etap-test-db.example2.com"}]) of
+ {ok, _, _, Body} ->
+ {[{<<"db_name">>, <<"etap-test-db">>},_,_,_,_,_,_,_,_,_]}
+ = couch_util:json_decode(Body),
+ etap:is(true, true, "should return database info");
+ _Else -> etap:is(false, true, <<"ibrowse fail">>)
+ end.
+
+test_vhost_request_path() ->
+ Uri = server() ++ "test",
+ case ibrowse:send_req(Uri, [], get, [], [{host_header, "example.com"}]) of
+ {ok, _, _, Body} ->
+ {[{<<"db_name">>, <<"etap-test-db">>},_,_,_,_,_,_,_,_,_]}
+ = couch_util:json_decode(Body),
+ etap:is(true, true, "should return database info");
+ _Else -> etap:is(false, true, <<"ibrowse fail">>)
+ end.
+
+test_vhost_request_path1() ->
+ Url = server() ++ "test/doc1?revs_info=true",
+ case ibrowse:send_req(Url, [], get, [], []) of
+ {ok, _, _, Body} ->
+ {JsonProps} = couch_util:json_decode(Body),
+ HasRevsInfo = proplists:is_defined(<<"_revs_info">>, JsonProps),
+ etap:is(HasRevsInfo, true, "should return _revs_info");
+ _Else -> etap:is(false, true, <<"ibrowse fail">>)
+ end.
+
+test_vhost_request_path2() ->
+ Uri = server() ++ "test",
+ case ibrowse:send_req(Uri, [], get, [], [{host_header,"etap-test-db.example2.com"}]) of
+ {ok, _, _, Body} ->
+ {[{<<"db_name">>, <<"etap-test-db">>},_,_,_,_,_,_,_,_,_]}
+ = couch_util:json_decode(Body),
+ etap:is(true, true, "should return database info");
+ _Else -> etap:is(false, true, <<"ibrowse fail">>)
+ end.
+
+test_vhost_request_path3() ->
+ Uri = server() ++ "test1",
+ case ibrowse:send_req(Uri, [], get, [], []) of
+ {ok, _, _, Body} ->
+ {Json} = couch_util:json_decode(Body),
+ etap:is(case proplists:get_value(<<"path">>, Json) of
+ <<"/etap-test-db/_design/doc1/_show/test">> -> true;
+ _ -> false
+ end, true, <<"path in req ok">>);
+ _Else -> etap:is(false, true, <<"ibrowse fail">>)
+ end.
diff --git a/test/etap/170-os-daemons.es b/test/etap/170-os-daemons.es
new file mode 100755
index 00000000..73974e90
--- /dev/null
+++ b/test/etap/170-os-daemons.es
@@ -0,0 +1,26 @@
+#! /usr/bin/env escript
+
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+% http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
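+% Minimal OS daemon used by 170-os-daemons.t: it consumes stdin and stops
+% cleanly once stdin is closed (eof), producing no output of its own.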
+loop() ->
+ loop(io:read("")).
+
+loop({ok, _}) ->
+ loop(io:read(""));
+loop(eof) ->
+ stop;
+loop({error, Reason}) ->
+ throw({error, Reason}).
+
+main([]) ->
+ loop().
diff --git a/test/etap/170-os-daemons.t b/test/etap/170-os-daemons.t
new file mode 100755
index 00000000..6feaa1bf
--- /dev/null
+++ b/test/etap/170-os-daemons.t
@@ -0,0 +1,114 @@
+#!/usr/bin/env escript
+%% -*- erlang -*-
+
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+% http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+-record(daemon, {
+ port,
+ name,
+ cmd,
+ kill,
+ status=running,
+ cfg_patterns=[],
+ errors=[],
+ buf=[]
+}).
+
+config_files() ->
+ lists:map(fun test_util:build_file/1, [
+ "etc/couchdb/default_dev.ini"
+ ]).
+
+daemon_cmd() ->
+ test_util:source_file("test/etap/170-os-daemons.es").
+
+main(_) ->
+ test_util:init_code_path(),
+
+ etap:plan(49),
+ case (catch test()) of
+ ok ->
+ etap:end_tests();
+ Other ->
+ etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
+ etap:bail(Other)
+ end,
+ ok.
+
+test() ->
+ couch_config:start_link(config_files()),
+ couch_os_daemons:start_link(),
+
+ etap:diag("Daemons boot after configuration added."),
+ couch_config:set("os_daemons", "foo", daemon_cmd(), false),
+ timer:sleep(1000),
+
+ {ok, [D1]} = couch_os_daemons:info([table]),
+ check_daemon(D1, "foo"),
+
+ % Check table form
+ {ok, Tab1} = couch_os_daemons:info(),
+ [T1] = ets:tab2list(Tab1),
+ check_daemon(T1, "foo"),
+
+ etap:diag("Daemons stop after configuration removed."),
+ couch_config:delete("os_daemons", "foo", false),
+ timer:sleep(500),
+
+ {ok, []} = couch_os_daemons:info([table]),
+ {ok, Tab2} = couch_os_daemons:info(),
+ etap:is(ets:tab2list(Tab2), [], "As table returns empty table."),
+
+ etap:diag("Adding multiple daemons causes both to boot."),
+ couch_config:set("os_daemons", "bar", daemon_cmd(), false),
+ couch_config:set("os_daemons", "baz", daemon_cmd(), false),
+ timer:sleep(500),
+ {ok, Daemons} = couch_os_daemons:info([table]),
+ lists:foreach(fun(D) ->
+ check_daemon(D)
+ end, Daemons),
+
+ {ok, Tab3} = couch_os_daemons:info(),
+ lists:foreach(fun(D) ->
+ check_daemon(D)
+ end, ets:tab2list(Tab3)),
+
+ etap:diag("Removing one daemon leaves the other alive."),
+ couch_config:delete("os_daemons", "bar", false),
+ timer:sleep(500),
+
+ {ok, [D2]} = couch_os_daemons:info([table]),
+ check_daemon(D2, "baz"),
+
+ % Check table version
+ {ok, Tab4} = couch_os_daemons:info(),
+ [T4] = ets:tab2list(Tab4),
+ check_daemon(T4, "baz"),
+
+ ok.
+
+check_daemon(D) ->
+ check_daemon(D, D#daemon.name).
+
+check_daemon(D, Name) ->
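+    % D#daemon.cmd may carry a leading path, so compare only its trailing
+    % BaseLen characters against the expected script name.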
+ BaseName = "170-os-daemons.es",
+ BaseLen = length(BaseName),
+ CmdLen = length(D#daemon.cmd),
+ CmdName = lists:sublist(D#daemon.cmd, CmdLen-BaseLen+1, BaseLen),
+
+ etap:is(is_port(D#daemon.port), true, "Daemon port is a port."),
+ etap:is(D#daemon.name, Name, "Daemon name was set correctly."),
+ etap:is(CmdName, BaseName, "Command name was set correctly."),
+ etap:isnt(D#daemon.kill, undefined, "Kill command was set."),
+ etap:is(D#daemon.errors, [], "No errors occurred while booting."),
+ etap:is(D#daemon.buf, [], "No extra data left in the buffer.").
diff --git a/test/etap/171-os-daemons-config.es b/test/etap/171-os-daemons-config.es
new file mode 100755
index 00000000..1f68ddc6
--- /dev/null
+++ b/test/etap/171-os-daemons-config.es
@@ -0,0 +1,83 @@
+#! /usr/bin/env escript
+
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+% http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
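+% Helper daemon for 171-os-daemons-config.t. It speaks the couch_os_daemons
+% stdio protocol: every request is a single JSON array written to stdout
+% (e.g. ["get", Section] or ["log", Mesg, Opts]) and every "get" reply is a
+% single JSON line read back from stdin.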
+filename() ->
+ list_to_binary(test_util:source_file("test/etap/171-os-daemons-config.es")).
+
+read() ->
+ case io:get_line('') of
+ eof ->
+ stop;
+ Data ->
+ couch_util:json_decode(Data)
+ end.
+
+write(Mesg) ->
+    Data = iolist_to_binary(couch_util:json_encode(Mesg)),
+    % print via ~s so a "~" inside the JSON can't be mistaken for an
+    % io:format control sequence
+    io:format("~s~n", [Data]).
+
+get_cfg(Section) ->
+ write([<<"get">>, Section]),
+ read().
+
+get_cfg(Section, Name) ->
+ write([<<"get">>, Section, Name]),
+ read().
+
+log(Mesg) ->
+ write([<<"log">>, Mesg]).
+
+log(Mesg, Level) ->
+ write([<<"log">>, Mesg, {[{<<"level">>, Level}]}]).
+
+test_get_cfg1() ->
+ FileName = filename(),
+ {[{<<"foo">>, FileName}]} = get_cfg(<<"os_daemons">>).
+
+test_get_cfg2() ->
+ FileName = filename(),
+ FileName = get_cfg(<<"os_daemons">>, <<"foo">>),
+ <<"sequential">> = get_cfg(<<"uuids">>, <<"algorithm">>).
+
+test_get_unknown_cfg() ->
+ {[]} = get_cfg(<<"aal;3p4">>),
+ null = get_cfg(<<"aal;3p4">>, <<"313234kjhsdfl">>).
+
+test_log() ->
+ log(<<"foobar!">>),
+ log(<<"some stuff!">>, <<"debug">>),
+ log(2),
+ log(true),
+ write([<<"log">>, <<"stuff">>, 2]),
+ write([<<"log">>, 3, null]),
+ write([<<"log">>, [1, 2], {[{<<"level">>, <<"debug">>}]}]),
+ write([<<"log">>, <<"true">>, {[]}]).
+
+do_tests() ->
+ test_get_cfg1(),
+ test_get_cfg2(),
+ test_get_unknown_cfg(),
+ test_log(),
+ loop(io:read("")).
+
+loop({ok, _}) ->
+ loop(io:read(""));
+loop(eof) ->
+ init:stop();
+loop({error, _Reason}) ->
+ init:stop().
+
+main([]) ->
+ test_util:init_code_path(),
+ do_tests().
diff --git a/test/etap/171-os-daemons-config.t b/test/etap/171-os-daemons-config.t
new file mode 100755
index 00000000..e9dc3f32
--- /dev/null
+++ b/test/etap/171-os-daemons-config.t
@@ -0,0 +1,74 @@
+#!/usr/bin/env escript
+%% -*- erlang -*-
+
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+% http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+-record(daemon, {
+ port,
+ name,
+ cmd,
+ kill,
+ status=running,
+ cfg_patterns=[],
+ errors=[],
+ buf=[]
+}).
+
+config_files() ->
+ lists:map(fun test_util:build_file/1, [
+ "etc/couchdb/default_dev.ini"
+ ]).
+
+daemon_cmd() ->
+ test_util:source_file("test/etap/171-os-daemons-config.es").
+
+main(_) ->
+ test_util:init_code_path(),
+
+ etap:plan(6),
+ case (catch test()) of
+ ok ->
+ etap:end_tests();
+ Other ->
+ etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
+ etap:bail(Other)
+ end,
+ ok.
+
+test() ->
+ couch_config:start_link(config_files()),
+ couch_config:set("log", "level", "debug", false),
+ couch_log:start_link(),
+ couch_os_daemons:start_link(),
+
+ % "foo" is a required name by this test.
+ couch_config:set("os_daemons", "foo", daemon_cmd(), false),
+ timer:sleep(1000),
+
+ {ok, [D1]} = couch_os_daemons:info([table]),
+ check_daemon(D1, "foo"),
+
+ ok.
+
+check_daemon(D, Name) ->
+ BaseName = "171-os-daemons-config.es",
+ BaseLen = length(BaseName),
+ CmdLen = length(D#daemon.cmd),
+ CmdName = lists:sublist(D#daemon.cmd, CmdLen-BaseLen+1, BaseLen),
+
+ etap:is(is_port(D#daemon.port), true, "Daemon port is a port."),
+ etap:is(D#daemon.name, Name, "Daemon name was set correctly."),
+ etap:is(CmdName, BaseName, "Command name was set correctly."),
+ etap:isnt(D#daemon.kill, undefined, "Kill command was set."),
+ etap:is(D#daemon.errors, [], "No errors occurred while booting."),
+ etap:is(D#daemon.buf, [], "No extra data left in the buffer.").
diff --git a/test/etap/172-os-daemon-errors.1.es b/test/etap/172-os-daemon-errors.1.es
new file mode 100644
index 00000000..a9defba1
--- /dev/null
+++ b/test/etap/172-os-daemon-errors.1.es
@@ -0,0 +1,22 @@
+#! /usr/bin/env escript
+
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+% http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+% Please do not make this file executable as that's the error being tested.
+
+loop() ->
+ timer:sleep(5000),
+ loop().
+
+main([]) ->
+ loop().
diff --git a/test/etap/172-os-daemon-errors.2.es b/test/etap/172-os-daemon-errors.2.es
new file mode 100755
index 00000000..52de0401
--- /dev/null
+++ b/test/etap/172-os-daemon-errors.2.es
@@ -0,0 +1,16 @@
+#! /usr/bin/env escript
+
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+% http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+main([]) ->
+ init:stop().
diff --git a/test/etap/172-os-daemon-errors.3.es b/test/etap/172-os-daemon-errors.3.es
new file mode 100755
index 00000000..64229800
--- /dev/null
+++ b/test/etap/172-os-daemon-errors.3.es
@@ -0,0 +1,17 @@
+#! /usr/bin/env escript
+
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+% http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+main([]) ->
+ timer:sleep(1000),
+ init:stop().
diff --git a/test/etap/172-os-daemon-errors.4.es b/test/etap/172-os-daemon-errors.4.es
new file mode 100755
index 00000000..577f3410
--- /dev/null
+++ b/test/etap/172-os-daemon-errors.4.es
@@ -0,0 +1,17 @@
+#! /usr/bin/env escript
+
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+% http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+main([]) ->
+ timer:sleep(2000),
+ init:stop().
diff --git a/test/etap/172-os-daemon-errors.t b/test/etap/172-os-daemon-errors.t
new file mode 100755
index 00000000..287a0812
--- /dev/null
+++ b/test/etap/172-os-daemon-errors.t
@@ -0,0 +1,126 @@
+#!/usr/bin/env escript
+%% -*- erlang -*-
+
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+% http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+-record(daemon, {
+ port,
+ name,
+ cmd,
+ kill,
+ status=running,
+ cfg_patterns=[],
+ errors=[],
+ buf=[]
+}).
+
+config_files() ->
+ lists:map(fun test_util:build_file/1, [
+ "etc/couchdb/default_dev.ini"
+ ]).
+
+bad_perms() ->
+ test_util:source_file("test/etap/172-os-daemon-errors.1.es").
+
+die_on_boot() ->
+ test_util:source_file("test/etap/172-os-daemon-errors.2.es").
+
+die_quickly() ->
+ test_util:source_file("test/etap/172-os-daemon-errors.3.es").
+
+can_reboot() ->
+ test_util:source_file("test/etap/172-os-daemon-errors.4.es").
+
+main(_) ->
+ test_util:init_code_path(),
+
+ etap:plan(36),
+ case (catch test()) of
+ ok ->
+ etap:end_tests();
+ Other ->
+ etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
+ etap:bail(Other)
+ end,
+ ok.
+
+test() ->
+ couch_config:start_link(config_files()),
+ couch_os_daemons:start_link(),
+
+ etap:diag("Daemon not executable."),
+ test_halts("foo", bad_perms(), 1000),
+
+ etap:diag("Daemon dies on boot."),
+ test_halts("bar", die_on_boot(), 1000),
+
+ etap:diag("Daemon dies quickly after boot."),
+ test_halts("baz", die_quickly(), 4000),
+
+ etap:diag("Daemon dies, but not quickly enough to be halted."),
+ test_runs("bam", can_reboot()),
+
+ ok.
+
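+% Configure a daemon that is expected to fail, wait Time ms, and verify
+% couch_os_daemons gave up on it (status halted, error tracking disabled)
+% instead of keeping it in the running table.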
+test_halts(Name, Cmd, Time) ->
+ couch_config:set("os_daemons", Name, Cmd ++ " 2> /dev/null", false),
+ timer:sleep(Time),
+ {ok, [D]} = couch_os_daemons:info([table]),
+ check_dead(D, Name, Cmd),
+ couch_config:delete("os_daemons", Name, false).
+
+test_runs(Name, Cmd) ->
+ couch_config:set("os_daemons", Name, Cmd, false),
+
+ timer:sleep(1000),
+ {ok, [D1]} = couch_os_daemons:info([table]),
+ check_daemon(D1, Name, Cmd, 0),
+
+ % Should reboot every two seconds. We're at 1s, so wait
+    % until 3s to be in the middle of the next invocation's
+ % life span.
+ timer:sleep(2000),
+ {ok, [D2]} = couch_os_daemons:info([table]),
+ check_daemon(D2, Name, Cmd, 1),
+
+ % If the kill command changed, that means we rebooted the process.
+ etap:isnt(D1#daemon.kill, D2#daemon.kill, "Kill command changed.").
+
+check_dead(D, Name, Cmd) ->
+ BaseName = filename:basename(Cmd) ++ " 2> /dev/null",
+ BaseLen = length(BaseName),
+ CmdLen = length(D#daemon.cmd),
+ CmdName = lists:sublist(D#daemon.cmd, CmdLen-BaseLen+1, BaseLen),
+
+ etap:is(is_port(D#daemon.port), true, "Daemon port is a port."),
+ etap:is(D#daemon.name, Name, "Daemon name was set correctly."),
+ etap:is(CmdName, BaseName, "Command name was set correctly."),
+ etap:isnt(D#daemon.kill, undefined, "Kill command was set."),
+ etap:is(D#daemon.status, halted, "Daemon has been halted."),
+ etap:is(D#daemon.errors, nil, "Errors have been disabled."),
+ etap:is(D#daemon.buf, nil, "Buffer has been switched off.").
+
+check_daemon(D, Name, Cmd, Errs) ->
+ BaseName = filename:basename(Cmd),
+ BaseLen = length(BaseName),
+ CmdLen = length(D#daemon.cmd),
+ CmdName = lists:sublist(D#daemon.cmd, CmdLen-BaseLen+1, BaseLen),
+
+ etap:is(is_port(D#daemon.port), true, "Daemon port is a port."),
+ etap:is(D#daemon.name, Name, "Daemon name was set correctly."),
+ etap:is(CmdName, BaseName, "Command name was set correctly."),
+ etap:isnt(D#daemon.kill, undefined, "Kill command was set."),
+ etap:is(D#daemon.status, running, "Daemon still running."),
+ etap:is(length(D#daemon.errors), Errs, "Found expected number of errors."),
+ etap:is(D#daemon.buf, [], "No extra data left in the buffer.").
+
diff --git a/test/etap/173-os-daemon-cfg-register.es b/test/etap/173-os-daemon-cfg-register.es
new file mode 100755
index 00000000..3d536dc7
--- /dev/null
+++ b/test/etap/173-os-daemon-cfg-register.es
@@ -0,0 +1,35 @@
+#! /usr/bin/env escript
+
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+% http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
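+% Companion daemon for 173-os-daemon-cfg-register.t: on boot it registers
+% for configuration changes (all of section "s1", and only key "k" of
+% section "s2"), then idles until stdin closes.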
+write(Mesg) ->
+    Data = iolist_to_binary(couch_util:json_encode(Mesg)),
+    % print via ~s so a "~" inside the JSON can't be mistaken for an
+    % io:format control sequence
+    io:format("~s~n", [Data]).
+
+cfg_register(Section) ->
+ write([<<"register">>, Section]).
+
+cfg_register(Section, Key) ->
+ write([<<"register">>, Section, Key]).
+
+wait(_) ->
+ init:stop().
+
+do_tests() ->
+ cfg_register(<<"s1">>),
+ cfg_register(<<"s2">>, <<"k">>),
+ wait(io:read("")).
+
+main([]) ->
+ test_util:init_code_path(),
+ do_tests().
diff --git a/test/etap/173-os-daemon-cfg-register.t b/test/etap/173-os-daemon-cfg-register.t
new file mode 100755
index 00000000..3ee2969a
--- /dev/null
+++ b/test/etap/173-os-daemon-cfg-register.t
@@ -0,0 +1,98 @@
+#!/usr/bin/env escript
+%% -*- erlang -*-
+
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+% http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+-record(daemon, {
+ port,
+ name,
+ cmd,
+ kill,
+ status=running,
+ cfg_patterns=[],
+ errors=[],
+ buf=[]
+}).
+
+config_files() ->
+ lists:map(fun test_util:build_file/1, [
+ "etc/couchdb/default_dev.ini"
+ ]).
+
+daemon_name() ->
+ "wheee".
+
+daemon_cmd() ->
+ test_util:source_file("test/etap/173-os-daemon-cfg-register.es").
+
+main(_) ->
+ test_util:init_code_path(),
+
+ etap:plan(27),
+ case (catch test()) of
+ ok ->
+ etap:end_tests();
+ Other ->
+ etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
+ etap:bail(Other)
+ end,
+ ok.
+
+test() ->
+ couch_config:start_link(config_files()),
+ couch_os_daemons:start_link(),
+
+ DaemonCmd = daemon_cmd() ++ " 2> /dev/null",
+
+ etap:diag("Booting the daemon"),
+ couch_config:set("os_daemons", daemon_name(), DaemonCmd, false),
+ timer:sleep(1000),
+ {ok, [D1]} = couch_os_daemons:info([table]),
+ check_daemon(D1, running),
+
+ etap:diag("Daemon restarts when section changes."),
+ couch_config:set("s1", "k", "foo", false),
+ timer:sleep(1000),
+ {ok, [D2]} = couch_os_daemons:info([table]),
+ check_daemon(D2, running),
+ etap:isnt(D2#daemon.kill, D1#daemon.kill, "Kill command shows restart."),
+
+ etap:diag("Daemon doesn't restart for ignored section key."),
+ couch_config:set("s2", "k2", "baz", false),
+ timer:sleep(1000),
+ {ok, [D3]} = couch_os_daemons:info([table]),
+ etap:is(D3, D2, "Same daemon info after ignored config change."),
+
+ etap:diag("Daemon restarts for specific section/key pairs."),
+ couch_config:set("s2", "k", "bingo", false),
+ timer:sleep(1000),
+ {ok, [D4]} = couch_os_daemons:info([table]),
+ check_daemon(D4, running),
+ etap:isnt(D4#daemon.kill, D3#daemon.kill, "Kill command changed again."),
+
+ ok.
+
+check_daemon(D, Status) ->
+ BaseName = filename:basename(daemon_cmd()) ++ " 2> /dev/null",
+ BaseLen = length(BaseName),
+ CmdLen = length(D#daemon.cmd),
+ CmdName = lists:sublist(D#daemon.cmd, CmdLen-BaseLen+1, BaseLen),
+
+ etap:is(is_port(D#daemon.port), true, "Daemon port is a port."),
+ etap:is(D#daemon.name, daemon_name(), "Daemon name was set correctly."),
+ etap:is(CmdName, BaseName, "Command name was set correctly."),
+ etap:isnt(D#daemon.kill, undefined, "Kill command was set."),
+ etap:is(D#daemon.status, Status, "Daemon status is correct."),
+ etap:is(D#daemon.cfg_patterns, [{"s1"}, {"s2", "k"}], "Cfg patterns set"),
+ etap:is(D#daemon.errors, [], "No errors have occurred."),
+ etap:isnt(D#daemon.buf, nil, "Buffer is active.").
diff --git a/test/etap/180-http-proxy.ini b/test/etap/180-http-proxy.ini
new file mode 100644
index 00000000..72a63f66
--- /dev/null
+++ b/test/etap/180-http-proxy.ini
@@ -0,0 +1,20 @@
+; Licensed to the Apache Software Foundation (ASF) under one
+; or more contributor license agreements. See the NOTICE file
+; distributed with this work for additional information
+; regarding copyright ownership. The ASF licenses this file
+; to you under the Apache License, Version 2.0 (the
+; "License"); you may not use this file except in compliance
+; with the License. You may obtain a copy of the License at
+;
+; http://www.apache.org/licenses/LICENSE-2.0
+;
+; Unless required by applicable law or agreed to in writing,
+; software distributed under the License is distributed on an
+; "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+; KIND, either express or implied. See the License for the
+; specific language governing permissions and limitations
+; under the License.
+
+[httpd_global_handlers]
+_test = {couch_httpd_proxy, handle_proxy_req, <<"http://127.0.0.1:5985/">>}
+_error = {couch_httpd_proxy, handle_proxy_req, <<"http://127.0.0.1:5986/">>} \ No newline at end of file
diff --git a/test/etap/180-http-proxy.t b/test/etap/180-http-proxy.t
new file mode 100755
index 00000000..b91d901b
--- /dev/null
+++ b/test/etap/180-http-proxy.t
@@ -0,0 +1,357 @@
+#!/usr/bin/env escript
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+% http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+-record(req, {method=get, path="", headers=[], body="", opts=[]}).
+
+default_config() ->
+ [
+ test_util:build_file("etc/couchdb/default_dev.ini"),
+ test_util:source_file("test/etap/180-http-proxy.ini")
+ ].
+
+server() -> "http://127.0.0.1:5984/_test/".
+proxy() -> "http://127.0.0.1:5985/".
+external() -> "https://www.google.com/".
+
+main(_) ->
+ test_util:init_code_path(),
+
+ etap:plan(61),
+ case (catch test()) of
+ ok ->
+ etap:end_tests();
+ Other ->
+ etap:diag("Test died abnormally: ~p", [Other]),
+ etap:bail("Bad return value.")
+ end,
+ ok.
+
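+% check_request/4 drives one request through the proxy: Remote is an
+% assertion fun installed in the test_web mock upstream (or no_remote),
+% Req describes the client call, and Local validates the raw ibrowse reply
+% (or no_local). When both sides take part we also verify that the mock
+% saw exactly the request it expected (was_ok).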
+check_request(Name, Req, Remote, Local) ->
+ case Remote of
+ no_remote -> ok;
+ _ -> test_web:set_assert(Remote)
+ end,
+ Url = case proplists:lookup(url, Req#req.opts) of
+ none -> server() ++ Req#req.path;
+ {url, DestUrl} -> DestUrl
+ end,
+ Opts = [{headers_as_is, true} | Req#req.opts],
+    Resp = ibrowse:send_req(
+ Url, Req#req.headers, Req#req.method, Req#req.body, Opts
+ ),
+ %etap:diag("ibrowse response: ~p", [Resp]),
+ case Local of
+ no_local -> ok;
+ _ -> etap:fun_is(Local, Resp, Name)
+ end,
+ case {Remote, Local} of
+ {no_remote, _} ->
+ ok;
+ {_, no_local} ->
+ ok;
+ _ ->
+ etap:is(test_web:check_last(), was_ok, Name ++ " - request handled")
+ end,
+ Resp.
+
+test() ->
+ couch_server_sup:start_link(default_config()),
+ ibrowse:start(),
+ crypto:start(),
+ test_web:start_link(),
+
+ test_basic(),
+ test_alternate_status(),
+ test_trailing_slash(),
+ test_passes_header(),
+ test_passes_host_header(),
+ test_passes_header_back(),
+ test_rewrites_location_headers(),
+ test_doesnt_rewrite_external_locations(),
+ test_rewrites_relative_location(),
+ test_uses_same_version(),
+ test_passes_body(),
+ test_passes_eof_body_back(),
+ test_passes_chunked_body(),
+ test_passes_chunked_body_back(),
+
+ test_connect_error(),
+
+ ok.
+
+test_basic() ->
+ Remote = fun(Req) ->
+ 'GET' = Req:get(method),
+ "/" = Req:get(path),
+ undefined = Req:get(body_length),
+ undefined = Req:recv_body(),
+ {ok, {200, [{"Content-Type", "text/plain"}], "ok"}}
+ end,
+ Local = fun({ok, "200", _, "ok"}) -> true; (_) -> false end,
+ check_request("Basic proxy test", #req{}, Remote, Local).
+
+test_alternate_status() ->
+ Remote = fun(Req) ->
+ "/alternate_status" = Req:get(path),
+ {ok, {201, [], "ok"}}
+ end,
+ Local = fun({ok, "201", _, "ok"}) -> true; (_) -> false end,
+ Req = #req{path="alternate_status"},
+ check_request("Alternate status", Req, Remote, Local).
+
+test_trailing_slash() ->
+ Remote = fun(Req) ->
+ "/trailing_slash/" = Req:get(path),
+ {ok, {200, [], "ok"}}
+ end,
+ Local = fun({ok, "200", _, "ok"}) -> true; (_) -> false end,
+ Req = #req{path="trailing_slash/"},
+ check_request("Trailing slash", Req, Remote, Local).
+
+test_passes_header() ->
+ Remote = fun(Req) ->
+ "/passes_header" = Req:get(path),
+ "plankton" = Req:get_header_value("X-CouchDB-Ralph"),
+ {ok, {200, [], "ok"}}
+ end,
+ Local = fun({ok, "200", _, "ok"}) -> true; (_) -> false end,
+ Req = #req{
+ path="passes_header",
+ headers=[{"X-CouchDB-Ralph", "plankton"}]
+ },
+ check_request("Passes header", Req, Remote, Local).
+
+test_passes_host_header() ->
+ Remote = fun(Req) ->
+ "/passes_host_header" = Req:get(path),
+ "www.google.com" = Req:get_header_value("Host"),
+ {ok, {200, [], "ok"}}
+ end,
+ Local = fun({ok, "200", _, "ok"}) -> true; (_) -> false end,
+ Req = #req{
+ path="passes_host_header",
+ headers=[{"Host", "www.google.com"}]
+ },
+ check_request("Passes host header", Req, Remote, Local).
+
+test_passes_header_back() ->
+ Remote = fun(Req) ->
+ "/passes_header_back" = Req:get(path),
+ {ok, {200, [{"X-CouchDB-Plankton", "ralph"}], "ok"}}
+ end,
+ Local = fun
+ ({ok, "200", Headers, "ok"}) ->
+ lists:member({"X-CouchDB-Plankton", "ralph"}, Headers);
+ (_) ->
+ false
+ end,
+ Req = #req{path="passes_header_back"},
+ check_request("Passes header back", Req, Remote, Local).
+
+test_rewrites_location_headers() ->
+ etap:diag("Testing location header rewrites."),
+ do_rewrite_tests([
+ {"Location", proxy() ++ "foo/bar", server() ++ "foo/bar"},
+ {"Content-Location", proxy() ++ "bing?q=2", server() ++ "bing?q=2"},
+ {"Uri", proxy() ++ "zip#frag", server() ++ "zip#frag"},
+ {"Destination", proxy(), server()}
+ ]).
+
+test_doesnt_rewrite_external_locations() ->
+ etap:diag("Testing no rewrite of external locations."),
+ do_rewrite_tests([
+ {"Location", external() ++ "search", external() ++ "search"},
+ {"Content-Location", external() ++ "s?q=2", external() ++ "s?q=2"},
+ {"Uri", external() ++ "f#f", external() ++ "f#f"},
+ {"Destination", external() ++ "f?q=2#f", external() ++ "f?q=2#f"}
+ ]).
+
+test_rewrites_relative_location() ->
+ etap:diag("Testing relative rewrites."),
+ do_rewrite_tests([
+ {"Location", "/foo", server() ++ "foo"},
+ {"Content-Location", "bar", server() ++ "bar"},
+ {"Uri", "/zing?q=3", server() ++ "zing?q=3"},
+ {"Destination", "bing?q=stuff#yay", server() ++ "bing?q=stuff#yay"}
+ ]).
+
+do_rewrite_tests(Tests) ->
+ lists:foreach(fun({Header, Location, Url}) ->
+ do_rewrite_test(Header, Location, Url)
+ end, Tests).
+
+do_rewrite_test(Header, Location, Url) ->
+ Remote = fun(Req) ->
+ "/rewrite_test" = Req:get(path),
+ {ok, {302, [{Header, Location}], "ok"}}
+ end,
+ Local = fun
+ ({ok, "302", Headers, "ok"}) ->
+ etap:is(
+ couch_util:get_value(Header, Headers),
+ Url,
+ "Header rewritten correctly."
+ ),
+ true;
+ (_) ->
+ false
+ end,
+ Req = #req{path="rewrite_test"},
+ Label = "Rewrite test for ",
+ check_request(Label ++ Header, Req, Remote, Local).
+
+test_uses_same_version() ->
+ Remote = fun(Req) ->
+ "/uses_same_version" = Req:get(path),
+ {1, 0} = Req:get(version),
+ {ok, {200, [], "ok"}}
+ end,
+ Local = fun({ok, "200", _, "ok"}) -> true; (_) -> false end,
+ Req = #req{
+ path="uses_same_version",
+ opts=[{http_vsn, {1, 0}}]
+ },
+ check_request("Uses same version", Req, Remote, Local).
+
+test_passes_body() ->
+ Remote = fun(Req) ->
+ 'PUT' = Req:get(method),
+ "/passes_body" = Req:get(path),
+ <<"Hooray!">> = Req:recv_body(),
+ {ok, {201, [], "ok"}}
+ end,
+ Local = fun({ok, "201", _, "ok"}) -> true; (_) -> false end,
+ Req = #req{
+ method=put,
+ path="passes_body",
+ body="Hooray!"
+ },
+ check_request("Passes body", Req, Remote, Local).
+
+test_passes_eof_body_back() ->
+ BodyChunks = [<<"foo">>, <<"bar">>, <<"bazinga">>],
+ Remote = fun(Req) ->
+ 'GET' = Req:get(method),
+ "/passes_eof_body" = Req:get(path),
+ {raw, {200, [{"Connection", "close"}], BodyChunks}}
+ end,
+ Local = fun({ok, "200", _, "foobarbazinga"}) -> true; (_) -> false end,
+ Req = #req{path="passes_eof_body"},
+ check_request("Passes eof body", Req, Remote, Local).
+
+test_passes_chunked_body() ->
+ BodyChunks = [<<"foo">>, <<"bar">>, <<"bazinga">>],
+ Remote = fun(Req) ->
+ 'POST' = Req:get(method),
+ "/passes_chunked_body" = Req:get(path),
+ RecvBody = fun
+ ({Length, Chunk}, [Chunk | Rest]) ->
+ Length = size(Chunk),
+ Rest;
+ ({0, []}, []) ->
+ ok
+ end,
+ ok = Req:stream_body(1024*1024, RecvBody, BodyChunks),
+ {ok, {201, [], "ok"}}
+ end,
+ Local = fun({ok, "201", _, "ok"}) -> true; (_) -> false end,
+ Req = #req{
+ method=post,
+ path="passes_chunked_body",
+ headers=[{"Transfer-Encoding", "chunked"}],
+ body=mk_chunked_body(BodyChunks)
+ },
+ check_request("Passes chunked body", Req, Remote, Local).
+
+test_passes_chunked_body_back() ->
+ Name = "Passes chunked body back",
+ Remote = fun(Req) ->
+ 'GET' = Req:get(method),
+ "/passes_chunked_body_back" = Req:get(path),
+ BodyChunks = [<<"foo">>, <<"bar">>, <<"bazinga">>],
+ {chunked, {200, [{"Transfer-Encoding", "chunked"}], BodyChunks}}
+ end,
+ Req = #req{
+ path="passes_chunked_body_back",
+ opts=[{stream_to, self()}]
+ },
+
+ Resp = check_request(Name, Req, Remote, no_local),
+
+ etap:fun_is(
+ fun({ibrowse_req_id, _}) -> true; (_) -> false end,
+ Resp,
+ "Received an ibrowse request id."
+ ),
+ {_, ReqId} = Resp,
+
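+ % With {stream_to, self()} ibrowse delivers the response asynchronously:
+ % an ibrowse_async_headers message, then zero or more response chunks,
+ % then ibrowse_async_response_end (collected by recv_body/2 below).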
+ % Grab headers from response
+ receive
+ {ibrowse_async_headers, ReqId, "200", Headers} ->
+ etap:is(
+ proplists:get_value("Transfer-Encoding", Headers),
+ "chunked",
+ "Response included the Transfer-Encoding: chunked header"
+ ),
+ ibrowse:stream_next(ReqId)
+ after 1000 ->
+ throw({error, timeout})
+ end,
+
+ % Check body received
+ % TODO: When we upgrade to ibrowse >= 2.0.0 this check needs to
+ % check that the chunks returned are what we sent from the
+ % Remote test.
+ etap:diag("TODO: UPGRADE IBROWSE"),
+ etap:is(recv_body(ReqId, []), <<"foobarbazinga">>, "Decoded chunked body."),
+
+ % Check test_web server.
+ etap:is(test_web:check_last(), was_ok, Name ++ " - request handled").
+
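+% The _error handler (see 180-http-proxy.ini) proxies to 127.0.0.1:5986,
+% where nothing is listening, so the proxy should answer with a 500.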
+test_connect_error() ->
+ Local = fun({ok, "500", _Headers, _Body}) -> true; (_) -> false end,
+ Req = #req{opts=[{url, "http://127.0.0.1:5984/_error"}]},
+ check_request("Connect error", Req, no_remote, Local).
+
+
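+% Encode Chunks using HTTP/1.1 chunked transfer coding: hex size, CRLF,
+% data, CRLF, terminated by a zero-length chunk. For example,
+% mk_chunked_body([<<"foo">>, <<"bar">>]) yields
+% <<"3\r\nfoo\r\n3\r\nbar\r\n0\r\n\r\n">>. (Assumes non-empty chunks,
+% since to_hex/1 returns "" for 0.)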
+mk_chunked_body(Chunks) ->
+ mk_chunked_body(Chunks, []).
+
+mk_chunked_body([], Acc) ->
+ iolist_to_binary(lists:reverse(Acc, "0\r\n\r\n"));
+mk_chunked_body([Chunk | Rest], Acc) ->
+ Size = to_hex(size(Chunk)),
+ mk_chunked_body(Rest, ["\r\n", Chunk, "\r\n", Size | Acc]).
+
+to_hex(Val) ->
+ to_hex(Val, []).
+
+to_hex(0, Acc) ->
+ Acc;
+to_hex(Val, Acc) ->
+ to_hex(Val div 16, [hex_char(Val rem 16) | Acc]).
+
+hex_char(V) when V < 10 -> $0 + V;
+hex_char(V) -> $A + V - 10.
+
+recv_body(ReqId, Acc) ->
+ receive
+ {ibrowse_async_response, ReqId, Data} ->
+ recv_body(ReqId, [Data | Acc]);
+ {ibrowse_async_response_end, ReqId} ->
+ iolist_to_binary(lists:reverse(Acc));
+ Else ->
+ throw({error, unexpected_mesg, Else})
+ after 5000 ->
+ throw({error, timeout})
+ end.
diff --git a/test/etap/Makefile.am b/test/etap/Makefile.am
index d0f751f8..59d21cda 100644
--- a/test/etap/Makefile.am
+++ b/test/etap/Makefile.am
@@ -11,10 +11,10 @@
## the License.
noinst_SCRIPTS = run
-noinst_DATA = test_util.beam
+noinst_DATA = test_util.beam test_web.beam
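+# Compile with $(ERLC), presumably substituted by configure, rather than
+# whatever erlc happens to be on PATH.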
%.beam: %.erl
- erlc $<
+ $(ERLC) $<
run: run.tpl
sed -e "s|%abs_top_srcdir%|@abs_top_srcdir@|g" \
@@ -27,6 +27,7 @@ DISTCLEANFILES = temp.*
EXTRA_DIST = \
run.tpl \
+ test_web.erl \
001-load.t \
002-icu-driver.t \
010-file-basics.t \
@@ -58,7 +59,25 @@ EXTRA_DIST = \
110-replication-httpc.t \
111-replication-changes-feed.t \
112-replication-missing-revs.t \
+ 113-replication-attachment-comp.t \
120-stats-collect.t \
121-stats-aggregates.cfg \
121-stats-aggregates.ini \
- 121-stats-aggregates.t
+ 121-stats-aggregates.t \
+ 130-attachments-md5.t \
+ 140-attachment-comp.t \
+ 150-invalid-view-seq.t \
+ 160-vhosts.t \
+ 170-os-daemons.es \
+ 170-os-daemons.t \
+ 171-os-daemons-config.es \
+ 171-os-daemons-config.t \
+ 172-os-daemon-errors.1.es \
+ 172-os-daemon-errors.2.es \
+ 172-os-daemon-errors.3.es \
+ 172-os-daemon-errors.4.es \
+ 172-os-daemon-errors.t \
+ 173-os-daemon-cfg-register.es \
+ 173-os-daemon-cfg-register.t \
+ 180-http-proxy.ini \
+ 180-http-proxy.t
diff --git a/test/etap/test_web.erl b/test/etap/test_web.erl
new file mode 100644
index 00000000..16438b31
--- /dev/null
+++ b/test/etap/test_web.erl
@@ -0,0 +1,99 @@
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+% http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+-module(test_web).
+-behaviour(gen_server).
+
+-export([start_link/0, loop/1, get_port/0, set_assert/1, check_last/0]).
+-export([init/1, terminate/2, code_change/3]).
+-export([handle_call/3, handle_cast/2, handle_info/2]).
+
+-define(SERVER, test_web_server).
+-define(HANDLER, test_web_handler).
+
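+% A scriptable mochiweb server (port 5985) paired with a registered
+% gen_server that holds the current per-request assertion. Tests install an
+% expectation with set_assert/1 and confirm it ran with check_last/0.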
+start_link() ->
+ gen_server:start({local, ?HANDLER}, ?MODULE, [], []),
+ mochiweb_http:start([
+ {name, ?SERVER},
+ {loop, {?MODULE, loop}},
+ {port, 5985}
+ ]).
+
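+% Dispatch on what the installed assertion returned: {ok, _} responds
+% normally; raw writes the body by hand and forces the connection closed
+% (an EOF-terminated body); chunked streams the body as chunks after a
+% short delay; errors are reported back as a plain 200 carrying the reason.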
+loop(Req) ->
+ %etap:diag("Handling request: ~p", [Req]),
+ case gen_server:call(?HANDLER, {check_request, Req}) of
+ {ok, RespInfo} ->
+ {ok, Req:respond(RespInfo)};
+ {raw, {Status, Headers, BodyChunks}} ->
+ Resp = Req:start_response({Status, Headers}),
+ lists:foreach(fun(C) -> Resp:send(C) end, BodyChunks),
+ erlang:put(mochiweb_request_force_close, true),
+ {ok, Resp};
+ {chunked, {Status, Headers, BodyChunks}} ->
+ Resp = Req:respond({Status, Headers, chunked}),
+ timer:sleep(500),
+ lists:foreach(fun(C) -> Resp:write_chunk(C) end, BodyChunks),
+ Resp:write_chunk([]),
+ {ok, Resp};
+ {error, Reason} ->
+ etap:diag("Error: ~p", [Reason]),
+ Body = lists:flatten(io_lib:format("Error: ~p", [Reason])),
+ {ok, Req:respond({200, [], Body})}
+ end.
+
+get_port() ->
+ mochiweb_socket_server:get(?SERVER, port).
+
+set_assert(Fun) ->
+ ok = gen_server:call(?HANDLER, {set_assert, Fun}).
+
+check_last() ->
+ gen_server:call(?HANDLER, last_status).
+
+init(_) ->
+ {ok, nil}.
+
+terminate(_Reason, _State) ->
+ ok.
+
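+% State machine: nil (no assertion) -> Fun (set_assert) -> was_ok | not_ok
+% (after the request is checked) -> nil again once check_last reads it.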
+handle_call({check_request, Req}, _From, State) when is_function(State, 1) ->
+ Resp2 = case (catch State(Req)) of
+ {ok, Resp} -> {reply, {ok, Resp}, was_ok};
+ {raw, Resp} -> {reply, {raw, Resp}, was_ok};
+ {chunked, Resp} -> {reply, {chunked, Resp}, was_ok};
+ Error -> {reply, {error, Error}, not_ok}
+ end,
+ Req:cleanup(),
+ Resp2;
+handle_call({check_request, _Req}, _From, _State) ->
+ {reply, {error, no_assert_function}, not_ok};
+handle_call(last_status, _From, State) when is_atom(State) ->
+ {reply, State, nil};
+handle_call(last_status, _From, State) ->
+ {reply, {error, not_checked}, State};
+handle_call({set_assert, Fun}, _From, nil) ->
+ {reply, ok, Fun};
+handle_call({set_assert, _}, _From, State) ->
+ {reply, {error, assert_function_set}, State};
+handle_call(Msg, _From, State) ->
+ {reply, {ignored, Msg}, State}.
+
+handle_cast(Msg, State) ->
+ etap:diag("Ignoring cast message: ~p", [Msg]),
+ {noreply, State}.
+
+handle_info(Msg, State) ->
+ etap:diag("Ignoring info message: ~p", [Msg]),
+ {noreply, State}.
+
+code_change(_OldVsn, State, _Extra) ->
+ {ok, State}.
diff --git a/test/javascript/couch_http.js b/test/javascript/couch_http.js
index f92cf119..5f4716d2 100644
--- a/test/javascript/couch_http.js
+++ b/test/javascript/couch_http.js
@@ -15,11 +15,15 @@
if(typeof(CouchHTTP) != "undefined") {
CouchHTTP.prototype.open = function(method, url, async) {
- if(/^\s*http:\/\//.test(url)) {
- return this._open(method, url, async);
- } else {
- return this._open(method, this.base_url + url, async);
+ if(!/^\s*http:\/\//.test(url)) {
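+ // Paths without a leading "/" are treated as relative: insert a
+ // separator before resolving against base_url.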
+ if(/^[^\/]/.test(url)) {
+ url = this.base_url + "/" + url;
+ } else {
+ url = this.base_url + url;
+ }
}
+
+ return this._open(method, url, async);
};
CouchHTTP.prototype.setRequestHeader = function(name, value) {
@@ -52,6 +56,7 @@
}
})();
+CouchDB.urlPrefix = "";
CouchDB.newXhr = function() {
return new CouchHTTP();
};
diff --git a/test/view_server/query_server_spec.rb b/test/view_server/query_server_spec.rb
index 1de8e5bc..de1df5c1 100644
--- a/test/view_server/query_server_spec.rb
+++ b/test/view_server/query_server_spec.rb
@@ -139,7 +139,7 @@ functions = {
"js" => %{function(doc){emit("foo",doc.a); emit("bar",doc.a)}},
"erlang" => <<-ERLANG
fun({Doc}) ->
- A = proplists:get_value(<<"a">>, Doc, null),
+ A = couch_util:get_value(<<"a">>, Doc, null),
Emit(<<"foo">>, A),
Emit(<<"bar">>, A)
end.
@@ -153,7 +153,7 @@ functions = {
JS
"erlang" => <<-ERLANG
fun({Doc}) ->
- A = proplists:get_value(<<"a">>, Doc, null),
+ A = couch_util:get_value(<<"a">>, Doc, null),
Emit(<<"baz">>, A)
end.
ERLANG
@@ -175,7 +175,7 @@ functions = {
JS
"erlang" => <<-ERLANG
fun({NewDoc}, _OldDoc, _UserCtx) ->
- case proplists:get_value(<<"bad">>, NewDoc) of
+ case couch_util:get_value(<<"bad">>, NewDoc) of
undefined -> 1;
_ -> {[{forbidden, <<"bad doc">>}]}
end
@@ -191,8 +191,8 @@ functions = {
JS
"erlang" => <<-ERLANG
fun({Doc}, Req) ->
- Title = proplists:get_value(<<"title">>, Doc),
- Body = proplists:get_value(<<"body">>, Doc),
+ Title = couch_util:get_value(<<"title">>, Doc),
+ Body = couch_util:get_value(<<"body">>, Doc),
Resp = <<Title/binary, " - ", Body/binary>>,
{[{<<"body">>, Resp}]}
end.
@@ -208,8 +208,8 @@ functions = {
JS
"erlang" => <<-ERLANG
fun({Doc}, Req) ->
- Title = proplists:get_value(<<"title">>, Doc),
- Body = proplists:get_value(<<"body">>, Doc),
+ Title = couch_util:get_value(<<"title">>, Doc),
+ Body = couch_util:get_value(<<"body">>, Doc),
Resp = <<Title/binary, " - ", Body/binary>>,
{[
{<<"code">>, 200},
@@ -256,9 +256,9 @@ functions = {
"erlang" => <<-ERLANG,
fun(Head, {Req}) ->
Send(<<"first chunk">>),
- Send(proplists:get_value(<<"q">>, Req)),
+ Send(couch_util:get_value(<<"q">>, Req)),
Fun = fun({Row}, _) ->
- Send(proplists:get_value(<<"key">>, Row)),
+ Send(couch_util:get_value(<<"key">>, Row)),
{ok, nil}
end,
{ok, _} = FoldRows(Fun, nil),
@@ -283,7 +283,7 @@ functions = {
fun(Head, Req) ->
Send(<<"bacon">>),
Fun = fun({Row}, _) ->
- Send(proplists:get_value(<<"key">>, Row)),
+ Send(couch_util:get_value(<<"key">>, Row)),
Send(<<"eggs">>),
{ok, nil}
end,
@@ -307,9 +307,9 @@ functions = {
"erlang" => <<-ERLANG,
fun(Head, {Req}) ->
Send(<<"first chunk">>),
- Send(proplists:get_value(<<"q">>, Req)),
+ Send(couch_util:get_value(<<"q">>, Req)),
Fun = fun({Row}, _) ->
- Send(proplists:get_value(<<"key">>, Row)),
+ Send(couch_util:get_value(<<"key">>, Row)),
{ok, nil}
end,
FoldRows(Fun, nil),
@@ -335,13 +335,13 @@ functions = {
"erlang" => <<-ERLANG,
fun(Head, {Req}) ->
Send(<<"first chunk">>),
- Send(proplists:get_value(<<"q">>, Req)),
+ Send(couch_util:get_value(<<"q">>, Req)),
Fun = fun
({Row}, Count) when Count < 2 ->
- Send(proplists:get_value(<<"key">>, Row)),
+ Send(couch_util:get_value(<<"key">>, Row)),
{ok, Count+1};
({Row}, Count) when Count == 2 ->
- Send(proplists:get_value(<<"key">>, Row)),
+ Send(couch_util:get_value(<<"key">>, Row)),
{stop, <<"early tail">>}
end,
{ok, Tail} = FoldRows(Fun, 0),
@@ -380,10 +380,10 @@ functions = {
Send(<<"bacon">>),
Fun = fun
({Row}, Count) when Count < 2 ->
- Send(proplists:get_value(<<"key">>, Row)),
+ Send(couch_util:get_value(<<"key">>, Row)),
{ok, Count+1};
({Row}, Count) when Count == 2 ->
- Send(proplists:get_value(<<"key">>, Row)),
+ Send(couch_util:get_value(<<"key">>, Row)),
{stop, <<"early">>}
end,
{ok, Tail} = FoldRows(Fun, 0),
@@ -408,9 +408,9 @@ functions = {
"erlang" => <<-ERLANG,
fun(Head, {Req}) ->
Send(<<"first chunk">>),
- Send(proplists:get_value(<<"q">>, Req)),
+ Send(couch_util:get_value(<<"q">>, Req)),
Fun = fun({Row}, _) ->
- Send(proplists:get_value(<<"key">>, Row)),
+ Send(couch_util:get_value(<<"key">>, Row)),
{ok, nil}
end,
FoldRows(Fun, nil),
@@ -428,7 +428,7 @@ functions = {
JS
"erlang" => <<-ERLANG,
fun({Doc}, Req) ->
- proplists:get_value(<<"good">>, Doc)
+ couch_util:get_value(<<"good">>, Doc)
end.
ERLANG
},