From 6d18638d747374cb0e90e9bcbefbc71d959254fd Mon Sep 17 00:00:00 2001 From: Brad Anderson Date: Wed, 18 Aug 2010 16:16:09 -0400 Subject: clear out root folder a bit, moving couch bits into apps/couch or rel/ --- AUTHORS | 20 - BUGS | 6 - CHANGES | 582 --------------- DEVELOPERS | 95 --- INSTALL.Unix | 231 ------ INSTALL.Windows | 148 ---- LICENSE | 371 ---------- NEWS | 276 ------- NOTICE | 51 -- README | 81 -- THANKS | 69 -- apps/couch/AUTHORS | 20 + apps/couch/BUGS | 6 + apps/couch/CHANGES | 582 +++++++++++++++ apps/couch/DEVELOPERS | 95 +++ apps/couch/INSTALL.Unix | 231 ++++++ apps/couch/INSTALL.Windows | 148 ++++ apps/couch/LICENSE | 371 ++++++++++ apps/couch/NEWS | 276 +++++++ apps/couch/NOTICE | 51 ++ apps/couch/README | 81 ++ apps/couch/THANKS | 69 ++ apps/couch/license.skip | 107 +++ apps/couch/test/bench/bench_marks.js | 103 +++ apps/couch/test/bench/benchbulk.sh | 69 ++ apps/couch/test/bench/run.tpl | 28 + apps/couch/test/etap/001-load.t | 68 ++ apps/couch/test/etap/002-icu-driver.t | 33 + apps/couch/test/etap/010-file-basics.t | 107 +++ apps/couch/test/etap/011-file-headers.t | 145 ++++ apps/couch/test/etap/020-btree-basics.t | 205 +++++ apps/couch/test/etap/021-btree-reductions.t | 141 ++++ apps/couch/test/etap/030-doc-from-json.t | 239 ++++++ apps/couch/test/etap/031-doc-to-json.t | 200 +++++ apps/couch/test/etap/040-util.t | 80 ++ apps/couch/test/etap/041-uuid-gen-seq.ini | 19 + apps/couch/test/etap/041-uuid-gen-utc.ini | 19 + apps/couch/test/etap/041-uuid-gen.t | 118 +++ apps/couch/test/etap/050-stream.t | 87 +++ apps/couch/test/etap/060-kt-merging.t | 140 ++++ apps/couch/test/etap/061-kt-missing-leaves.t | 65 ++ apps/couch/test/etap/062-kt-remove-leaves.t | 69 ++ apps/couch/test/etap/063-kt-get-leaves.t | 98 +++ apps/couch/test/etap/064-kt-counting.t | 46 ++ apps/couch/test/etap/065-kt-stemming.t | 42 ++ apps/couch/test/etap/070-couch-db.t | 75 ++ apps/couch/test/etap/080-config-get-set.t | 128 ++++ apps/couch/test/etap/081-config-override.1.ini | 22 + apps/couch/test/etap/081-config-override.2.ini | 22 + apps/couch/test/etap/081-config-override.t | 212 ++++++ apps/couch/test/etap/082-config-register.t | 94 +++ apps/couch/test/etap/083-config-no-files.t | 55 ++ apps/couch/test/etap/090-task-status.t | 209 ++++++ apps/couch/test/etap/100-ref-counter.t | 114 +++ apps/couch/test/etap/110-replication-httpc.t | 134 ++++ .../couch/test/etap/111-replication-changes-feed.t | 254 +++++++ .../couch/test/etap/112-replication-missing-revs.t | 195 +++++ .../test/etap/113-replication-attachment-comp.t | 273 +++++++ apps/couch/test/etap/120-stats-collect.t | 150 ++++ apps/couch/test/etap/121-stats-aggregates.cfg | 19 + apps/couch/test/etap/121-stats-aggregates.ini | 20 + apps/couch/test/etap/121-stats-aggregates.t | 171 +++++ apps/couch/test/etap/130-attachments-md5.t | 252 +++++++ apps/couch/test/etap/140-attachment-comp.t | 711 ++++++++++++++++++ apps/couch/test/etap/150-invalid-view-seq.t | 192 +++++ apps/couch/test/etap/160-vhosts.t | 131 ++++ apps/couch/test/etap/run.tpl | 27 + apps/couch/test/etap/test_util.erl.in | 35 + apps/couch/test/javascript/cli_runner.js | 52 ++ apps/couch/test/javascript/couch_http.js | 62 ++ apps/couch/test/javascript/run.tpl | 30 + apps/couch/test/view_server/query_server_spec.rb | 824 +++++++++++++++++++++ apps/couch/test/view_server/run_native_process.es | 59 ++ license.skip | 107 --- rebar.config | 14 - rel/overlay/var/share/server/filter.js | 23 + rel/overlay/var/share/server/json2.js | 481 ++++++++++++ rel/overlay/var/share/server/loop.js | 140 ++++ 
rel/overlay/var/share/server/mimeparse.js | 158 ++++ rel/overlay/var/share/server/render.js | 352 +++++++++ rel/overlay/var/share/server/state.js | 27 + rel/overlay/var/share/server/util.js | 112 +++ rel/overlay/var/share/server/validate.js | 22 + rel/overlay/var/share/server/views.js | 137 ++++ share/server/filter.js | 23 - share/server/json2.js | 481 ------------ share/server/loop.js | 140 ---- share/server/mimeparse.js | 158 ---- share/server/render.js | 352 --------- share/server/state.js | 27 - share/server/util.js | 112 --- share/server/validate.js | 22 - share/server/views.js | 137 ---- test/bench/bench_marks.js | 103 --- test/bench/benchbulk.sh | 69 -- test/bench/run.tpl | 28 - test/etap/001-load.t | 68 -- test/etap/002-icu-driver.t | 33 - test/etap/010-file-basics.t | 107 --- test/etap/011-file-headers.t | 145 ---- test/etap/020-btree-basics.t | 205 ----- test/etap/021-btree-reductions.t | 141 ---- test/etap/030-doc-from-json.t | 239 ------ test/etap/031-doc-to-json.t | 200 ----- test/etap/040-util.t | 80 -- test/etap/041-uuid-gen-seq.ini | 19 - test/etap/041-uuid-gen-utc.ini | 19 - test/etap/041-uuid-gen.t | 118 --- test/etap/050-stream.t | 87 --- test/etap/060-kt-merging.t | 140 ---- test/etap/061-kt-missing-leaves.t | 65 -- test/etap/062-kt-remove-leaves.t | 69 -- test/etap/063-kt-get-leaves.t | 98 --- test/etap/064-kt-counting.t | 46 -- test/etap/065-kt-stemming.t | 42 -- test/etap/070-couch-db.t | 75 -- test/etap/080-config-get-set.t | 128 ---- test/etap/081-config-override.1.ini | 22 - test/etap/081-config-override.2.ini | 22 - test/etap/081-config-override.t | 212 ------ test/etap/082-config-register.t | 94 --- test/etap/083-config-no-files.t | 55 -- test/etap/090-task-status.t | 209 ------ test/etap/100-ref-counter.t | 114 --- test/etap/110-replication-httpc.t | 134 ---- test/etap/111-replication-changes-feed.t | 254 ------- test/etap/112-replication-missing-revs.t | 195 ----- test/etap/113-replication-attachment-comp.t | 273 ------- test/etap/120-stats-collect.t | 150 ---- test/etap/121-stats-aggregates.cfg | 19 - test/etap/121-stats-aggregates.ini | 20 - test/etap/121-stats-aggregates.t | 171 ----- test/etap/130-attachments-md5.t | 252 ------- test/etap/140-attachment-comp.t | 711 ------------------ test/etap/150-invalid-view-seq.t | 192 ----- test/etap/160-vhosts.t | 131 ---- test/etap/run.tpl | 27 - test/etap/test_util.erl.in | 35 - test/javascript/cli_runner.js | 52 -- test/javascript/couch_http.js | 62 -- test/javascript/run.tpl | 30 - test/view_server/query_server_spec.rb | 824 --------------------- test/view_server/run_native_process.es | 59 -- 143 files changed, 10132 insertions(+), 10146 deletions(-) delete mode 100644 AUTHORS delete mode 100644 BUGS delete mode 100644 CHANGES delete mode 100644 DEVELOPERS delete mode 100644 INSTALL.Unix delete mode 100644 INSTALL.Windows delete mode 100644 LICENSE delete mode 100644 NEWS delete mode 100644 NOTICE delete mode 100644 README delete mode 100644 THANKS create mode 100644 apps/couch/AUTHORS create mode 100644 apps/couch/BUGS create mode 100644 apps/couch/CHANGES create mode 100644 apps/couch/DEVELOPERS create mode 100644 apps/couch/INSTALL.Unix create mode 100644 apps/couch/INSTALL.Windows create mode 100644 apps/couch/LICENSE create mode 100644 apps/couch/NEWS create mode 100644 apps/couch/NOTICE create mode 100644 apps/couch/README create mode 100644 apps/couch/THANKS create mode 100644 apps/couch/license.skip create mode 100644 apps/couch/test/bench/bench_marks.js create mode 100755 
apps/couch/test/bench/benchbulk.sh create mode 100755 apps/couch/test/bench/run.tpl create mode 100755 apps/couch/test/etap/001-load.t create mode 100644 apps/couch/test/etap/002-icu-driver.t create mode 100755 apps/couch/test/etap/010-file-basics.t create mode 100755 apps/couch/test/etap/011-file-headers.t create mode 100755 apps/couch/test/etap/020-btree-basics.t create mode 100755 apps/couch/test/etap/021-btree-reductions.t create mode 100755 apps/couch/test/etap/030-doc-from-json.t create mode 100755 apps/couch/test/etap/031-doc-to-json.t create mode 100755 apps/couch/test/etap/040-util.t create mode 100644 apps/couch/test/etap/041-uuid-gen-seq.ini create mode 100644 apps/couch/test/etap/041-uuid-gen-utc.ini create mode 100755 apps/couch/test/etap/041-uuid-gen.t create mode 100755 apps/couch/test/etap/050-stream.t create mode 100755 apps/couch/test/etap/060-kt-merging.t create mode 100755 apps/couch/test/etap/061-kt-missing-leaves.t create mode 100755 apps/couch/test/etap/062-kt-remove-leaves.t create mode 100755 apps/couch/test/etap/063-kt-get-leaves.t create mode 100755 apps/couch/test/etap/064-kt-counting.t create mode 100755 apps/couch/test/etap/065-kt-stemming.t create mode 100755 apps/couch/test/etap/070-couch-db.t create mode 100755 apps/couch/test/etap/080-config-get-set.t create mode 100644 apps/couch/test/etap/081-config-override.1.ini create mode 100644 apps/couch/test/etap/081-config-override.2.ini create mode 100755 apps/couch/test/etap/081-config-override.t create mode 100755 apps/couch/test/etap/082-config-register.t create mode 100755 apps/couch/test/etap/083-config-no-files.t create mode 100755 apps/couch/test/etap/090-task-status.t create mode 100755 apps/couch/test/etap/100-ref-counter.t create mode 100755 apps/couch/test/etap/110-replication-httpc.t create mode 100755 apps/couch/test/etap/111-replication-changes-feed.t create mode 100755 apps/couch/test/etap/112-replication-missing-revs.t create mode 100755 apps/couch/test/etap/113-replication-attachment-comp.t create mode 100755 apps/couch/test/etap/120-stats-collect.t create mode 100644 apps/couch/test/etap/121-stats-aggregates.cfg create mode 100644 apps/couch/test/etap/121-stats-aggregates.ini create mode 100755 apps/couch/test/etap/121-stats-aggregates.t create mode 100755 apps/couch/test/etap/130-attachments-md5.t create mode 100755 apps/couch/test/etap/140-attachment-comp.t create mode 100755 apps/couch/test/etap/150-invalid-view-seq.t create mode 100755 apps/couch/test/etap/160-vhosts.t create mode 100644 apps/couch/test/etap/run.tpl create mode 100644 apps/couch/test/etap/test_util.erl.in create mode 100644 apps/couch/test/javascript/cli_runner.js create mode 100644 apps/couch/test/javascript/couch_http.js create mode 100644 apps/couch/test/javascript/run.tpl create mode 100644 apps/couch/test/view_server/query_server_spec.rb create mode 100755 apps/couch/test/view_server/run_native_process.es delete mode 100644 license.skip delete mode 100644 rebar.config create mode 100644 rel/overlay/var/share/server/filter.js create mode 100644 rel/overlay/var/share/server/json2.js create mode 100644 rel/overlay/var/share/server/loop.js create mode 100644 rel/overlay/var/share/server/mimeparse.js create mode 100644 rel/overlay/var/share/server/render.js create mode 100644 rel/overlay/var/share/server/state.js create mode 100644 rel/overlay/var/share/server/util.js create mode 100644 rel/overlay/var/share/server/validate.js create mode 100644 rel/overlay/var/share/server/views.js delete mode 100644 share/server/filter.js 
delete mode 100644 share/server/json2.js delete mode 100644 share/server/loop.js delete mode 100644 share/server/mimeparse.js delete mode 100644 share/server/render.js delete mode 100644 share/server/state.js delete mode 100644 share/server/util.js delete mode 100644 share/server/validate.js delete mode 100644 share/server/views.js delete mode 100644 test/bench/bench_marks.js delete mode 100755 test/bench/benchbulk.sh delete mode 100755 test/bench/run.tpl delete mode 100755 test/etap/001-load.t delete mode 100644 test/etap/002-icu-driver.t delete mode 100755 test/etap/010-file-basics.t delete mode 100755 test/etap/011-file-headers.t delete mode 100755 test/etap/020-btree-basics.t delete mode 100755 test/etap/021-btree-reductions.t delete mode 100755 test/etap/030-doc-from-json.t delete mode 100755 test/etap/031-doc-to-json.t delete mode 100755 test/etap/040-util.t delete mode 100644 test/etap/041-uuid-gen-seq.ini delete mode 100644 test/etap/041-uuid-gen-utc.ini delete mode 100755 test/etap/041-uuid-gen.t delete mode 100755 test/etap/050-stream.t delete mode 100755 test/etap/060-kt-merging.t delete mode 100755 test/etap/061-kt-missing-leaves.t delete mode 100755 test/etap/062-kt-remove-leaves.t delete mode 100755 test/etap/063-kt-get-leaves.t delete mode 100755 test/etap/064-kt-counting.t delete mode 100755 test/etap/065-kt-stemming.t delete mode 100755 test/etap/070-couch-db.t delete mode 100755 test/etap/080-config-get-set.t delete mode 100644 test/etap/081-config-override.1.ini delete mode 100644 test/etap/081-config-override.2.ini delete mode 100755 test/etap/081-config-override.t delete mode 100755 test/etap/082-config-register.t delete mode 100755 test/etap/083-config-no-files.t delete mode 100755 test/etap/090-task-status.t delete mode 100755 test/etap/100-ref-counter.t delete mode 100755 test/etap/110-replication-httpc.t delete mode 100755 test/etap/111-replication-changes-feed.t delete mode 100755 test/etap/112-replication-missing-revs.t delete mode 100755 test/etap/113-replication-attachment-comp.t delete mode 100755 test/etap/120-stats-collect.t delete mode 100644 test/etap/121-stats-aggregates.cfg delete mode 100644 test/etap/121-stats-aggregates.ini delete mode 100755 test/etap/121-stats-aggregates.t delete mode 100755 test/etap/130-attachments-md5.t delete mode 100755 test/etap/140-attachment-comp.t delete mode 100755 test/etap/150-invalid-view-seq.t delete mode 100755 test/etap/160-vhosts.t delete mode 100644 test/etap/run.tpl delete mode 100644 test/etap/test_util.erl.in delete mode 100644 test/javascript/cli_runner.js delete mode 100644 test/javascript/couch_http.js delete mode 100644 test/javascript/run.tpl delete mode 100644 test/view_server/query_server_spec.rb delete mode 100755 test/view_server/run_native_process.es diff --git a/AUTHORS b/AUTHORS deleted file mode 100644 index b1a3559e..00000000 --- a/AUTHORS +++ /dev/null @@ -1,20 +0,0 @@ -Apache CouchDB AUTHORS -====================== - -A number of people have contributed directly to Apache CouchDB by writing -documentation or developing software. Some of these people are: - - * Damien Katz - * Jan Lehnardt - * Noah Slater - * Christopher Lenz - * J. Chris Anderson - * Paul Joseph Davis - * Adam Kocoloski - * Jason Davies - * Mark Hammond - * Benoît Chesneau - * Filipe Manana - * Robert Newson - -For a list of other credits see the `THANKS` file.
diff --git a/BUGS b/BUGS deleted file mode 100644 index 8cd1d161..00000000 --- a/BUGS +++ /dev/null @@ -1,6 +0,0 @@ -Apache CouchDB BUGS -=================== - -Please see the [documentation][1] on how to report bugs with Apache CouchDB. - -[1] http://couchdb.apache.org/community/issues.html diff --git a/CHANGES b/CHANGES deleted file mode 100644 index 08458f10..00000000 --- a/CHANGES +++ /dev/null @@ -1,582 +0,0 @@ -Apache CouchDB CHANGES -====================== - -Version 1.0.1 -------------- - -Storage System: - - * Fix data corruption bug COUCHDB-844. Please see - http://couchdb.apache.org/notice/1.0.1.html for details. - -Replicator: - - * Added support for replication via an HTTP/HTTP proxy. - * Fix pull replication of attachments from 0.11 to 1.0.x. - * Make the _changes feed work with non-integer seqnums. - -HTTP Interface: - - * Expose `committed_update_seq` for monitoring purposes. - * Show fields saved along with _deleted=true. Allows for auditing of deletes. - * More robust Accept-header detection. - -Authentication: - - * Enable basic-auth popup when required to access the server, to prevent - people from getting locked out. - -Futon: - - * User interface element for querying stale (cached) views. - -Build and System Integration: - - * Included additional source files for distribution. - -Version 1.0.0 -------------- - -Security: - - * Added authentication caching, to avoid repeated opening and closing of the - users database for each request requiring authentication. - -Storage System: - - * Small optimization for reordering result lists. - * More efficient header commits. - * Use O_APPEND to save lseeks. - * Faster implementation of pread_iolist(). Further improves performance on - concurrent reads. - -View Server: - - * Faster default view collation. - * Added option to include update_seq in view responses. - -Version 0.11.2 --------------- - -Replicator: - - * Fix bug when pushing design docs by non-admins, which was hanging the - replicator for no good reason. - * Fix bug when pulling design documents from a source that requires - basic-auth. - -HTTP Interface: - - * Better error messages on invalid URL requests. - -Authentication: - - * User documents can now be deleted by admins or the user. - -Security: - - * Avoid potential DOS attack by guarding all creation of atoms. - -Futon: - - * Add some Futon files that were missing from the Makefile. - -Version 0.11.1 --------------- - -HTTP Interface: - - * Mask passwords in active tasks and logging. - * Update mochijson2 to allow output of BigNums not in float form. - * Added support for X-HTTP-METHOD-OVERRIDE. - * Better error message for database names. - * Disable jsonp by default. - * Accept gzip encoded standalone attachments. - * Made max_concurrent_connections configurable. - * Made changes API more robust. - * Send newly generated document rev to callers of an update function. - -Futon: - - * Use "expando links" for over-long document values in Futon. - * Added continuous replication option. - * Added option to replicating test results anonymously to a community - CouchDB instance. - * Allow creation and deletion of config entries. - * Fixed display issues with doc ids that have escaped characters. - * Fixed various UI issues. - -Build and System Integration: - - * Output of `couchdb --help` has been improved. - * Fixed compatibility with the Erlang R14 series. - * Fixed warnings on Linux builds. - * Fixed build error when aclocal needs to be called during the build. - * Require ICU 4.3.1. 
- * Fixed compatibility with Solaris. - -Security: - - * Added authentication redirect URL to log in clients. - * Fixed query parameter encoding issue in oauth.js. - * Made authentication timeout configurable. - * Temporary views are now admin-only resources. - -Storage System: - - * Don't require a revpos for attachment stubs. - * Added checking to ensure when a revpos is sent with an attachment stub, - it's correct. - * Make file deletions async to avoid pauses during compaction and db - deletion. - * Fixed for wrong offset when writing headers and converting them to blocks, - only triggered when header is larger than 4k. - * Preserve _revs_limit and instance_start_time after compaction. - -Configuration System: - - * Fixed timeout with large .ini files. - -JavaScript Clients: - - * Added tests for couch.js and jquery.couch.js - * Added changes handler to jquery.couch.js. - * Added cache busting to jquery.couch.js if the user agent is msie. - * Added support for multi-document-fetch (via _all_docs) to jquery.couch.js. - * Added attachment versioning to jquery.couch.js. - * Added option to control ensure_full_commit to jquery.couch.js. - * Added list functionality to jquery.couch.js. - * Fixed issues where bulkSave() wasn't sending a POST body. - -View Server: - - * Provide a UUID to update functions (and all other functions) that they can - use to create new docs. - * Upgrade CommonJS modules support to 1.1.1. - * Fixed erlang filter funs and normalize filter fun API. - * Fixed hang in view shutdown. - -Log System: - - * Log HEAD requests as HEAD, not GET. - * Keep massive JSON blobs out of the error log. - * Fixed a timeout issue. - -Replication System: - - * Refactored various internal APIs related to attachment streaming. - * Fixed hanging replication. - * Fixed keepalive issue. - -URL Rewriter & Vhosts: - - * Allow more complex keys in rewriter. - * Allow global rewrites so system defaults are available in vhosts. - * Allow isolation of databases with vhosts. - * Fix issue with passing variables to query parameters. - -Test Suite: - - * Made the test suite overall more reliable. - -Version 0.11.0 --------------- - -Security: - - * Fixed CVE-2010-0009: Apache CouchDB Timing Attack Vulnerability. - * Added default cookie-authentication and users database. - * Added Futon user interface for user signup and login. - * Added per-database reader access control lists. - * Added per-database security object for configuration data in validation - functions. - * Added proxy authentication handler - -HTTP Interface: - - * Provide Content-MD5 header support for attachments. - * Added URL Rewriter handler. - * Added virtual host handling. - -View Server: - - * Added optional 'raw' binary collation for faster view builds where Unicode - collation is not important. - * Improved view index build time by reducing ICU collation callouts. - * Improved view information objects. - * Bug fix for partial updates during view builds. - * Move query server to a design-doc based protocol. - * Use json2.js for JSON serialization for compatiblity with native JSON. - * Major refactoring of couchjs to lay the groundwork for disabling cURL - support. The new HTTP interaction acts like a synchronous XHR. Example usage - of the new system is in the JavaScript CLI test runner. - -Replication: - - * Added option to implicitly create replication target databases. - * Avoid leaking file descriptors on automatic replication restarts. - * Added option to replicate a list of documents by id. 
- * Allow continuous replication to be cancelled. - -Storage System: - - * Adds batching of multiple updating requests, to improve throughput with many - writers. Removed the now redundant couch_batch_save module. - * Adds configurable compression of attachments. - -Runtime Statistics: - - * Statistics are now calculated for a moving window instead of non-overlapping - timeframes. - * Fixed a problem with statistics timers and system sleep. - * Moved statistic names to a term file in the priv directory. - -Futon: - - * Added a button for view compaction. - * JSON strings are now displayed as-is in the document view, without the escaping of - new-lines and quotes. That dramatically improves readability of multi-line - strings. - * Same goes for editing of JSON string values. When a change to a field value is - submitted, and the value is not valid JSON it is assumed to be a string. This - improves editing of multi-line strings a lot. - * Hitting tab in textareas no longer moves focus to the next form field, but simply - inserts a tab character at the current caret position. - * Fixed some font declarations. - -Build and System Integration: - - * Updated and improved source documentation. - * Fixed distribution preparation for building on Mac OS X. - * Added support for building a Windows installer as part of 'make dist'. - * Bug fix for building couch.app's module list. - * ETap tests are now run during make distcheck. This included a number of - updates to the build system to properly support VPATH builds. - * Gavin McDonald setup a build-bot instance. More info can be found at - http://ci.apache.org/buildbot.html - -Version 0.10.1 --------------- - -Replicator: - - * Stability enhancements regarding redirects, timeouts, OAuth. - -Query Server: - - * Avoid process leaks - * Allow list and view to span languages - -Stats: - - * Eliminate new process flood on system wake - -Build and System Integration: - - * Test suite now works with the distcheck target. - -Version 0.10.0 --------------- - -Storage Format: - - * Add move headers with checksums to the end of database files for extra robust - storage and faster storage. - -View Server: - - * Added native Erlang views for high-performance applications. - -HTTP Interface: - - * Added optional cookie-based authentication handler. - * Added optional two-legged OAuth authentication handler. - -Build and System Integration: - - * Changed `couchdb` script configuration options. - * Added default.d and local.d configuration directories to load sequence. - - -Version 0.9.2 -------------- - -Replication: - - * Fix replication with 0.10 servers initiated by an 0.9 server (COUCHDB-559). - -Build and System Integration: - - * Remove branch callbacks to allow building couchjs against newer versions of - Spidermonkey. - -Version 0.9.1 -------------- - -Build and System Integration: - - * PID file directory is now created by the SysV/BSD daemon scripts. - * Fixed the environment variables shown by the configure script. - * Fixed the build instructions shown by the configure script. - * Updated ownership and permission advice in `README` for better security. - -Configuration and stats system: - - * Corrected missing configuration file error message. - * Fixed incorrect recording of request time. - -Database Core: - - * Document validation for underscore prefixed variables. - * Made attachment storage less sparse. - * Fixed problems when a database with delayed commits pending is considered - idle, and subject to losing changes when shutdown. 
(COUCHDB-334) - -External Handlers: - - * Fix POST requests. - -Futon: - - * Redirect when loading a deleted view URI from the cookie. - -HTTP Interface: - - * Attachment requests respect the "rev" query-string parameter. - -JavaScript View Server: - - * Useful JavaScript Error messages. - -Replication: - - * Added support for Unicode characters transmitted as UTF-16 surrogate pairs. - * URL-encode attachment names when necessary. - * Pull specific revisions of an attachment, instead of just the latest one. - * Work around a rare chunk-merging problem in ibrowse. - * Work with documents containing Unicode characters outside the Basic - Multilingual Plane. - -Version 0.9.0 -------------- - -Futon Utility Client: - - * Added pagination to the database listing page. - * Implemented attachment uploading from the document page. - * Added page that shows the current configuration, and allows modification of - option values. - * Added a JSON "source view" for document display. - * JSON data in view rows is now syntax highlighted. - * Removed the use of an iframe for better integration with browser history and - bookmarking. - * Full database listing in the sidebar has been replaced by a short list of - recent databases. - * The view editor now allows selection of the view language if there is more - than one configured. - * Added links to go to the raw view or document URI. - * Added status page to display currently running tasks in CouchDB. - * JavaScript test suite split into multiple files. - * Pagination for reduce views. - -Design Document Resource Paths: - - * Added httpd_design_handlers config section. - * Moved _view to httpd_design_handlers. - * Added ability to render documents as non-JSON content-types with _show and - _list functions, which are also httpd_design_handlers. - -HTTP Interface: - - * Added client side UUIDs for idempotent document creation - * HTTP COPY for documents - * Streaming of chunked attachment PUTs to disk - * Remove negative count feature - * Add include_docs option for view queries - * Add multi-key view post for views - * Query parameter validation - * Use stale=ok to request potentially cached view index - * External query handler module for full-text or other indexers. - * Etags for attachments, views, shows and lists - * Show and list functions for rendering documents and views as developer - controlled content-types. - * Attachment names may use slashes to allow uploading of nested directories - (useful for static web hosting). - * Option for a view to run over design documents. - * Added newline to JSON responses. Closes bike-shed. - -Replication: - - * Using ibrowse. - * Checkpoint replications so failures are less expensive. - * Automatically retry of failed replications. - * Stream attachments in pull-replication. - -Database Core: - - * Faster B-tree implementation. - * Changed internal JSON term format. - * Improvements to Erlang VM interactions under heavy load. - * User context and administrator role. - * Update validations with design document validation functions. - * Document purge functionality. - * Ref-counting for database file handles. - -Build and System Integration: - - * The `couchdb` script now supports system chainable configuration files. - * The Mac OS X daemon script now redirects STDOUT and STDERR like SysV/BSD. - * The build and system integration have been improved for portability. - * Added COUCHDB_OPTIONS to etc/default/couchdb file. - * Remove COUCHDB_INI_FILE and COUCHDB_PID_FILE from etc/default/couchdb file. 
- * Updated `configure.ac` to manually link `libm` for portability. - * Updated `configure.ac` to extended default library paths. - * Removed inets configuration files. - * Added command line test runner. - * Created dev target for make. - -Configuration and stats system: - - * Separate default and local configuration files. - * HTTP interface for configuration changes. - * Statistics framework with HTTP query API. - -Version 0.8.1-incubating ------------------------- - -Database Core: - - * Fix for replication problems where the write queues can get backed up if the - writes aren't happening fast enough to keep up with the reads. For a large - replication, this can exhaust memory and crash, or slow down the machine - dramatically. The fix keeps only one document in the write queue at a time. - * Fix for databases sometimes incorrectly reporting that they contain 0 - documents after compaction. - * CouchDB now uses ibrowse instead of inets for its internal HTTP client - implementation. This means better replication stability. - -HTTP Interface: - - * Fix for chunked responses where chunks were always being split into multiple - TCP packets, which caused problems with the test suite under Safari, and in - some other cases. - * Fix for an invalid JSON response body being returned for some kinds of - views. (COUCHDB-84) - * Fix for connections not getting closed after rejecting a chunked request. - (COUCHDB-55) - * CouchDB can now be bound to IPv6 addresses. - * The HTTP `Server` header now contains the versions of CouchDB and Erlang. - -JavaScript View Server: - - * Sealing of documents has been disabled due to an incompatibility with - SpiderMonkey 1.9. - * Improve error handling for undefined values emitted by map functions. - (COUCHDB-83) - -Build and System Integration: - - * The `couchdb` script no longer uses `awk` for configuration checks as this - was causing portability problems. - * Updated `sudo` example in `README` to use the `-i` option, this fixes - problems when invoking from a directory the `couchdb` user cannot access. - -Futon: - - * The view selector dropdown should now work in Opera and Internet Explorer - even when it includes optgroups for design documents. (COUCHDB-81) - -Version 0.8.0-incubating ------------------------- - -Database Core: - - * The view engine has been completely decoupled from the storage engine. Index - data is now stored in separate files, and the format of the main database - file has changed. - * Databases can now be compacted to reclaim space used for deleted documents - and old document revisions. - * Support for incremental map/reduce views has been added. - * To support map/reduce, the structure of design documents has changed. View - values are now JSON objects containing at least a `map` member, and - optionally a `reduce` member. - * View servers are now identified by name (for example `javascript`) instead of - by media type. - * Automatically generated document IDs are now based on proper UUID generation - using the crypto module. - * The field `content-type` in the JSON representation of attachments has been - renamed to `content_type` (underscore). - -HTTP Interface: - - * CouchDB now uses MochiWeb instead of inets for the HTTP server - implementation. Among other things, this means that the extra configuration - files needed for inets (such as `couch_httpd.conf`) are no longer used. - * The HTTP interface now completely supports the `HEAD` method. (COUCHDB-3) - * Improved compliance of `Etag` handling with the HTTP specification. 
- (COUCHDB-13) - * Etags are no longer included in responses to document `GET` requests that - include query string parameters causing the JSON response to change without - the revision or the URI having changed. - * The bulk document update API has changed slightly on both the request and the - response side. In addition, bulk updates are now atomic. - * CouchDB now uses `TCP_NODELAY` to fix performance problems with persistent - connections on some platforms due to nagling. - * Including a `?descending=false` query string parameter in requests to views - no longer raises an error. - * Requests to unknown top-level reserved URLs (anything with a leading - underscore) now return a `unknown_private_path` error instead of the - confusing `illegal_database_name`. - * The Temporary view handling now expects a JSON request body, where the JSON - is an object with at least a `map` member, and optional `reduce` and - `language` members. - * Temporary views no longer determine the view server based on the Content-Type - header of the `POST` request, but rather by looking for a `language` member - in the JSON body of the request. - * The status code of responses to `DELETE` requests is now 200 to reflect that - that the deletion is performed synchronously. - -JavaScript View Server: - - * SpiderMonkey is no longer included with CouchDB, but rather treated as a - normal external dependency. A simple C program (`_couchjs`) is provided that - links against an existing SpiderMonkey installation and uses the interpreter - embedding API. - * View functions using the default JavaScript view server can now do logging - using the global `log(message)` function. Log messages are directed into the - CouchDB log at `INFO` level. (COUCHDB-59) - * The global `map(key, value)` function made available to view code has been - renamed to `emit(key, value)`. - * Fixed handling of exceptions raised by view functions. - -Build and System Integration: - - * CouchDB can automatically respawn following a server crash. - * Database server no longer refuses to start with a stale PID file. - * System logrotate configuration provided. - * Improved handling of ICU shared libraries. - * The `couchdb` script now automatically enables SMP support in Erlang. - * The `couchdb` and `couchjs` scripts have been improved for portability. - * The build and system integration have been improved for portability. - -Futon: - - * When adding a field to a document, Futon now just adds a field with an - autogenerated name instead of prompting for the name with a dialog. The name - is automatically put into edit mode so that it can be changed immediately. - * Fields are now sorted alphabetically by name when a document is displayed. - * Futon can be used to create and update permanent views. - * The maximum number of rows to display per page on the database page can now - be adjusted. - * Futon now uses the XMLHTTPRequest API asynchronously to communicate with the - CouchDB HTTP server, so that most operations no longer block the browser. - * View results sorting can now be switched between ascending and descending by - clicking on the `Key` column header. - * Fixed a bug where documents that contained a `@` character could not be - viewed. (COUCHDB-12) - * The database page now provides a `Compact` button to trigger database - compaction. (COUCHDB-38) - * Fixed portential double encoding of document IDs and other URI segments in - many instances. (COUCHDB-39) - * Improved display of attachments. 
- * The JavaScript Shell has been removed due to unresolved licensing issues. diff --git a/DEVELOPERS b/DEVELOPERS deleted file mode 100644 index a7a6926e..00000000 --- a/DEVELOPERS +++ /dev/null @@ -1,95 +0,0 @@ -Apache CouchDB DEVELOPERS -========================= - -Only follow these instructions if you are building from a source checkout. - -If you're unsure what this means, ignore this document. - -Dependencies ------------- - -You will need the following installed: - - * GNU Automake (>=1.6.3) (http://www.gnu.org/software/automake/) - * GNU Autoconf (>=2.59) (http://www.gnu.org/software/autoconf/) - * GNU Libtool (http://www.gnu.org/software/libtool/) - * GNU help2man (http://www.gnu.org/software/help2man/) - -The `help2man` tool is optional, but will generate `man` pages for you. - -Debian-based (inc. Ubuntu) Systems -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -You can install the dependencies by running: - - apt-get install automake autoconf libtool help2man - -Be sure to update the version numbers to match your system's available packages. - -Mac OS X -~~~~~~~~ - -You can install the dependencies by running: - - port install automake autoconf libtool help2man - -You will need MacPorts installed to use the `port` command. - -Bootstrapping -------------- - -Bootstrap the pristine source by running: - - ./bootstrap - -You must repeat this step every time you update your source checkout. - -Testing -------- - -Check the test suite by running: - - make check - -Generate a coverage report by running: - - make cover - -Please report any problems to the developer's mailing list. - -Releasing ---------- - -Unix-like Systems -~~~~~~~~~~~~~~~~~ - -Configure the source by running: - - ./configure - -Prepare the release artefacts by running: - - make distcheck - -You can prepare signed release artefacts by running: - - make distsign - -The release artefacts can be found in the root source directory. - -Microsoft Windows -~~~~~~~~~~~~~~~~~ - -Configure the source by running: - - ./configure - -Prepare the release artefacts by running: - - make dist - -The release artefacts can be found in the `etc/windows` directory. - -Until the build system has been improved, you must make sure that you run this -command from a clean source checkout. If you do not, your test database and log -files will be bundled up in the release artefact. diff --git a/INSTALL.Unix b/INSTALL.Unix deleted file mode 100644 index 768e3846..00000000 --- a/INSTALL.Unix +++ /dev/null @@ -1,231 +0,0 @@ -Apache CouchDB README.Unix -========================== - -A high-level guide to Unix-like systems, inc. Mac OS X and Ubuntu. - -Dependencies ------------- - -You will need the following installed: - - * Erlang OTP (>=R12B5) (http://erlang.org/) - * ICU (http://icu.sourceforge.net/) - * OpenSSL (http://www.openssl.org/) - * Mozilla SpiderMonkey (1.8) (http://www.mozilla.org/js/spidermonkey/) - * libcurl (http://curl.haxx.se/libcurl/) - * GNU Make (http://www.gnu.org/software/make/) - * GNU Compiler Collection (http://gcc.gnu.org/) - -It is recommended that you install Erlang OTP R12B-5 or above where possible. - -Ubuntu -~~~~~~ - -See - - http://wiki.apache.org/couchdb/Installing_on_Ubuntu - -for updated instructions on how to install on Ubuntu. 
- -Debian-based Systems -~~~~~~~~~~~~~~~~~~~~ - -You can install the build tools by running: - - sudo apt-get install build-essential - -You can install the other dependencies by running: - - sudo apt-get install erlang libicu-dev libmozjs-dev libcurl4-openssl-dev - -Be sure to update the version numbers to match your system's available packages. - -Mac OS X -~~~~~~~~ - -You can install the build tools by running: - - open /Applications/Installers/Xcode\ Tools/XcodeTools.mpkg - -You can install the other dependencies by running: - - sudo port install icu erlang spidermonkey curl - -You will need MacPorts installed to use the `port` command. - -Installing ----------- - -Once you have satisfied the dependencies you should run: - - ./configure - -This script will configure CouchDB to be installed into `/usr/local` by default. - -If you wish to customise the installation, pass `--help` to this script. - -If everything was successful you should see the following message: - - You have configured Apache CouchDB, time to relax. - -Relax. - -To install CouchDB you should run: - - make && sudo make install - -You only need to use `sudo` if you're installing into a system directory. - -Try `gmake` if `make` is giving you any problems. - -If everything was successful you should see the following message: - - You have installed Apache CouchDB, time to relax. - -Relax. - -First Run ---------- - -You can start the CouchDB server by running: - - sudo -i -u couchdb couchdb - -This uses the `sudo` command to run the `couchdb` command as the `couchdb` user. - -When CouchDB starts it should eventually display the following message: - - Apache CouchDB has started, time to relax. - -Relax. - -To check that everything has worked, point your web browser to: - - http://127.0.0.1:5984/_utils/index.html - -From here you should run the test suite. - -Security Considerations ------------------------ - -You should create a special `couchdb` user for CouchDB. - -On many Unix-like systems you can run: - - adduser --system \ - --home /usr/local/var/lib/couchdb \ - --no-create-home \ - --shell /bin/bash \ - --group --gecos \ - "CouchDB Administrator" couchdb - -On Mac OS X you can use the Workgroup Manager to create users: - - http://www.apple.com/support/downloads/serveradmintools1047.html - -You must make sure that: - - * The user has a working POSIX shell - - * The user's home directory is `/usr/local/var/lib/couchdb` - -You can test this by: - - * Trying to log in as the `couchdb` user - - * Running `pwd` and checking the present working directory - -Change the ownership of the CouchDB directories by running: - - chown -R couchdb:couchdb /usr/local/etc/couchdb - chown -R couchdb:couchdb /usr/local/var/lib/couchdb - chown -R couchdb:couchdb /usr/local/var/log/couchdb - chown -R couchdb:couchdb /usr/local/var/run/couchdb - -Change the permission of the CouchDB directories by running: - - chmod 0770 /usr/local/etc/couchdb - chmod 0770 /usr/local/var/lib/couchdb - chmod 0770 /usr/local/var/log/couchdb - chmod 0770 /usr/local/var/run/couchdb - -Running as a Daemon -------------------- - -SysV/BSD-style Systems -~~~~~~~~~~~~~~~~~~~~~~ - -You can use the `couchdb` init script to control the CouchDB daemon. - -On SysV-style systems, the init script will be installed into: - - /usr/local/etc/init.d - -On BSD-style systems, the init script will be installed into: - - /usr/local/etc/rc.d - -We use the `[init.d|rc.d]` notation to refer to both of these directories. 
- -You can control the CouchDB daemon by running: - - /usr/local/etc/[init.d|rc.d]/couchdb [start|stop|restart|status] - -If you wish to configure how the init script works, you can edit: - - /usr/local/etc/default/couchdb - -Comment out the `COUCHDB_USER` setting if you're running as a non-superuser. - -To start the daemon on boot, copy the init script to: - - /etc/[init.d|rc.d] - -You should then configure your system to run the init script automatically. - -You may be able to run: - - sudo update-rc.d couchdb defaults - -If this fails, consult your system documentation for more information. - -A `logrotate` configuration is installed into: - - /usr/local/etc/logrotate.d/couchdb - -Consult your `logrotate` documentation for more information. - -It is critical that the CouchDB logs are rotated so as not to fill your disk. - -Mac OS X -~~~~~~~~ - -You can use the `launchctl` command to control the CouchDB daemon. - -You can load the configuration by running: - - sudo launchctl load \ - /usr/local/Library/LaunchDaemons/org.apache.couchdb.plist - -You can stop the CouchDB daemon by running: - - sudo launchctl unload \ - /usr/local/Library/LaunchDaemons/org.apache.couchdb.plist - -You can start CouchDB by running: - - sudo launchctl start org.apache.couchdb - -You can restart CouchDB by running: - - sudo launchctl stop org.apache.couchdb - -You can edit the launchd configuration by running: - - open /usr/local/Library/LaunchDaemons/org.apache.couchdb.plist - -To start the daemon on boot, copy the configuration file to: - - /Library/LaunchDaemons - -Consult your system documentation for more information. diff --git a/INSTALL.Windows b/INSTALL.Windows deleted file mode 100644 index 5c4a9587..00000000 --- a/INSTALL.Windows +++ /dev/null @@ -1,148 +0,0 @@ -Apache CouchDB README.Windows -============================== - -For a high-level guide to Microsoft Windows. - -Dependencies ------------- - -You will need the following installed: - - * Erlang OTP (>=R12B5) (http://erlang.org/) - * ICU (http://icu.sourceforge.net/) - * OpenSSL (http://www.openssl.org/) - * Mozilla SpiderMonkey (1.8) (http://www.mozilla.org/js/spidermonkey/) - * libcurl (http://curl.haxx.se/libcurl/) - * Cygwin (http://www.cygwin.com/) - * Visual Studio 2008 (http://msdn.microsoft.com/en-gb/vstudio/default.aspx) - -General Notes -------------- - - * When installing Erlang, you must build it from source. - - The CouchDB build makes use of a number of the Erlang build scripts. - - * When installing ICU, select the binaries built with Visual Studio 2008. - - * When installing Cygwin, be sure to select all the `development` tools. - - * When installing libcurl, be sure to install by hand. - - The Cygwin binaries are incompatible and will not work with Erlang. - -Setting Up Cygwin ------------------ - -Before starting any Cygwin terminals, run: - - set CYGWIN=nontsec - -To set up your environment, run: - - [VS_BIN]/vcvars32.bat - -Replace [VS_BIN] with the path to your Visual Studio `bin` directory. - -You must check that: - - * The `which link` command points to the Microsoft linker. - - * The `which cl` command points to the Microsoft compiler. - -If you do not do this, the ones found in `/usr/bin` may be used instead. - -Building Erlang ---------------- - -You must include OpenSSL. - -However, you can skip the GUI tools by running: - - echo "skipping gs" > lib/gs/SKIP - - echo "skipping ic" > lib/ic/SKIP - -Follow the rest of the Erlang instructions as described. 
- -After running: - - ./otp_build release -a - -You should run: - - ./release/win32/Install.exe - -This will set up the release/win32/bin directory correctly. - -To set up your environment for building CouchDB, run: - - eval `./otp_build env_win32` - -To set up the `ERL_TOP` environment variable, run: - - export ERL_TOP=[ERL_TOP] - -Replace `[ERL_TOP]` with the Erlang source directory name. - -Remember to use `/cygdrive/c/` instead of `c:/` as the directory prefix. - -To set up your path, run: - - export PATH=$ERL_TOP/release/win32/erts-5.7.2/bin:$PATH - -If everything was successful, you should be ready to build CouchDB. - -Relax. - -Building CouchDB ----------------- - -Once you have satisfied the dependencies you should run: - - ./configure \ - --with-js-include=/cygdrive/c/path_to_seamonkey_include \ - --with-js-lib=/cygdrive/c/path_to_seamonkey_lib \ - --with-win32-icu-binaries=/cygdrive/c/path_to_icu_binaries_root \ - --with-erlang=$ERL_TOP/release/win32/usr/include \ - --with-win32-curl=/cygdrive/c/path/to/curl/root/directory \ - --with-openssl-bin-dir=/cygdrive/c/openssl/bin \ - --with-msvc-redist-dir=/cygdrive/c/dir/with/vcredist_platform_executable \ - --prefix=$ERL_TOP/release/win32 - -This command could take a while to complete. - -If everything was successful you should see the following message: - - You have configured Apache CouchDB, time to relax. - -Relax. - -To install CouchDB you should run: - - make install - -If everything was successful you should see the following message: - - You have installed Apache CouchDB, time to relax. - -Relax. - -First Run ---------- - -You can start the CouchDB server by running: - - $ERL_TOP/release/win32/bin/couchdb.bat - -When CouchDB starts it should eventually display the following message: - - Apache CouchDB has started, time to relax. - -Relax. - -To check that everything has worked, point your web browser to: - - http://127.0.0.1:5984/_utils/index.html - -From here you should run the test suite. diff --git a/LICENSE b/LICENSE deleted file mode 100644 index 20a425b1..00000000 --- a/LICENSE +++ /dev/null @@ -1,371 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. 
- - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. - - -Apache CouchDB Subcomponents - -The Apache CouchDB project includes a number of subcomponents with separate -copyright notices and license terms. Your use of the code for the these -subcomponents is subject to the terms and conditions of the following licenses. - -For the m4/ac_check_icu.m4 component: - - Copyright (c) 2005 Akos Maroy - - Copying and distribution of this file, with or without modification, are - permitted in any medium without royalty provided the copyright notice - and this notice are preserved. 
- -For the share/www/script/jquery.js component: - - Copyright (c) 2009 John Resig, http://jquery.com/ - - Permission is hereby granted, free of charge, to any person obtaining - a copy of this software and associated documentation files (the - "Software"), to deal in the Software without restriction, including - without limitation the rights to use, copy, modify, merge, publish, - distribute, sublicense, and/or sell copies of the Software, and to - permit persons to whom the Software is furnished to do so, subject to - the following conditions: - - The above copyright notice and this permission notice shall be - included in all copies or substantial portions of the Software. - - THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, - EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF - MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND - NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE - LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION - OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION - WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - -For the share/www/script/jquery.form.js component: - - http://malsup.com/jquery/form/ - - Permission is hereby granted, free of charge, to any person obtaining - a copy of this software and associated documentation files (the - "Software"), to deal in the Software without restriction, including - without limitation the rights to use, copy, modify, merge, publish, - distribute, sublicense, and/or sell copies of the Software, and to - permit persons to whom the Software is furnished to do so, subject to - the following conditions: - - The above copyright notice and this permission notice shall be - included in all copies or substantial portions of the Software. - - THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, - EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF - MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND - NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE - LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION - OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION - WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - -For the share/www/script/json2.js component: - - Public Domain - - No warranty expressed or implied. Use at your own risk. - -For the src/mochiweb component: - - Copyright (c) 2007 Mochi Media, Inc. - - Permission is hereby granted, free of charge, to any person obtaining - a copy of this software and associated documentation files (the - "Software"), to deal in the Software without restriction, including - without limitation the rights to use, copy, modify, merge, publish, - distribute, sublicense, and/or sell copies of the Software, and to - permit persons to whom the Software is furnished to do so, subject to - the following conditions: - - The above copyright notice and this permission notice shall be - included in all copies or substantial portions of the Software. - - THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, - EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF - MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND - NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE - LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION - OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION - WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - -For the src/ibrowse component: - - Copyright (c) 2006, Chandrashekhar Mullaparthi - All rights reserved. - - Redistribution and use in source and binary forms, with or without - modification, are permitted provided that the following conditions are met: - - * Redistributions of source code must retain the above copyright notice, - this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above copyright notice, - this list of conditions and the following disclaimer in the documentation - and/or other materials provided with the distribution. - * Neither the name of the T-Mobile nor the names of its contributors may be - used to endorse or promote products derived from this software without - specific prior written permission. - - THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND - ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED - WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE - DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR - ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES - (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; - LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON - ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT - (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS - SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -For the src/erlang-oauth component: - - Copyright (c) 2008-2009 Tim Fletcher - - Permission is hereby granted, free of charge, to any person - obtaining a copy of this software and associated documentation - files (the "Software"), to deal in the Software without - restriction, including without limitation the rights to use, - copy, modify, merge, publish, distribute, sublicense, and/or sell - copies of the Software, and to permit persons to whom the - Software is furnished to do so, subject to the following - conditions: - - The above copyright notice and this permission notice shall be - included in all copies or substantial portions of the Software. - - THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, - EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES - OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND - NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT - HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, - WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING - FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR - OTHER DEALINGS IN THE SOFTWARE. 
- -For the src/etap component: - - Copyright (c) 2008-2009 Nick Gerakines - - Permission is hereby granted, free of charge, to any person - obtaining a copy of this software and associated documentation - files (the "Software"), to deal in the Software without - restriction, including without limitation the rights to use, - copy, modify, merge, publish, distribute, sublicense, and/or sell - copies of the Software, and to permit persons to whom the - Software is furnished to do so, subject to the following - conditions: - - The above copyright notice and this permission notice shall be - included in all copies or substantial portions of the Software. - - THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, - EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES - OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND - NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT - HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, - WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING - FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR - OTHER DEALINGS IN THE SOFTWARE. - diff --git a/NEWS b/NEWS deleted file mode 100644 index bd6b5d2b..00000000 --- a/NEWS +++ /dev/null @@ -1,276 +0,0 @@ -Apache CouchDB NEWS -=================== - -For details about backwards incompatible changes, see: - - http://wiki.apache.org/couchdb/Breaking_changes - -Each release section notes when backwards incompatible changes have been made. - -Version 1.0.1 -------------- - - * Fix data corruption bug COUCHDB-844. Please see - http://couchdb.apache.org/notice/1.0.1.html for details. - * Added support for replication via an HTTP/HTTP proxy. - * Fixed various replicator bugs for interop with older CouchDB versions. - * Show fields saved along with _deleted=true. Allows for auditing of deletes. - * Enable basic-auth popup when required to access the server, to prevent - people from getting locked out. - * User interface element for querying stale (cached) views. - -Version 1.0.0 -------------- - - * More efficient header commits. - * Use O_APPEND to save lseeks. - * Faster implementation of pread_iolist(). Further improves performance on - concurrent reads. - * Added authentication caching - * Faster default view collation. - * Added option to include update_seq in view responses. - -Version 0.11.2 --------------- - - * Replicator buxfixes for replicating design documents from secured databases. - * Better error messages on invalid URL requests. - * User documents can now be deleted by admins or the user. - * Avoid potential DOS attack by guarding all creation of atoms. - * Some Futon and JavaScript library bugfixes. - -Version 0.11.1 --------------- - - * Mask passwords in active tasks and logging. - * Update mochijson2 to allow output of BigNums not in float form. - * Added support for X-HTTP-METHOD-OVERRIDE. - * Disable jsonp by default. - * Accept gzip encoded standalone attachments. - * Made max_concurrent_connections configurable. - * Added continuous replication option to Futon. - * Added option to replicating test results anonymously to a community - CouchDB instance. - * Allow creation and deletion of config entries in Futon. - * Fixed various UI issues in Futon. - * Fixed compatibility with the Erlang R14 series. - * Fixed warnings on Linux builds. - * Fixed build error when aclocal needs to be called during the build. - * Require ICU 4.3.1. - * Fixed compatibility with Solaris. - * Added authentication redirect URL to log in clients. 
- * Added authentication caching, to avoid repeated opening and closing of the - users database for each request requiring authentication. - * Made authentication timeout configurable. - * Temporary views are now admin-only resources. - * Don't require a revpos for attachment stubs. - * Make file deletions async to avoid pauses during compaction and db - deletion. - * Fixed for wrong offset when writing headers and converting them to blocks, - only triggered when header is larger than 4k. - * Preserve _revs_limit and instance_start_time after compaction. - * Fixed timeout with large .ini files. - * Added tests for couch.js and jquery.couch.js - * Added various API features to jquery.couch.js - * Faster default view collation. - * Upgrade CommonJS modules support to 1.1.1. - * Added option to include update_seq in view responses. - * Fixed erlang filter funs and normalize filter fun API. - * Fixed hang in view shutdown. - * Refactored various internal APIs related to attachment streaming. - * Fixed hanging replication. - * Fixed keepalive issue. - * Allow global rewrites so system defaults are available in vhosts. - * Allow isolation of databases with vhosts. - * Made the test suite overall more reliable. - -Version 0.11.0 --------------- - -This version is a feature-freeze release candidate for Apache CouchDB 1.0. - - * Fixed CVE-2010-0009: Apache CouchDB Timing Attack Vulnerability. - * Added support for building a Windows installer as part of 'make dist'. - * Added optional 'raw' binary collation for faster view builds where Unicode - collation is not important. - * Improved view index build time by reducing ICU collation callouts. - * Added option to implicitly create replication target databases. - * Improved view information objects. - * Bug fix for partial updates during view builds. - * Bug fix for building couch.app's module list. - * Fixed a problem with statistics timers and system sleep. - * Improved the statistics calculations to use an online moving window - algorithm. - * Adds batching of multiple updating requests, to improve throughput with many - writers. - * Removed the now redundant couch_batch_save module. - * Bug fix for premature termination of chunked responses. - * Improved speed and concurrency of config lookups. - * Fixed an edge case for HTTP redirects during replication. - * Fixed HTTP timeout handling for replication. - * Fixed query parameter handling in OAuth'd replication. - * Fixed a bug preventing mixing languages with lists and views. - * Avoid OS process leaks in lists. - * Avoid leaking file descriptors on automatic replication restarts. - * Various improvements to the Futon UI. - * Provide Content-MD5 header support for attachments. - * Added default cookie-authentication and users db. - * Added per-db reader access control lists. - * Added per-db security object for configuration data in validation functions. - * Added URL Rewriter handler. - * Added proxy authentication handler. - * Added ability to replicate documents by id. - * Added virtual host handling. - * Uses json2.js for JSON serialization compatiblity with native JSON. - * Fixed CVE-2010-0009: Apache CouchDB Timing Attack Vulnerability. - -Version 0.10.2 --------------- - - * Fixed CVE-2010-0009: Apache CouchDB Timing Attack Vulnerability. - -Version 0.10.1 --------------- - - * Fixed test suite to work with build system. - * Fixed a problem with statistics timers and system sleep. - * Fixed an edge case for HTTP redirects during replication. - * Fixed HTTP timeout handling for replication. 
- * Fixed query parameter handling in OAuth'd replication. - * Fixed a bug preventing mixing languages with lists and views. - * Avoid OS process leaks in lists. - -Version 0.10.0 --------------- - -This release contains backwards incompatible changes, please see above for help. - - * General performance improvements. - * View index generation speedups. - * Even more robust storage format. - * Native Erlang Views for high-performance applications. - * More robust push and pull replication. - * Two-legged OAuth support for applications and replication (three-legged in - preparation). - * Cookie authentication. - * API detail improvements. - * Better RFC 2616 (HTTP 1.1) compliance. - * Added modular configuration file directories. - * Miscellaneous improvements to build, system integration, and portability. - -Version 0.9.2 -------------- - - * Remove branch callbacks to allow building couchjs against newer versions of - Spidermonkey. - * Fix replication with 0.10 servers initiated by an 0.9 server. - -Version 0.9.1 -------------- - - * Various bug fixes for the build system, configuration, statistics reporting, - database core, external handlers, Futon interface, HTTP interface, - JavaScript View Server and replicator. - -Version 0.9.0 -------------- - -This release contains backwards incompatible changes, please see above for help. - - * Modular configuration. - * Performance enhancements for document and view access. - * More resilient replication process. - * Replication streams binary attachments. - * Administrator role and basic authentication. - * Document validation functions in design documents. - * Show and list functions for rendering documents and views as developer - controlled content-types. - * External process server module. - * Attachment uploading from Futon. - * Etags for views, lists, shows, document and attachment requests. - * Miscellaneous improvements to build, system integration, and portability. - -Version 0.8.1-incubating ------------------------- - - * Various bug fixes for replication, compaction, the HTTP interface and the - JavaScript View Server. - -Version 0.8.0-incubating ------------------------- - -This release contains backwards incompatible changes, please see above for help. - - * Changed core licensing to the Apache Software License 2.0. - * Refactoring of the core view and storage engines. - * Added support for incremental map/reduce views. - * Changed database file format. - * Many improvements to Futon, the web administration interface. - * Miscellaneous improvements to build, system integration, and portability. - * Swapped out Erlang's inets HTTP server for the Mochiweb HTTP server. - * SpiderMonkey is no longer included with CouchDB, but rather treated as an - external dependency. - * Added bits of awesome. - -Version 0.7.2 -------------- - - * Small changes to build process and `couchdb` command. - * Database server official port is now 5984 TCP/UDP instead of 8888. - -Version 0.7.1 -------------- - - * Small compatibility issue with Firefox 3 fixed. - -Version 0.7.0 -------------- - - * Infrastructure rewritten to use the GNU build system for portability. - * The built-in database browsing tool has been rewritten to provide a much - nicer interface for interacting directly with CouchDB from your web browser. - * XML and Fabric have been replaced with JSON and JavaScript for data - transport and View definitions. - -Version 0.6.0 -------------- - - * A replication facility is now available. 
- * CouchPeek can now create, delete and view documents. - * Building from source is easier and less error prone. - -Version 0.5.0 -------------- - - * A built-in CouchPeek utility. - * A full install kit buildable from a single command. - * A new GNU/Linux version is available. An OS X version is coming soon. - -Version 0.4.0 -------------- - - * Non-existent variables are now nil lists. - * Couch error codes and messages are no longer sent in the HTTP fields, - instead they are exclusively returned in the XML body. This is to avoid HTTP - header parsing problems with oddly formed error messages. - * Returned error messages are now logged at the server at the `info` level to - make general debugging easier. - * Fixed a problem where big table builds caused timeout errors. - * Lots of changes in the low level machinery. Most formulas will continue to - function the same. - * Added full compiler support for extended characters in formula source. - * Support for Perl/Ruby like regular expressions. - * Added `total_rows` and `result_start` attributes to tables. - -Version 0.3.0 -------------- - - * CouchDB now fully supports Unicode and locale specific collation via the ICU - library, both in the Fabric engine and computed tables. - * The `in` operator has been added to Fabric. - * The `startdoc` query string variable specifies the starting document to use - if there are multiple rows with identical startkeys. - * The `skip` query string variable specifies the number of rows to skip before - returning results. The `skip` value must be a positive integer. If used with - a `count` variable the skipped rows aren't counted as output. - * Various changes to the output XML format. diff --git a/NOTICE b/NOTICE deleted file mode 100644 index d547e55f..00000000 --- a/NOTICE +++ /dev/null @@ -1,51 +0,0 @@ -Apache CouchDB -Copyright 2009 The Apache Software Foundation - -This product includes software developed at -The Apache Software Foundation (http://www.apache.org/). - -This product also includes the following third-party components: - - * ac_check_icu.m4 (http://autoconf-archive.cryp.to/ac_check_icu.html) - - Copyright 2008, Akos Maroy - - * ac_check_curl.m4 (http://autoconf-archive.cryp.to/ac_check_curl.html) - - Copyright 2008, Akos Maroy - - * jQuery (http://jquery.com/) - - Copyright 2010, John Resig - - * json2.js (http://www.json.org/) - - In the public domain - - * MochiWeb (http://code.google.com/p/mochiweb/) - - Copyright 2007, Mochi Media Coporation - - * ibrowse (http://github.com/cmullaparthi/ibrowse/tree/master) - - Copyright 2009, Chandrashekhar Mullaparthi - - * Erlang OAuth (http://github.com/tim/erlang-oauth/tree/master) - - Copyright 2009, Tim Fletcher - - * ETap (http://github.com/ngerakines/etap/) - - Copyright 2009, Nick Gerakines - - * mimeparse.js (http://code.google.com/p/mimeparse/) - - Copyright 2009, Chris Anderson - - * base64.js - - Copyright 1999, Masanao Izumo - -* jspec.js (http://visionmedia.github.com/jspec/) - - Copyright 2010 TJ Holowaychuk diff --git a/README b/README deleted file mode 100644 index 540226d3..00000000 --- a/README +++ /dev/null @@ -1,81 +0,0 @@ -Apache CouchDB README -===================== - -Installation ------------- - -For a low-level guide, see: - - INSTALL - -For a high-level guide to Unix-like systems, inc. Mac OS X and Ubuntu, see: - - INSTALL.Unix - -For a high-level guide to Microsoft Windows, see: - - INSTALL.Windows - -Follow the proper instructions to get CouchDB installed on your system. 
- -If you're having problems, skip to the next section. - -Troubleshooting ----------------- - -For troubleshooting, see: - - http://wiki.apache.org/couchdb/Troubleshooting - -If you're getting a cryptic error message, see: - - http://wiki.apache.org/couchdb/Error_messages - -For general help, see: - - http://couchdb.apache.org/community/lists.html - -The mailing lists provide a wealth of support and knowledge for you to tap into. -Feel free to drop by with your questions or discussion. See the official CouchDB -website for more information about our community resources. - - -Running the Testsuite ---------------------- - -Run the testsuite for couch.js and jquery.couch.js by browsing to this site: http://127.0.0.1:5984/_utils/spec/run.html -It should work in at least Firefox >= 3.6 and Safari >= 4.0.4. - -Read more about JSpec here: http://jspec.info/ - -Trouble shooting -~~~~~~~~~~~~~~~~ - - * When you change the specs, but your changes have no effect, manually reload the changed spec file in the browser. - - * When the spec that tests erlang views fails, make sure you have enabled erlang views as described here: - - -Cryptographic Software Notice ------------------------------ - -This distribution includes cryptographic software. The country in which you -currently reside may have restrictions on the import, possession, use, and/or -re-export to another country, of encryption software. BEFORE using any -encryption software, please check your country's laws, regulations and policies -concerning the import, possession, or use, and re-export of encryption software, -to see if this is permitted. See for more -information. - -The U.S. Government Department of Commerce, Bureau of Industry and Security -(BIS), has classified this software as Export Commodity Control Number (ECCN) -5D002.C.1, which includes information security software using or performing -cryptographic functions with asymmetric algorithms. The form and manner of this -Apache Software Foundation distribution makes it eligible for export under the -License Exception ENC Technology Software Unrestricted (TSU) exception (see the -BIS Export Administration Regulations, Section 740.13) for both object code and -source code. - -The following provides more details on the included cryptographic software: - -CouchDB includes a HTTP client (ibrowse) with SSL functionality. diff --git a/THANKS b/THANKS deleted file mode 100644 index ebee4845..00000000 --- a/THANKS +++ /dev/null @@ -1,69 +0,0 @@ -Apache CouchDB THANKS -===================== - -A number of people have contributed to Apache CouchDB by reporting problems, -suggesting improvements or submitting changes. 
Some of these people are: - - * William Beh - * Dirk Schalge - * Roger Leigh - * Sam Ruby - * Carlos Valiente - * Till Klampaeckel - * Jim Lindley - * Yoan Blanc - * Michael Gottesman - * Mark Baran - * Michael Hendricks - * Antony Blakey - * Paul Carey - * Hunter Morris - * Brian Palmer - * Maximillian Dornseif - * Eric Casteleijn - * Maarten Thibaut - * Florian Ebeling - * Volker Mische - * Brian Candler - * Brad Anderson - * Nick Gerakines - * Bob Dionne - * Kevin Ilchmann Jørgensen - * Dirkjan Ochtman - * Sebastian Cohnen - * Sven Helmberger - * Dan Walters - * Curt Arnold - * Gustavo Niemeyer - * Joshua Bronson - * Kostis Sagonas - * Matthew Hooker - * Ilia Cheishvili - * Lena Herrmann - * Jack Moffit - * Damjan Georgievski - * Jan Kassens - * James Marca - * Matt Goodall - * Joel Clark - * Matt Lyon - * mikeal - * Randall Leeds - * Joscha Feth - * Jarrod Roberson - * Jae Kwon - * Gavin Sherry - * Timothy Smith - * Martin Haaß - * Hans Ulrich Niedermann - * Jason Smith - * Dmitry Unkovsky - * Zachary Zolton - * Brian Jenkins - * Paul Bonser - * Caleb Land - * Juhani Ränkimies - * Lim Yue Chuan - * David Davis - -For a list of authors see the `AUTHORS` file. diff --git a/apps/couch/AUTHORS b/apps/couch/AUTHORS new file mode 100644 index 00000000..b1a3559e --- /dev/null +++ b/apps/couch/AUTHORS @@ -0,0 +1,20 @@ +Apache CouchDB AUTHORS +====================== + +A number of people have contributed directly to Apache CouchDB by writing +documentation or developing software. Some of these people are: + + * Damien Katz + * Jan Lehnardt + * Noah Slater + * Christopher Lenz + * J. Chris Anderson + * Paul Joseph Davis + * Adam Kocoloski + * Jason Davies + * Mark Hammond + * Benoît Chesneau + * Filipe Manana + * Robert Newson + +For a list of other credits see the `THANKS` file. diff --git a/apps/couch/BUGS b/apps/couch/BUGS new file mode 100644 index 00000000..8cd1d161 --- /dev/null +++ b/apps/couch/BUGS @@ -0,0 +1,6 @@ +Apache CouchDB BUGS +=================== + +Please see the [documentation][1] on how to report bugs with Apache CouchDB. + +[1] http://couchdb.apache.org/community/issues.html diff --git a/apps/couch/CHANGES b/apps/couch/CHANGES new file mode 100644 index 00000000..08458f10 --- /dev/null +++ b/apps/couch/CHANGES @@ -0,0 +1,582 @@ +Apache CouchDB CHANGES +====================== + +Version 1.0.1 +------------- + +Storage System: + + * Fix data corruption bug COUCHDB-844. Please see + http://couchdb.apache.org/notice/1.0.1.html for details. + +Replicator: + + * Added support for replication via an HTTP/HTTP proxy. + * Fix pull replication of attachments from 0.11 to 1.0.x. + * Make the _changes feed work with non-integer seqnums. + +HTTP Interface: + + * Expose `committed_update_seq` for monitoring purposes. + * Show fields saved along with _deleted=true. Allows for auditing of deletes. + * More robust Accept-header detection. + +Authentication: + + * Enable basic-auth popup when required to access the server, to prevent + people from getting locked out. + +Futon: + + * User interface element for querying stale (cached) views. + +Build and System Integration: + + * Included additional source files for distribution. + +Version 1.0.0 +------------- + +Security: + + * Added authentication caching, to avoid repeated opening and closing of the + users database for each request requiring authentication. + +Storage System: + + * Small optimization for reordering result lists. + * More efficient header commits. + * Use O_APPEND to save lseeks.
+ * Faster implementation of pread_iolist(). Further improves performance on + concurrent reads. + +View Server: + + * Faster default view collation. + * Added option to include update_seq in view responses. + +Version 0.11.2 +-------------- + +Replicator: + + * Fix bug when pushing design docs by non-admins, which was hanging the + replicator for no good reason. + * Fix bug when pulling design documents from a source that requires + basic-auth. + +HTTP Interface: + + * Better error messages on invalid URL requests. + +Authentication: + + * User documents can now be deleted by admins or the user. + +Security: + + * Avoid potential DOS attack by guarding all creation of atoms. + +Futon: + + * Add some Futon files that were missing from the Makefile. + +Version 0.11.1 +-------------- + +HTTP Interface: + + * Mask passwords in active tasks and logging. + * Update mochijson2 to allow output of BigNums not in float form. + * Added support for X-HTTP-METHOD-OVERRIDE. + * Better error message for database names. + * Disable jsonp by default. + * Accept gzip encoded standalone attachments. + * Made max_concurrent_connections configurable. + * Made changes API more robust. + * Send newly generated document rev to callers of an update function. + +Futon: + + * Use "expando links" for over-long document values in Futon. + * Added continuous replication option. + * Added option to replicate test results anonymously to a community + CouchDB instance. + * Allow creation and deletion of config entries. + * Fixed display issues with doc ids that have escaped characters. + * Fixed various UI issues. + +Build and System Integration: + + * Output of `couchdb --help` has been improved. + * Fixed compatibility with the Erlang R14 series. + * Fixed warnings on Linux builds. + * Fixed build error when aclocal needs to be called during the build. + * Require ICU 4.3.1. + * Fixed compatibility with Solaris. + +Security: + + * Added authentication redirect URL to log in clients. + * Fixed query parameter encoding issue in oauth.js. + * Made authentication timeout configurable. + * Temporary views are now admin-only resources. + +Storage System: + + * Don't require a revpos for attachment stubs. + * Added checking to ensure when a revpos is sent with an attachment stub, + it's correct. + * Make file deletions async to avoid pauses during compaction and db + deletion. + * Fixed a wrong offset when writing headers and converting them to blocks; + only triggered when the header is larger than 4k. + * Preserve _revs_limit and instance_start_time after compaction. + +Configuration System: + + * Fixed timeout with large .ini files. + +JavaScript Clients: + + * Added tests for couch.js and jquery.couch.js. + * Added changes handler to jquery.couch.js. + * Added cache busting to jquery.couch.js if the user agent is msie. + * Added support for multi-document-fetch (via _all_docs) to jquery.couch.js. + * Added attachment versioning to jquery.couch.js. + * Added option to control ensure_full_commit to jquery.couch.js. + * Added list functionality to jquery.couch.js. + * Fixed issues where bulkSave() wasn't sending a POST body. + +View Server: + + * Provide a UUID to update functions (and all other functions) that they can + use to create new docs. + * Upgrade CommonJS modules support to 1.1.1. + * Fixed Erlang filter funs and normalized the filter fun API. + * Fixed hang in view shutdown. + +Log System: + + * Log HEAD requests as HEAD, not GET. + * Keep massive JSON blobs out of the error log. + * Fixed a timeout issue.
+ +Replication System: + + * Refactored various internal APIs related to attachment streaming. + * Fixed hanging replication. + * Fixed keepalive issue. + +URL Rewriter & Vhosts: + + * Allow more complex keys in rewriter. + * Allow global rewrites so system defaults are available in vhosts. + * Allow isolation of databases with vhosts. + * Fix issue with passing variables to query parameters. + +Test Suite: + + * Made the test suite overall more reliable. + +Version 0.11.0 +-------------- + +Security: + + * Fixed CVE-2010-0009: Apache CouchDB Timing Attack Vulnerability. + * Added default cookie-authentication and users database. + * Added Futon user interface for user signup and login. + * Added per-database reader access control lists. + * Added per-database security object for configuration data in validation + functions. + * Added proxy authentication handler. + +HTTP Interface: + + * Provide Content-MD5 header support for attachments. + * Added URL Rewriter handler. + * Added virtual host handling. + +View Server: + + * Added optional 'raw' binary collation for faster view builds where Unicode + collation is not important. + * Improved view index build time by reducing ICU collation callouts. + * Improved view information objects. + * Bug fix for partial updates during view builds. + * Move query server to a design-doc based protocol. + * Use json2.js for JSON serialization for compatibility with native JSON. + * Major refactoring of couchjs to lay the groundwork for disabling cURL + support. The new HTTP interaction acts like a synchronous XHR. Example usage + of the new system is in the JavaScript CLI test runner. + +Replication: + + * Added option to implicitly create replication target databases. + * Avoid leaking file descriptors on automatic replication restarts. + * Added option to replicate a list of documents by id. + * Allow continuous replication to be cancelled. + +Storage System: + + * Adds batching of multiple updating requests, to improve throughput with many + writers. Removed the now redundant couch_batch_save module. + * Adds configurable compression of attachments. + +Runtime Statistics: + + * Statistics are now calculated for a moving window instead of non-overlapping + timeframes. + * Fixed a problem with statistics timers and system sleep. + * Moved statistic names to a term file in the priv directory. + +Futon: + + * Added a button for view compaction. + * JSON strings are now displayed as-is in the document view, without the escaping of + new-lines and quotes. That dramatically improves readability of multi-line + strings. + * Same goes for editing of JSON string values. When a change to a field value is + submitted and the value is not valid JSON, it is assumed to be a string. This + improves editing of multi-line strings a lot. + * Hitting tab in textareas no longer moves focus to the next form field, but simply + inserts a tab character at the current caret position. + * Fixed some font declarations. + +Build and System Integration: + + * Updated and improved source documentation. + * Fixed distribution preparation for building on Mac OS X. + * Added support for building a Windows installer as part of 'make dist'. + * Bug fix for building couch.app's module list. + * ETap tests are now run during make distcheck. This included a number of + updates to the build system to properly support VPATH builds. + * Gavin McDonald set up a build-bot instance.
More info can be found at + http://ci.apache.org/buildbot.html + +Version 0.10.1 +-------------- + +Replicator: + + * Stability enhancements regarding redirects, timeouts, OAuth. + +Query Server: + + * Avoid process leaks + * Allow list and view to span languages + +Stats: + + * Eliminate new process flood on system wake + +Build and System Integration: + + * Test suite now works with the distcheck target. + +Version 0.10.0 +-------------- + +Storage Format: + + * Add move headers with checksums to the end of database files for extra robust + storage and faster storage. + +View Server: + + * Added native Erlang views for high-performance applications. + +HTTP Interface: + + * Added optional cookie-based authentication handler. + * Added optional two-legged OAuth authentication handler. + +Build and System Integration: + + * Changed `couchdb` script configuration options. + * Added default.d and local.d configuration directories to load sequence. + + +Version 0.9.2 +------------- + +Replication: + + * Fix replication with 0.10 servers initiated by an 0.9 server (COUCHDB-559). + +Build and System Integration: + + * Remove branch callbacks to allow building couchjs against newer versions of + Spidermonkey. + +Version 0.9.1 +------------- + +Build and System Integration: + + * PID file directory is now created by the SysV/BSD daemon scripts. + * Fixed the environment variables shown by the configure script. + * Fixed the build instructions shown by the configure script. + * Updated ownership and permission advice in `README` for better security. + +Configuration and stats system: + + * Corrected missing configuration file error message. + * Fixed incorrect recording of request time. + +Database Core: + + * Document validation for underscore prefixed variables. + * Made attachment storage less sparse. + * Fixed problems when a database with delayed commits pending is considered + idle, and subject to losing changes when shutdown. (COUCHDB-334) + +External Handlers: + + * Fix POST requests. + +Futon: + + * Redirect when loading a deleted view URI from the cookie. + +HTTP Interface: + + * Attachment requests respect the "rev" query-string parameter. + +JavaScript View Server: + + * Useful JavaScript Error messages. + +Replication: + + * Added support for Unicode characters transmitted as UTF-16 surrogate pairs. + * URL-encode attachment names when necessary. + * Pull specific revisions of an attachment, instead of just the latest one. + * Work around a rare chunk-merging problem in ibrowse. + * Work with documents containing Unicode characters outside the Basic + Multilingual Plane. + +Version 0.9.0 +------------- + +Futon Utility Client: + + * Added pagination to the database listing page. + * Implemented attachment uploading from the document page. + * Added page that shows the current configuration, and allows modification of + option values. + * Added a JSON "source view" for document display. + * JSON data in view rows is now syntax highlighted. + * Removed the use of an iframe for better integration with browser history and + bookmarking. + * Full database listing in the sidebar has been replaced by a short list of + recent databases. + * The view editor now allows selection of the view language if there is more + than one configured. + * Added links to go to the raw view or document URI. + * Added status page to display currently running tasks in CouchDB. + * JavaScript test suite split into multiple files. + * Pagination for reduce views. 
+ +Design Document Resource Paths: + + * Added httpd_design_handlers config section. + * Moved _view to httpd_design_handlers. + * Added ability to render documents as non-JSON content-types with _show and + _list functions, which are also httpd_design_handlers. + +HTTP Interface: + + * Added client side UUIDs for idempotent document creation + * HTTP COPY for documents + * Streaming of chunked attachment PUTs to disk + * Remove negative count feature + * Add include_docs option for view queries + * Add multi-key view post for views + * Query parameter validation + * Use stale=ok to request potentially cached view index + * External query handler module for full-text or other indexers. + * Etags for attachments, views, shows and lists + * Show and list functions for rendering documents and views as developer + controlled content-types. + * Attachment names may use slashes to allow uploading of nested directories + (useful for static web hosting). + * Option for a view to run over design documents. + * Added newline to JSON responses. Closes bike-shed. + +Replication: + + * Using ibrowse. + * Checkpoint replications so failures are less expensive. + * Automatically retry of failed replications. + * Stream attachments in pull-replication. + +Database Core: + + * Faster B-tree implementation. + * Changed internal JSON term format. + * Improvements to Erlang VM interactions under heavy load. + * User context and administrator role. + * Update validations with design document validation functions. + * Document purge functionality. + * Ref-counting for database file handles. + +Build and System Integration: + + * The `couchdb` script now supports system chainable configuration files. + * The Mac OS X daemon script now redirects STDOUT and STDERR like SysV/BSD. + * The build and system integration have been improved for portability. + * Added COUCHDB_OPTIONS to etc/default/couchdb file. + * Remove COUCHDB_INI_FILE and COUCHDB_PID_FILE from etc/default/couchdb file. + * Updated `configure.ac` to manually link `libm` for portability. + * Updated `configure.ac` to extended default library paths. + * Removed inets configuration files. + * Added command line test runner. + * Created dev target for make. + +Configuration and stats system: + + * Separate default and local configuration files. + * HTTP interface for configuration changes. + * Statistics framework with HTTP query API. + +Version 0.8.1-incubating +------------------------ + +Database Core: + + * Fix for replication problems where the write queues can get backed up if the + writes aren't happening fast enough to keep up with the reads. For a large + replication, this can exhaust memory and crash, or slow down the machine + dramatically. The fix keeps only one document in the write queue at a time. + * Fix for databases sometimes incorrectly reporting that they contain 0 + documents after compaction. + * CouchDB now uses ibrowse instead of inets for its internal HTTP client + implementation. This means better replication stability. + +HTTP Interface: + + * Fix for chunked responses where chunks were always being split into multiple + TCP packets, which caused problems with the test suite under Safari, and in + some other cases. + * Fix for an invalid JSON response body being returned for some kinds of + views. (COUCHDB-84) + * Fix for connections not getting closed after rejecting a chunked request. + (COUCHDB-55) + * CouchDB can now be bound to IPv6 addresses. + * The HTTP `Server` header now contains the versions of CouchDB and Erlang. 
+ +JavaScript View Server: + + * Sealing of documents has been disabled due to an incompatibility with + SpiderMonkey 1.9. + * Improve error handling for undefined values emitted by map functions. + (COUCHDB-83) + +Build and System Integration: + + * The `couchdb` script no longer uses `awk` for configuration checks as this + was causing portability problems. + * Updated `sudo` example in `README` to use the `-i` option; this fixes + problems when invoking from a directory the `couchdb` user cannot access. + +Futon: + + * The view selector dropdown should now work in Opera and Internet Explorer + even when it includes optgroups for design documents. (COUCHDB-81) + +Version 0.8.0-incubating +------------------------ + +Database Core: + + * The view engine has been completely decoupled from the storage engine. Index + data is now stored in separate files, and the format of the main database + file has changed. + * Databases can now be compacted to reclaim space used for deleted documents + and old document revisions. + * Support for incremental map/reduce views has been added. + * To support map/reduce, the structure of design documents has changed. View + values are now JSON objects containing at least a `map` member, and + optionally a `reduce` member. + * View servers are now identified by name (for example `javascript`) instead of + by media type. + * Automatically generated document IDs are now based on proper UUID generation + using the crypto module. + * The field `content-type` in the JSON representation of attachments has been + renamed to `content_type` (underscore). + +HTTP Interface: + + * CouchDB now uses MochiWeb instead of inets for the HTTP server + implementation. Among other things, this means that the extra configuration + files needed for inets (such as `couch_httpd.conf`) are no longer used. + * The HTTP interface now completely supports the `HEAD` method. (COUCHDB-3) + * Improved compliance of `Etag` handling with the HTTP specification. + (COUCHDB-13) + * Etags are no longer included in responses to document `GET` requests that + include query string parameters causing the JSON response to change without + the revision or the URI having changed. + * The bulk document update API has changed slightly on both the request and the + response side. In addition, bulk updates are now atomic. + * CouchDB now uses `TCP_NODELAY` to fix performance problems with persistent + connections on some platforms due to nagling. + * Including a `?descending=false` query string parameter in requests to views + no longer raises an error. + * Requests to unknown top-level reserved URLs (anything with a leading + underscore) now return an `unknown_private_path` error instead of the + confusing `illegal_database_name`. + * The Temporary view handling now expects a JSON request body, where the JSON + is an object with at least a `map` member, and optional `reduce` and + `language` members. + * Temporary views no longer determine the view server based on the Content-Type + header of the `POST` request, but rather by looking for a `language` member + in the JSON body of the request. + * The status code of responses to `DELETE` requests is now 200 to reflect + that the deletion is performed synchronously. + +JavaScript View Server: + + * SpiderMonkey is no longer included with CouchDB, but rather treated as a + normal external dependency. A simple C program (`_couchjs`) is provided that + links against an existing SpiderMonkey installation and uses the interpreter + embedding API.
+ * View functions using the default JavaScript view server can now do logging + using the global `log(message)` function. Log messages are directed into the + CouchDB log at `INFO` level. (COUCHDB-59) + * The global `map(key, value)` function made available to view code has been + renamed to `emit(key, value)`. + * Fixed handling of exceptions raised by view functions. + +Build and System Integration: + + * CouchDB can automatically respawn following a server crash. + * Database server no longer refuses to start with a stale PID file. + * System logrotate configuration provided. + * Improved handling of ICU shared libraries. + * The `couchdb` script now automatically enables SMP support in Erlang. + * The `couchdb` and `couchjs` scripts have been improved for portability. + * The build and system integration have been improved for portability. + +Futon: + + * When adding a field to a document, Futon now just adds a field with an + autogenerated name instead of prompting for the name with a dialog. The name + is automatically put into edit mode so that it can be changed immediately. + * Fields are now sorted alphabetically by name when a document is displayed. + * Futon can be used to create and update permanent views. + * The maximum number of rows to display per page on the database page can now + be adjusted. + * Futon now uses the XMLHTTPRequest API asynchronously to communicate with the + CouchDB HTTP server, so that most operations no longer block the browser. + * View results sorting can now be switched between ascending and descending by + clicking on the `Key` column header. + * Fixed a bug where documents that contained a `@` character could not be + viewed. (COUCHDB-12) + * The database page now provides a `Compact` button to trigger database + compaction. (COUCHDB-38) + * Fixed potential double encoding of document IDs and other URI segments in + many instances. (COUCHDB-39) + * Improved display of attachments. + * The JavaScript Shell has been removed due to unresolved licensing issues. diff --git a/apps/couch/DEVELOPERS b/apps/couch/DEVELOPERS new file mode 100644 index 00000000..a7a6926e --- /dev/null +++ b/apps/couch/DEVELOPERS @@ -0,0 +1,95 @@ +Apache CouchDB DEVELOPERS +========================= + +Only follow these instructions if you are building from a source checkout. + +If you're unsure what this means, ignore this document. + +Dependencies +------------ + +You will need the following installed: + + * GNU Automake (>=1.6.3) (http://www.gnu.org/software/automake/) + * GNU Autoconf (>=2.59) (http://www.gnu.org/software/autoconf/) + * GNU Libtool (http://www.gnu.org/software/libtool/) + * GNU help2man (http://www.gnu.org/software/help2man/) + +The `help2man` tool is optional, but will generate `man` pages for you. + +Debian-based (inc. Ubuntu) Systems +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +You can install the dependencies by running: + + apt-get install automake autoconf libtool help2man + +Be sure to update the version numbers to match your system's available packages. + +Mac OS X +~~~~~~~~ + +You can install the dependencies by running: + + port install automake autoconf libtool help2man + +You will need MacPorts installed to use the `port` command. + +Bootstrapping +------------- + +Bootstrap the pristine source by running: + + ./bootstrap + +You must repeat this step every time you update your source checkout.
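Taken together, a minimal from-checkout build on a Unix-like system looks roughly like the sketch below; it simply chains the bootstrap, configure, and test steps described in this file (configure is covered under Releasing), and assumes the dependencies listed above are already installed:

    ./bootstrap
    ./configure
    make check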
+ +Testing +------- + +Check the test suite by running: + + make check + +Generate a coverage report by running: + + make cover + +Please report any problems to the developer's mailing list. + +Releasing +--------- + +Unix-like Systems +~~~~~~~~~~~~~~~~~ + +Configure the source by running: + + ./configure + +Prepare the release artefacts by running: + + make distcheck + +You can prepare signed release artefacts by running: + + make distsign + +The release artefacts can be found in the root source directory. + +Microsoft Windows +~~~~~~~~~~~~~~~~~ + +Configure the source by running: + + ./configure + +Prepare the release artefacts by running: + + make dist + +The release artefacts can be found in the `etc/windows` directory. + +Until the build system has been improved, you must make sure that you run this +command from a clean source checkout. If you do not, your test database and log +files will be bundled up in the release artefact. diff --git a/apps/couch/INSTALL.Unix b/apps/couch/INSTALL.Unix new file mode 100644 index 00000000..768e3846 --- /dev/null +++ b/apps/couch/INSTALL.Unix @@ -0,0 +1,231 @@ +Apache CouchDB README.Unix +========================== + +A high-level guide to Unix-like systems, inc. Mac OS X and Ubuntu. + +Dependencies +------------ + +You will need the following installed: + + * Erlang OTP (>=R12B5) (http://erlang.org/) + * ICU (http://icu.sourceforge.net/) + * OpenSSL (http://www.openssl.org/) + * Mozilla SpiderMonkey (1.8) (http://www.mozilla.org/js/spidermonkey/) + * libcurl (http://curl.haxx.se/libcurl/) + * GNU Make (http://www.gnu.org/software/make/) + * GNU Compiler Collection (http://gcc.gnu.org/) + +It is recommended that you install Erlang OTP R12B-5 or above where possible. + +Ubuntu +~~~~~~ + +See + + http://wiki.apache.org/couchdb/Installing_on_Ubuntu + +for updated instructions on how to install on Ubuntu. + +Debian-based Systems +~~~~~~~~~~~~~~~~~~~~ + +You can install the build tools by running: + + sudo apt-get install build-essential + +You can install the other dependencies by running: + + sudo apt-get install erlang libicu-dev libmozjs-dev libcurl4-openssl-dev + +Be sure to update the version numbers to match your system's available packages. + +Mac OS X +~~~~~~~~ + +You can install the build tools by running: + + open /Applications/Installers/Xcode\ Tools/XcodeTools.mpkg + +You can install the other dependencies by running: + + sudo port install icu erlang spidermonkey curl + +You will need MacPorts installed to use the `port` command. + +Installing +---------- + +Once you have satisfied the dependencies you should run: + + ./configure + +This script will configure CouchDB to be installed into `/usr/local` by default. + +If you wish to customise the installation, pass `--help` to this script. + +If everything was successful you should see the following message: + + You have configured Apache CouchDB, time to relax. + +Relax. + +To install CouchDB you should run: + + make && sudo make install + +You only need to use `sudo` if you're installing into a system directory. + +Try `gmake` if `make` is giving you any problems. + +If everything was successful you should see the following message: + + You have installed Apache CouchDB, time to relax. + +Relax. + +First Run +--------- + +You can start the CouchDB server by running: + + sudo -i -u couchdb couchdb + +This uses the `sudo` command to run the `couchdb` command as the `couchdb` user. 
+ +When CouchDB starts it should eventually display the following message: + + Apache CouchDB has started, time to relax. + +Relax. + +To check that everything has worked, point your web browser to: + + http://127.0.0.1:5984/_utils/index.html + +From here you should run the test suite. + +Security Considerations +----------------------- + +You should create a special `couchdb` user for CouchDB. + +On many Unix-like systems you can run: + + adduser --system \ + --home /usr/local/var/lib/couchdb \ + --no-create-home \ + --shell /bin/bash \ + --group --gecos \ + "CouchDB Administrator" couchdb + +On Mac OS X you can use the Workgroup Manager to create users: + + http://www.apple.com/support/downloads/serveradmintools1047.html + +You must make sure that: + + * The user has a working POSIX shell + + * The user's home directory is `/usr/local/var/lib/couchdb` + +You can test this by: + + * Trying to log in as the `couchdb` user + + * Running `pwd` and checking the present working directory + +Change the ownership of the CouchDB directories by running: + + chown -R couchdb:couchdb /usr/local/etc/couchdb + chown -R couchdb:couchdb /usr/local/var/lib/couchdb + chown -R couchdb:couchdb /usr/local/var/log/couchdb + chown -R couchdb:couchdb /usr/local/var/run/couchdb + +Change the permission of the CouchDB directories by running: + + chmod 0770 /usr/local/etc/couchdb + chmod 0770 /usr/local/var/lib/couchdb + chmod 0770 /usr/local/var/log/couchdb + chmod 0770 /usr/local/var/run/couchdb + +Running as a Daemon +------------------- + +SysV/BSD-style Systems +~~~~~~~~~~~~~~~~~~~~~~ + +You can use the `couchdb` init script to control the CouchDB daemon. + +On SysV-style systems, the init script will be installed into: + + /usr/local/etc/init.d + +On BSD-style systems, the init script will be installed into: + + /usr/local/etc/rc.d + +We use the `[init.d|rc.d]` notation to refer to both of these directories. + +You can control the CouchDB daemon by running: + + /usr/local/etc/[init.d|rc.d]/couchdb [start|stop|restart|status] + +If you wish to configure how the init script works, you can edit: + + /usr/local/etc/default/couchdb + +Comment out the `COUCHDB_USER` setting if you're running as a non-superuser. + +To start the daemon on boot, copy the init script to: + + /etc/[init.d|rc.d] + +You should then configure your system to run the init script automatically. + +You may be able to run: + + sudo update-rc.d couchdb defaults + +If this fails, consult your system documentation for more information. + +A `logrotate` configuration is installed into: + + /usr/local/etc/logrotate.d/couchdb + +Consult your `logrotate` documentation for more information. + +It is critical that the CouchDB logs are rotated so as not to fill your disk. + +Mac OS X +~~~~~~~~ + +You can use the `launchctl` command to control the CouchDB daemon. + +You can load the configuration by running: + + sudo launchctl load \ + /usr/local/Library/LaunchDaemons/org.apache.couchdb.plist + +You can stop the CouchDB daemon by running: + + sudo launchctl unload \ + /usr/local/Library/LaunchDaemons/org.apache.couchdb.plist + +You can start CouchDB by running: + + sudo launchctl start org.apache.couchdb + +You can restart CouchDB by running: + + sudo launchctl stop org.apache.couchdb + +You can edit the launchd configuration by running: + + open /usr/local/Library/LaunchDaemons/org.apache.couchdb.plist + +To start the daemon on boot, copy the configuration file to: + + /Library/LaunchDaemons + +Consult your system documentation for more information. 
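Whichever start-up method you use, you can also check the server from a terminal rather than a browser. This is a minimal sanity check, assuming the default bind address and port and that the `curl` command-line tool is available:

    curl http://127.0.0.1:5984/

A healthy server replies with a short JSON greeting similar to:

    {"couchdb":"Welcome","version":"..."}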
diff --git a/apps/couch/INSTALL.Windows b/apps/couch/INSTALL.Windows new file mode 100644 index 00000000..5c4a9587 --- /dev/null +++ b/apps/couch/INSTALL.Windows @@ -0,0 +1,148 @@ +Apache CouchDB README.Windows +============================== + +For a high-level guide to Microsoft Windows. + +Dependencies +------------ + +You will need the following installed: + + * Erlang OTP (>=R12B5) (http://erlang.org/) + * ICU (http://icu.sourceforge.net/) + * OpenSSL (http://www.openssl.org/) + * Mozilla SpiderMonkey (1.8) (http://www.mozilla.org/js/spidermonkey/) + * libcurl (http://curl.haxx.se/libcurl/) + * Cygwin (http://www.cygwin.com/) + * Visual Studio 2008 (http://msdn.microsoft.com/en-gb/vstudio/default.aspx) + +General Notes +------------- + + * When installing Erlang, you must build it from source. + + The CouchDB build makes use of a number of the Erlang build scripts. + + * When installing ICU, select the binaries built with Visual Studio 2008. + + * When installing Cygwin, be sure to select all the `development` tools. + + * When installing libcurl, be sure to install by hand. + + The Cygwin binaries are incompatible and will not work with Erlang. + +Setting Up Cygwin +----------------- + +Before starting any Cygwin terminals, run: + + set CYGWIN=nontsec + +To set up your environment, run: + + [VS_BIN]/vcvars32.bat + +Replace [VS_BIN] with the path to your Visual Studio `bin` directory. + +You must check that: + + * The `which link` command points to the Microsoft linker. + + * The `which cl` command points to the Microsoft compiler. + +If you do not do this, the ones found in `/usr/bin` may be used instead. + +Building Erlang +--------------- + +You must include OpenSSL. + +However, you can skip the GUI tools by running: + + echo "skipping gs" > lib/gs/SKIP + + echo "skipping ic" > lib/ic/SKIP + +Follow the rest of the Erlang instructions as described. + +After running: + + ./otp_build release -a + +You should run: + + ./release/win32/Install.exe + +This will set up the release/win32/bin directory correctly. + +To set up your environment for building CouchDB, run: + + eval `./otp_build env_win32` + +To set up the `ERL_TOP` environment variable, run: + + export ERL_TOP=[ERL_TOP] + +Replace `[ERL_TOP]` with the Erlang source directory name. + +Remember to use `/cygdrive/c/` instead of `c:/` as the directory prefix. + +To set up your path, run: + + export PATH=$ERL_TOP/release/win32/erts-5.7.2/bin:$PATH + +If everything was successful, you should be ready to build CouchDB. + +Relax. + +Building CouchDB +---------------- + +Once you have satisfied the dependencies you should run: + + ./configure \ + --with-js-include=/cygdrive/c/path_to_seamonkey_include \ + --with-js-lib=/cygdrive/c/path_to_seamonkey_lib \ + --with-win32-icu-binaries=/cygdrive/c/path_to_icu_binaries_root \ + --with-erlang=$ERL_TOP/release/win32/usr/include \ + --with-win32-curl=/cygdrive/c/path/to/curl/root/directory \ + --with-openssl-bin-dir=/cygdrive/c/openssl/bin \ + --with-msvc-redist-dir=/cygdrive/c/dir/with/vcredist_platform_executable \ + --prefix=$ERL_TOP/release/win32 + +This command could take a while to complete. + +If everything was successful you should see the following message: + + You have configured Apache CouchDB, time to relax. + +Relax. + +To install CouchDB you should run: + + make install + +If everything was successful you should see the following message: + + You have installed Apache CouchDB, time to relax. + +Relax. 
+ +First Run +--------- + +You can start the CouchDB server by running: + + $ERL_TOP/release/win32/bin/couchdb.bat + +When CouchDB starts it should eventually display the following message: + + Apache CouchDB has started, time to relax. + +Relax. + +To check that everything has worked, point your web browser to: + + http://127.0.0.1:5984/_utils/index.html + +From here you should run the test suite. diff --git a/apps/couch/LICENSE b/apps/couch/LICENSE new file mode 100644 index 00000000..20a425b1 --- /dev/null +++ b/apps/couch/LICENSE @@ -0,0 +1,371 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + +Apache CouchDB Subcomponents + +The Apache CouchDB project includes a number of subcomponents with separate +copyright notices and license terms. Your use of the code for the these +subcomponents is subject to the terms and conditions of the following licenses. + +For the m4/ac_check_icu.m4 component: + + Copyright (c) 2005 Akos Maroy + + Copying and distribution of this file, with or without modification, are + permitted in any medium without royalty provided the copyright notice + and this notice are preserved. + +For the share/www/script/jquery.js component: + + Copyright (c) 2009 John Resig, http://jquery.com/ + + Permission is hereby granted, free of charge, to any person obtaining + a copy of this software and associated documentation files (the + "Software"), to deal in the Software without restriction, including + without limitation the rights to use, copy, modify, merge, publish, + distribute, sublicense, and/or sell copies of the Software, and to + permit persons to whom the Software is furnished to do so, subject to + the following conditions: + + The above copyright notice and this permission notice shall be + included in all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF + MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND + NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE + LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION + OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION + WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +For the share/www/script/jquery.form.js component: + + http://malsup.com/jquery/form/ + + Permission is hereby granted, free of charge, to any person obtaining + a copy of this software and associated documentation files (the + "Software"), to deal in the Software without restriction, including + without limitation the rights to use, copy, modify, merge, publish, + distribute, sublicense, and/or sell copies of the Software, and to + permit persons to whom the Software is furnished to do so, subject to + the following conditions: + + The above copyright notice and this permission notice shall be + included in all copies or substantial portions of the Software. 
+ + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF + MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND + NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE + LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION + OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION + WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +For the share/www/script/json2.js component: + + Public Domain + + No warranty expressed or implied. Use at your own risk. + +For the src/mochiweb component: + + Copyright (c) 2007 Mochi Media, Inc. + + Permission is hereby granted, free of charge, to any person obtaining + a copy of this software and associated documentation files (the + "Software"), to deal in the Software without restriction, including + without limitation the rights to use, copy, modify, merge, publish, + distribute, sublicense, and/or sell copies of the Software, and to + permit persons to whom the Software is furnished to do so, subject to + the following conditions: + + The above copyright notice and this permission notice shall be + included in all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF + MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND + NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE + LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION + OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION + WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +For the src/ibrowse component: + + Copyright (c) 2006, Chandrashekhar Mullaparthi + All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + * Neither the name of the T-Mobile nor the names of its contributors may be + used to endorse or promote products derived from this software without + specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND + ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED + WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR + ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES + (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; + LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON + ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS + SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +For the src/erlang-oauth component: + + Copyright (c) 2008-2009 Tim Fletcher + + Permission is hereby granted, free of charge, to any person + obtaining a copy of this software and associated documentation + files (the "Software"), to deal in the Software without + restriction, including without limitation the rights to use, + copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the + Software is furnished to do so, subject to the following + conditions: + + The above copyright notice and this permission notice shall be + included in all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES + OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND + NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT + HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, + WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR + OTHER DEALINGS IN THE SOFTWARE. + +For the src/etap component: + + Copyright (c) 2008-2009 Nick Gerakines + + Permission is hereby granted, free of charge, to any person + obtaining a copy of this software and associated documentation + files (the "Software"), to deal in the Software without + restriction, including without limitation the rights to use, + copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the + Software is furnished to do so, subject to the following + conditions: + + The above copyright notice and this permission notice shall be + included in all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES + OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND + NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT + HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, + WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR + OTHER DEALINGS IN THE SOFTWARE. + diff --git a/apps/couch/NEWS b/apps/couch/NEWS new file mode 100644 index 00000000..bd6b5d2b --- /dev/null +++ b/apps/couch/NEWS @@ -0,0 +1,276 @@ +Apache CouchDB NEWS +=================== + +For details about backwards incompatible changes, see: + + http://wiki.apache.org/couchdb/Breaking_changes + +Each release section notes when backwards incompatible changes have been made. + +Version 1.0.1 +------------- + + * Fix data corruption bug COUCHDB-844. Please see + http://couchdb.apache.org/notice/1.0.1.html for details. + * Added support for replication via an HTTP/HTTP proxy. + * Fixed various replicator bugs for interop with older CouchDB versions. + * Show fields saved along with _deleted=true. Allows for auditing of deletes. + * Enable basic-auth popup when required to access the server, to prevent + people from getting locked out. + * User interface element for querying stale (cached) views. + +Version 1.0.0 +------------- + + * More efficient header commits. + * Use O_APPEND to save lseeks. + * Faster implementation of pread_iolist(). Further improves performance on + concurrent reads. + * Added authentication caching + * Faster default view collation. + * Added option to include update_seq in view responses. 
+
+Version 0.11.2
+--------------
+
+ * Replicator bugfixes for replicating design documents from secured databases.
+ * Better error messages on invalid URL requests.
+ * User documents can now be deleted by admins or the user.
+ * Avoid potential DOS attack by guarding all creation of atoms.
+ * Some Futon and JavaScript library bugfixes.
+
+Version 0.11.1
+--------------
+
+ * Mask passwords in active tasks and logging.
+ * Update mochijson2 to allow output of BigNums not in float form.
+ * Added support for X-HTTP-METHOD-OVERRIDE.
+ * Disable jsonp by default.
+ * Accept gzip encoded standalone attachments.
+ * Made max_concurrent_connections configurable.
+ * Added continuous replication option to Futon.
+ * Added option to replicate test results anonymously to a community
+   CouchDB instance.
+ * Allow creation and deletion of config entries in Futon.
+ * Fixed various UI issues in Futon.
+ * Fixed compatibility with the Erlang R14 series.
+ * Fixed warnings on Linux builds.
+ * Fixed build error when aclocal needs to be called during the build.
+ * Require ICU 4.3.1.
+ * Fixed compatibility with Solaris.
+ * Added authentication redirect URL to log in clients.
+ * Added authentication caching, to avoid repeated opening and closing of the
+   users database for each request requiring authentication.
+ * Made authentication timeout configurable.
+ * Temporary views are now admin-only resources.
+ * Don't require a revpos for attachment stubs.
+ * Make file deletions async to avoid pauses during compaction and db
+   deletion.
+ * Fixed a wrong offset when writing headers and converting them to blocks,
+   only triggered when the header is larger than 4k.
+ * Preserve _revs_limit and instance_start_time after compaction.
+ * Fixed timeout with large .ini files.
+ * Added tests for couch.js and jquery.couch.js
+ * Added various API features to jquery.couch.js
+ * Faster default view collation.
+ * Upgrade CommonJS modules support to 1.1.1.
+ * Added option to include update_seq in view responses.
+ * Fixed erlang filter funs and normalize filter fun API.
+ * Fixed hang in view shutdown.
+ * Refactored various internal APIs related to attachment streaming.
+ * Fixed hanging replication.
+ * Fixed keepalive issue.
+ * Allow global rewrites so system defaults are available in vhosts.
+ * Allow isolation of databases with vhosts.
+ * Made the test suite overall more reliable.
+
+Version 0.11.0
+--------------
+
+This version is a feature-freeze release candidate for Apache CouchDB 1.0.
+
+ * Fixed CVE-2010-0009: Apache CouchDB Timing Attack Vulnerability.
+ * Added support for building a Windows installer as part of 'make dist'.
+ * Added optional 'raw' binary collation for faster view builds where Unicode
+   collation is not important.
+ * Improved view index build time by reducing ICU collation callouts.
+ * Added option to implicitly create replication target databases.
+ * Improved view information objects.
+ * Bug fix for partial updates during view builds.
+ * Bug fix for building couch.app's module list.
+ * Fixed a problem with statistics timers and system sleep.
+ * Improved the statistics calculations to use an online moving window
+   algorithm.
+ * Added batching of multiple updating requests, to improve throughput with many
+   writers.
+ * Removed the now redundant couch_batch_save module.
+ * Bug fix for premature termination of chunked responses.
+ * Improved speed and concurrency of config lookups.
+ * Fixed an edge case for HTTP redirects during replication.
+
+ * Fixed HTTP timeout handling for replication.
+ * Fixed query parameter handling in OAuth'd replication.
+ * Fixed a bug preventing mixing languages with lists and views.
+ * Avoid OS process leaks in lists.
+ * Avoid leaking file descriptors on automatic replication restarts.
+ * Various improvements to the Futon UI.
+ * Provide Content-MD5 header support for attachments.
+ * Added default cookie-authentication and users db.
+ * Added per-db reader access control lists.
+ * Added per-db security object for configuration data in validation functions.
+ * Added URL Rewriter handler.
+ * Added proxy authentication handler.
+ * Added ability to replicate documents by id.
+ * Added virtual host handling.
+ * Uses json2.js for JSON serialization compatibility with native JSON.
+
+Version 0.10.2
+--------------
+
+ * Fixed CVE-2010-0009: Apache CouchDB Timing Attack Vulnerability.
+
+Version 0.10.1
+--------------
+
+ * Fixed test suite to work with build system.
+ * Fixed a problem with statistics timers and system sleep.
+ * Fixed an edge case for HTTP redirects during replication.
+ * Fixed HTTP timeout handling for replication.
+ * Fixed query parameter handling in OAuth'd replication.
+ * Fixed a bug preventing mixing languages with lists and views.
+ * Avoid OS process leaks in lists.
+
+Version 0.10.0
+--------------
+
+This release contains backwards incompatible changes, please see above for help.
+
+ * General performance improvements.
+ * View index generation speedups.
+ * Even more robust storage format.
+ * Native Erlang Views for high-performance applications.
+ * More robust push and pull replication.
+ * Two-legged OAuth support for applications and replication (three-legged in
+   preparation).
+ * Cookie authentication.
+ * API detail improvements.
+ * Better RFC 2616 (HTTP 1.1) compliance.
+ * Added modular configuration file directories.
+ * Miscellaneous improvements to build, system integration, and portability.
+
+Version 0.9.2
+-------------
+
+ * Remove branch callbacks to allow building couchjs against newer versions of
+   SpiderMonkey.
+ * Fix replication with 0.10 servers initiated by an 0.9 server.
+
+Version 0.9.1
+-------------
+
+ * Various bug fixes for the build system, configuration, statistics reporting,
+   database core, external handlers, Futon interface, HTTP interface,
+   JavaScript View Server and replicator.
+
+Version 0.9.0
+-------------
+
+This release contains backwards incompatible changes, please see above for help.
+
+ * Modular configuration.
+ * Performance enhancements for document and view access.
+ * More resilient replication process.
+ * Replication streams binary attachments.
+ * Administrator role and basic authentication.
+ * Document validation functions in design documents.
+ * Show and list functions for rendering documents and views as developer
+   controlled content-types.
+ * External process server module.
+ * Attachment uploading from Futon.
+ * Etags for views, lists, shows, document and attachment requests.
+ * Miscellaneous improvements to build, system integration, and portability.
+
+Version 0.8.1-incubating
+------------------------
+
+ * Various bug fixes for replication, compaction, the HTTP interface and the
+   JavaScript View Server.
+
+Version 0.8.0-incubating
+------------------------
+
+This release contains backwards incompatible changes, please see above for help.
+
+ * Changed core licensing to the Apache Software License 2.0.
+ * Refactoring of the core view and storage engines. + * Added support for incremental map/reduce views. + * Changed database file format. + * Many improvements to Futon, the web administration interface. + * Miscellaneous improvements to build, system integration, and portability. + * Swapped out Erlang's inets HTTP server for the Mochiweb HTTP server. + * SpiderMonkey is no longer included with CouchDB, but rather treated as an + external dependency. + * Added bits of awesome. + +Version 0.7.2 +------------- + + * Small changes to build process and `couchdb` command. + * Database server official port is now 5984 TCP/UDP instead of 8888. + +Version 0.7.1 +------------- + + * Small compatibility issue with Firefox 3 fixed. + +Version 0.7.0 +------------- + + * Infrastructure rewritten to use the GNU build system for portability. + * The built-in database browsing tool has been rewritten to provide a much + nicer interface for interacting directly with CouchDB from your web browser. + * XML and Fabric have been replaced with JSON and JavaScript for data + transport and View definitions. + +Version 0.6.0 +------------- + + * A replication facility is now available. + * CouchPeek can now create, delete and view documents. + * Building from source is easier and less error prone. + +Version 0.5.0 +------------- + + * A built-in CouchPeek utility. + * A full install kit buildable from a single command. + * A new GNU/Linux version is available. An OS X version is coming soon. + +Version 0.4.0 +------------- + + * Non-existent variables are now nil lists. + * Couch error codes and messages are no longer sent in the HTTP fields, + instead they are exclusively returned in the XML body. This is to avoid HTTP + header parsing problems with oddly formed error messages. + * Returned error messages are now logged at the server at the `info` level to + make general debugging easier. + * Fixed a problem where big table builds caused timeout errors. + * Lots of changes in the low level machinery. Most formulas will continue to + function the same. + * Added full compiler support for extended characters in formula source. + * Support for Perl/Ruby like regular expressions. + * Added `total_rows` and `result_start` attributes to tables. + +Version 0.3.0 +------------- + + * CouchDB now fully supports Unicode and locale specific collation via the ICU + library, both in the Fabric engine and computed tables. + * The `in` operator has been added to Fabric. + * The `startdoc` query string variable specifies the starting document to use + if there are multiple rows with identical startkeys. + * The `skip` query string variable specifies the number of rows to skip before + returning results. The `skip` value must be a positive integer. If used with + a `count` variable the skipped rows aren't counted as output. + * Various changes to the output XML format. diff --git a/apps/couch/NOTICE b/apps/couch/NOTICE new file mode 100644 index 00000000..d547e55f --- /dev/null +++ b/apps/couch/NOTICE @@ -0,0 +1,51 @@ +Apache CouchDB +Copyright 2009 The Apache Software Foundation + +This product includes software developed at +The Apache Software Foundation (http://www.apache.org/). 
+
+This product also includes the following third-party components:
+
+ * ac_check_icu.m4 (http://autoconf-archive.cryp.to/ac_check_icu.html)
+
+   Copyright 2008, Akos Maroy
+
+ * ac_check_curl.m4 (http://autoconf-archive.cryp.to/ac_check_curl.html)
+
+   Copyright 2008, Akos Maroy
+
+ * jQuery (http://jquery.com/)
+
+   Copyright 2010, John Resig
+
+ * json2.js (http://www.json.org/)
+
+   In the public domain
+
+ * MochiWeb (http://code.google.com/p/mochiweb/)
+
+   Copyright 2007, Mochi Media Corporation
+
+ * ibrowse (http://github.com/cmullaparthi/ibrowse/tree/master)
+
+   Copyright 2009, Chandrashekhar Mullaparthi
+
+ * Erlang OAuth (http://github.com/tim/erlang-oauth/tree/master)
+
+   Copyright 2009, Tim Fletcher
+
+ * ETap (http://github.com/ngerakines/etap/)
+
+   Copyright 2009, Nick Gerakines
+
+ * mimeparse.js (http://code.google.com/p/mimeparse/)
+
+   Copyright 2009, Chris Anderson
+
+ * base64.js
+
+   Copyright 1999, Masanao Izumo
+
+ * jspec.js (http://visionmedia.github.com/jspec/)
+
+   Copyright 2010, TJ Holowaychuk
diff --git a/apps/couch/README b/apps/couch/README
new file mode 100644
index 00000000..540226d3
--- /dev/null
+++ b/apps/couch/README
@@ -0,0 +1,81 @@
+Apache CouchDB README
+=====================
+
+Installation
+------------
+
+For a low-level guide, see:
+
+    INSTALL
+
+For a high-level guide to Unix-like systems, including Mac OS X and Ubuntu, see:
+
+    INSTALL.Unix
+
+For a high-level guide to Microsoft Windows, see:
+
+    INSTALL.Windows
+
+Follow the proper instructions to get CouchDB installed on your system.
+
+If you're having problems, skip to the next section.
+
+Troubleshooting
+---------------
+
+For troubleshooting, see:
+
+    http://wiki.apache.org/couchdb/Troubleshooting
+
+If you're getting a cryptic error message, see:
+
+    http://wiki.apache.org/couchdb/Error_messages
+
+For general help, see:
+
+    http://couchdb.apache.org/community/lists.html
+
+The mailing lists provide a wealth of support and knowledge for you to tap into.
+Feel free to drop by with your questions or discussion. See the official CouchDB
+website for more information about our community resources.
+
+
+Running the Testsuite
+---------------------
+
+Run the test suite for couch.js and jquery.couch.js by browsing to:
+
+    http://127.0.0.1:5984/_utils/spec/run.html
+
+It should work in at least Firefox >= 3.6 and Safari >= 4.0.4.
+
+Read more about JSpec at:
+
+    http://jspec.info/
+
+Troubleshooting
+~~~~~~~~~~~~~~~
+
+ * When you change the specs but your changes have no effect, manually reload
+   the changed spec file in the browser.
+
+ * When the spec that tests Erlang views fails, make sure you have enabled
+   Erlang views as described here:
+
+
+Cryptographic Software Notice
+-----------------------------
+
+This distribution includes cryptographic software. The country in which you
+currently reside may have restrictions on the import, possession, use, and/or
+re-export to another country, of encryption software. BEFORE using any
+encryption software, please check your country's laws, regulations and policies
+concerning the import, possession, or use, and re-export of encryption software,
+to see if this is permitted. See for more
+information.
+
+The U.S. Government Department of Commerce, Bureau of Industry and Security
+(BIS), has classified this software as Export Commodity Control Number (ECCN)
+5D002.C.1, which includes information security software using or performing
+cryptographic functions with asymmetric algorithms.
The form and manner of this
+Apache Software Foundation distribution makes it eligible for export under the
+License Exception ENC Technology Software Unrestricted (TSU) exception (see the
+BIS Export Administration Regulations, Section 740.13) for both object code and
+source code.
+
+The following provides more details on the included cryptographic software:
+
+CouchDB includes an HTTP client (ibrowse) with SSL functionality.
diff --git a/apps/couch/THANKS b/apps/couch/THANKS
new file mode 100644
index 00000000..ebee4845
--- /dev/null
+++ b/apps/couch/THANKS
@@ -0,0 +1,69 @@
+Apache CouchDB THANKS
+=====================
+
+A number of people have contributed to Apache CouchDB by reporting problems,
+suggesting improvements or submitting changes. Some of these people are:
+
+ * William Beh
+ * Dirk Schalge
+ * Roger Leigh
+ * Sam Ruby
+ * Carlos Valiente
+ * Till Klampaeckel
+ * Jim Lindley
+ * Yoan Blanc
+ * Michael Gottesman
+ * Mark Baran
+ * Michael Hendricks
+ * Antony Blakey
+ * Paul Carey
+ * Hunter Morris
+ * Brian Palmer
+ * Maximillian Dornseif
+ * Eric Casteleijn
+ * Maarten Thibaut
+ * Florian Ebeling
+ * Volker Mische
+ * Brian Candler
+ * Brad Anderson
+ * Nick Gerakines
+ * Bob Dionne
+ * Kevin Ilchmann Jørgensen
+ * Dirkjan Ochtman
+ * Sebastian Cohnen
+ * Sven Helmberger
+ * Dan Walters
+ * Curt Arnold
+ * Gustavo Niemeyer
+ * Joshua Bronson
+ * Kostis Sagonas
+ * Matthew Hooker
+ * Ilia Cheishvili
+ * Lena Herrmann
+ * Jack Moffit
+ * Damjan Georgievski
+ * Jan Kassens
+ * James Marca
+ * Matt Goodall
+ * Joel Clark
+ * Matt Lyon
+ * mikeal
+ * Randall Leeds
+ * Joscha Feth
+ * Jarrod Roberson
+ * Jae Kwon
+ * Gavin Sherry
+ * Timothy Smith
+ * Martin Haaß
+ * Hans Ulrich Niedermann
+ * Jason Smith
+ * Dmitry Unkovsky
+ * Zachary Zolton
+ * Brian Jenkins
+ * Paul Bonser
+ * Caleb Land
+ * Juhani Ränkimies
+ * Lim Yue Chuan
+ * David Davis
+
+For a list of authors see the `AUTHORS` file.
diff --git a/apps/couch/license.skip b/apps/couch/license.skip new file mode 100644 index 00000000..a7aa6ec3 --- /dev/null +++ b/apps/couch/license.skip @@ -0,0 +1,107 @@ +\.svn +^AUTHORS +^BUGS +^CHANGES +^DEVELOPERS +^DEVELOPERS.gz +^INSTALL +^INSTALL.Unix +^INSTALL.Unix.gz +^INSTALL.Windows +^INSTALL.Windows.gz +^INSTALL.gz +^LICENSE.gz +^Makefile +^Makefile.in +^NEWS +^NOTICE +^README +^THANKS +^aclocal.m4 +^apache-couchdb-* +^autom4te.cache/* +^bin/Makefile +^bin/Makefile.in +^bin/couchdb.1 +^bin/couchjs.1 +^build-aux/* +^config.* +^configure +^couchdb.stderr +^couchdb.stdout +^cover/.*\.coverdata +^cover/.*\.html +^erl_crash.dump +^etc/Makefile +^etc/Makefile.in +^etc/couchdb/Makefile +^etc/couchdb/Makefile.in +^etc/couchdb/default* +^etc/couchdb/local* +^etc/default/Makefile +^etc/default/Makefile.in +^etc/default/couchdb +^etc/init/Makefile +^etc/init/Makefile.in +^etc/launchd/Makefile +^etc/launchd/Makefile.in +^etc/launchd/org.apache.couchdb.plist.* +^etc/logrotate.d/Makefile +^etc/logrotate.d/Makefile.in +^etc/logrotate.d/couchdb* +^etc/windows/Makefile +^etc/windows/README.txt.tpl +^libtool +^license.skip +^m4/* +^share/Makefile +^share/Makefile.in +^share/server/json2.js +^share/server/mimeparse.js +^share/www/favicon.ico +^share/www/image/* +^share/www/script/jquery.* +^share/www/script/json2.js +^share/www/script/jspec/* +^share/www/script/sha1.js +^share/www/script/base64.js +^share/www/script/test/lorem* +^src/Makefile +^src/Makefile.in +^src/couchdb/.*beam +^src/couchdb/.deps/* +^src/couchdb/Makefile +^src/couchdb/Makefile.in +^src/couchdb/couch.app* +^src/couchdb/couch.app.tpl.in +^src/couchdb/priv/.*o +^src/couchdb/priv/.deps/* +^src/couchdb/priv/Makefile +^src/couchdb/priv/Makefile.in +^src/couchdb/priv/couch_icu_driver.la +^src/couchdb/priv/couchjs +^src/couchdb/priv/couchspawnkillable +^src/couchdb/priv/stat_descriptions.cfg +^src/erlang-oauth/* +^src/etap/* +^src/ibrowse/* +^src/mochiweb/* +^stamp-h1 +^test/Makefile +^test/Makefile.in +^test/bench/Makefile +^test/bench/Makefile.in +^test/etap/.*beam +^test/etap/Makefile +^test/etap/Makefile.in +^test/etap/temp.* +^test/javascript/Makefile +^test/javascript/Makefile.in +^test/local.ini +^test/view_server/Makefile +^test/view_server/Makefile.in +^tmp/* +^utils/Makefile +^utils/Makefile.in +^var/Makefile +^var/Makefile.in diff --git a/apps/couch/test/bench/bench_marks.js b/apps/couch/test/bench/bench_marks.js new file mode 100644 index 00000000..4025adbb --- /dev/null +++ b/apps/couch/test/bench/bench_marks.js @@ -0,0 +1,103 @@ +// Licensed under the Apache License, Version 2.0 (the "License"); you may not +// use this file except in compliance with the License. You may obtain a copy of +// the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations under +// the License. 
+ +var NUM_DOCS = 2000; +var NUM_BATCHES = 20; + +var init = function() { + var db = new CouchDB("bench_mark_db", {"X-Couch-Full-Commit": "false"}); + db.deleteDb(); + db.createDb(); + return db; +}; + +var timeit = function(func) { + var startTime = (new Date()).getTime(); + func(); + return ((new Date()).getTime() - startTime) / 1000; +}; + +var report = function(name, rate) { + rate = Math.round(parseFloat(rate) * 100) / 100; + console.log("" + name + ": " + rate + " docs/second"); +}; + +var makeDocs = function(n) { + docs = []; + for (var i=0; i < n; i++) { + docs.push({"foo":"bar"}); + }; + return docs; +}; + +var couchTests = {}; + +couchTests.single_doc_insert = function() { + var db = init(); + var len = timeit(function() { + for(var i = 0; i < NUM_DOCS; i++) { + db.save({"foo": "bar"}); + } + }); + report("Single doc inserts", NUM_DOCS/len); +}; + +couchTests.batch_ok_doc_insert = function() { + var db = init(); + var len = timeit(function() { + for(var i = 0; i < NUM_DOCS; i++) { + db.save({"foo":"bar"}, {"batch":"ok"}); + } + }); + report("Single doc inserts with batch=ok", NUM_DOCS/len); +}; + +couchTests.bulk_doc_100 = function() { + var db = init(); + var len = timeit(function() { + for(var i = 0; i < NUM_BATCHES; i++) { + db.bulkSave(makeDocs(100)); + } + }); + report("Bulk docs - 100", (NUM_BATCHES*100)/len); +}; + +couchTests.bulk_doc_1000 = function() { + var db = init(); + var len = timeit(function() { + for(var i = 0; i < NUM_BATCHES; i++) { + db.bulkSave(makeDocs(1000)); + } + }); + report("Bulk docs - 1000", (NUM_BATCHES*1000)/len); +}; + + +couchTests.bulk_doc_5000 = function() { + var db = init(); + var len = timeit(function() { + for(var i = 0; i < NUM_BATCHES; i++) { + db.bulkSave(makeDocs(5000)); + } + }); + report("Bulk docs - 5000", (NUM_BATCHES*5000)/len); +}; + +couchTests.bulk_doc_10000 = function() { + var db = init(); + var len = timeit(function() { + for(var i = 0; i < NUM_BATCHES; i++) { + db.bulkSave(makeDocs(10000)); + } + }); + report("Bulk docs - 10000", (NUM_BATCHES*10000)/len); +}; diff --git a/apps/couch/test/bench/benchbulk.sh b/apps/couch/test/bench/benchbulk.sh new file mode 100755 index 00000000..22804c64 --- /dev/null +++ b/apps/couch/test/bench/benchbulk.sh @@ -0,0 +1,69 @@ +#!/bin/sh -e +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. You may obtain a copy of +# the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations under +# the License. 
+# + +# usage: time benchbulk.sh +# it takes about 30 seconds to run on my old MacBook with bulksize 1000 + +BULKSIZE=100 +DOCSIZE=10 +INSERTS=10 +ROUNDS=10 +DBURL="http://127.0.0.1:5984/benchbulk" +POSTURL="$DBURL/_bulk_docs" + +function make_bulk_docs() { + ROW=0 + SIZE=$(($1-1)) + START=$2 + BODYSIZE=$3 + + BODY=$(printf "%0${BODYSIZE}d") + + echo '{"docs":[' + while [ $ROW -lt $SIZE ]; do + printf '{"_id":"%020d", "body":"'$BODY'"},' $(($ROW + $START)) + let ROW=ROW+1 + done + printf '{"_id":"%020d", "body":"'$BODY'"}' $(($ROW + $START)) + echo ']}' +} + +echo "Making $INSERTS bulk inserts of $BULKSIZE docs each" + +echo "Attempt to delete db at $DBURL" +curl -X DELETE $DBURL -w\\n + +echo "Attempt to create db at $DBURL" +curl -X PUT $DBURL -w\\n + +echo "Running $ROUNDS rounds of $INSERTS concurrent inserts to $POSTURL" +RUN=0 +while [ $RUN -lt $ROUNDS ]; do + + POSTS=0 + while [ $POSTS -lt $INSERTS ]; do + STARTKEY=$[ POSTS * BULKSIZE + RUN * BULKSIZE * INSERTS ] + echo "startkey $STARTKEY bulksize $BULKSIZE" + DOCS=$(make_bulk_docs $BULKSIZE $STARTKEY $DOCSIZE) + # echo $DOCS + echo $DOCS | curl -T - -X POST $POSTURL -w%{http_code}\ %{time_total}\ sec\\n >/dev/null 2>&1 & + let POSTS=POSTS+1 + done + + echo "waiting" + wait + let RUN=RUN+1 +done + +curl $DBURL -w\\n diff --git a/apps/couch/test/bench/run.tpl b/apps/couch/test/bench/run.tpl new file mode 100755 index 00000000..9307863f --- /dev/null +++ b/apps/couch/test/bench/run.tpl @@ -0,0 +1,28 @@ +#!/bin/sh -e + +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. You may obtain a copy of +# the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations under +# the License. + +SRC_DIR=%abs_top_srcdir% +SCRIPT_DIR=$SRC_DIR/share/www/script +JS_TEST_DIR=$SRC_DIR/test/javascript +JS_BENCH_DIR=$SRC_DIR/test/bench + +COUCHJS=%abs_top_builddir%/src/couchdb/priv/couchjs + +cat $SCRIPT_DIR/json2.js \ + $SCRIPT_DIR/couch.js \ + $JS_TEST_DIR/couch_http.js \ + $JS_BENCH_DIR/bench_marks.js \ + $JS_TEST_DIR/cli_runner.js \ + | $COUCHJS - + diff --git a/apps/couch/test/etap/001-load.t b/apps/couch/test/etap/001-load.t new file mode 100755 index 00000000..6f49e1ba --- /dev/null +++ b/apps/couch/test/etap/001-load.t @@ -0,0 +1,68 @@ +#!/usr/bin/env escript +%% -*- erlang -*- + +% Licensed under the Apache License, Version 2.0 (the "License"); you may not +% use this file except in compliance with the License. You may obtain a copy of +% the License at +% +% http://www.apache.org/licenses/LICENSE-2.0 +% +% Unless required by applicable law or agreed to in writing, software +% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +% License for the specific language governing permissions and limitations under +% the License. + +% Test that we can load each module. 
+ +main(_) -> + test_util:init_code_path(), + etap:plan(37), + Modules = [ + couch_btree, + couch_config, + couch_config_writer, + couch_db, + couch_db_update_notifier, + couch_db_update_notifier_sup, + couch_db_updater, + couch_doc, + couch_event_sup, + couch_external_manager, + couch_external_server, + couch_file, + couch_httpd, + couch_httpd_db, + couch_httpd_external, + couch_httpd_misc_handlers, + couch_httpd_show, + couch_httpd_stats_handlers, + couch_httpd_view, + couch_key_tree, + couch_log, + couch_os_process, + couch_query_servers, + couch_ref_counter, + couch_rep, + couch_rep_sup, + couch_server, + couch_server_sup, + couch_stats_aggregator, + couch_stats_collector, + couch_stream, + couch_task_status, + couch_util, + couch_view, + couch_view_compactor, + couch_view_group, + couch_view_updater + ], + + lists:foreach( + fun(Module) -> + etap_can:loaded_ok( + Module, + lists:concat(["Loaded: ", Module]) + ) + end, Modules), + etap:end_tests(). diff --git a/apps/couch/test/etap/002-icu-driver.t b/apps/couch/test/etap/002-icu-driver.t new file mode 100644 index 00000000..d70f3303 --- /dev/null +++ b/apps/couch/test/etap/002-icu-driver.t @@ -0,0 +1,33 @@ +#!/usr/bin/env escript +% Licensed under the Apache License, Version 2.0 (the "License"); you may not +% use this file except in compliance with the License. You may obtain a copy of +% the License at +% +% http://www.apache.org/licenses/LICENSE-2.0 +% +% Unless required by applicable law or agreed to in writing, software +% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +% License for the specific language governing permissions and limitations under +% the License. + + +main(_) -> + test_util:init_code_path(), + etap:plan(3), + etap:is( + couch_util:start_driver("src/couchdb/priv/.libs"), + ok, + "Started couch_icu_driver." + ), + etap:is( + couch_util:collate(<<"foo">>, <<"bar">>), + 1, + "Can collate stuff" + ), + etap:is( + couch_util:collate(<<"A">>, <<"aa">>), + -1, + "Collate's non-ascii style." + ), + etap:end_tests(). diff --git a/apps/couch/test/etap/010-file-basics.t b/apps/couch/test/etap/010-file-basics.t new file mode 100755 index 00000000..a3599f1a --- /dev/null +++ b/apps/couch/test/etap/010-file-basics.t @@ -0,0 +1,107 @@ +#!/usr/bin/env escript +%% -*- erlang -*- +% Licensed under the Apache License, Version 2.0 (the "License"); you may not +% use this file except in compliance with the License. You may obtain a copy of +% the License at +% +% http://www.apache.org/licenses/LICENSE-2.0 +% +% Unless required by applicable law or agreed to in writing, software +% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +% License for the specific language governing permissions and limitations under +% the License. + +filename() -> test_util:build_file("test/etap/temp.010"). + +main(_) -> + test_util:init_code_path(), + etap:plan(19), + case (catch test()) of + ok -> + etap:end_tests(); + Other -> + etap:diag(io_lib:format("Test died abnormally: ~p", [Other])), + etap:bail() + end, + ok. + +test() -> + etap:is({error, enoent}, couch_file:open("not a real file"), + "Opening a non-existant file should return an enoent error."), + + etap:fun_is( + fun({ok, _}) -> true; (_) -> false end, + couch_file:open(filename() ++ ".1", [create, invalid_option]), + "Invalid flags to open are ignored." 
+ ), + + {ok, Fd} = couch_file:open(filename() ++ ".0", [create, overwrite]), + etap:ok(is_pid(Fd), + "Returned file descriptor is a Pid"), + + etap:is({ok, 0}, couch_file:bytes(Fd), + "Newly created files have 0 bytes."), + + etap:is({ok, 0}, couch_file:append_term(Fd, foo), + "Appending a term returns the previous end of file position."), + + {ok, Size} = couch_file:bytes(Fd), + etap:is_greater(Size, 0, + "Writing a term increased the file size."), + + etap:is({ok, Size}, couch_file:append_binary(Fd, <<"fancy!">>), + "Appending a binary returns the current file size."), + + etap:is({ok, foo}, couch_file:pread_term(Fd, 0), + "Reading the first term returns what we wrote: foo"), + + etap:is({ok, <<"fancy!">>}, couch_file:pread_binary(Fd, Size), + "Reading back the binary returns what we wrote: <<\"fancy\">>."), + + etap:is({ok, <<131, 100, 0, 3, 102, 111, 111>>}, + couch_file:pread_binary(Fd, 0), + "Reading a binary at a term position returns the term as binary." + ), + + {ok, BinPos} = couch_file:append_binary(Fd, <<131,100,0,3,102,111,111>>), + etap:is({ok, foo}, couch_file:pread_term(Fd, BinPos), + "Reading a term from a written binary term representation succeeds."), + + BigBin = list_to_binary(lists:duplicate(100000, 0)), + {ok, BigBinPos} = couch_file:append_binary(Fd, BigBin), + etap:is({ok, BigBin}, couch_file:pread_binary(Fd, BigBinPos), + "Reading a large term from a written representation succeeds."), + + ok = couch_file:write_header(Fd, hello), + etap:is({ok, hello}, couch_file:read_header(Fd), + "Reading a header succeeds."), + + {ok, BigBinPos2} = couch_file:append_binary(Fd, BigBin), + etap:is({ok, BigBin}, couch_file:pread_binary(Fd, BigBinPos2), + "Reading a large term from a written representation succeeds 2."), + + % append_binary == append_iolist? + % Possible bug in pread_iolist or iolist() -> append_binary + {ok, IOLPos} = couch_file:append_binary(Fd, ["foo", $m, <<"bam">>]), + etap:is({ok, [<<"foombam">>]}, couch_file:pread_iolist(Fd, IOLPos), + "Reading an results in a binary form of the written iolist()"), + + % XXX: How does on test fsync? + etap:is(ok, couch_file:sync(Fd), + "Syncing does not cause an error."), + + etap:is(ok, couch_file:truncate(Fd, Size), + "Truncating a file succeeds."), + + %etap:is(eof, (catch couch_file:pread_binary(Fd, Size)), + % "Reading data that was truncated fails.") + etap:skip(fun() -> ok end, + "No idea how to test reading beyond EOF"), + + etap:is({ok, foo}, couch_file:pread_term(Fd, 0), + "Truncating does not affect data located before the truncation mark."), + + etap:is(ok, couch_file:close(Fd), + "Files close properly."), + ok. diff --git a/apps/couch/test/etap/011-file-headers.t b/apps/couch/test/etap/011-file-headers.t new file mode 100755 index 00000000..4705f629 --- /dev/null +++ b/apps/couch/test/etap/011-file-headers.t @@ -0,0 +1,145 @@ +#!/usr/bin/env escript +%% -*- erlang -*- +%%! -pa ./src/couchdb -sasl errlog_type error -boot start_sasl -noshell + +% Licensed under the Apache License, Version 2.0 (the "License"); you may not +% use this file except in compliance with the License. You may obtain a copy of +% the License at +% +% http://www.apache.org/licenses/LICENSE-2.0 +% +% Unless required by applicable law or agreed to in writing, software +% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +% License for the specific language governing permissions and limitations under +% the License. 
+ +filename() -> test_util:build_file("test/etap/temp.011"). +sizeblock() -> 4096. % Need to keep this in sync with couch_file.erl + +main(_) -> + test_util:init_code_path(), + {S1, S2, S3} = now(), + random:seed(S1, S2, S3), + + etap:plan(17), + case (catch test()) of + ok -> + etap:end_tests(); + Other -> + etap:diag(io_lib:format("Test died abnormally: ~p", [Other])), + etap:bail() + end, + ok. + +test() -> + {ok, Fd} = couch_file:open(filename(), [create,overwrite]), + + etap:is({ok, 0}, couch_file:bytes(Fd), + "File should be initialized to contain zero bytes."), + + etap:is(ok, couch_file:write_header(Fd, {<<"some_data">>, 32}), + "Writing a header succeeds."), + + {ok, Size1} = couch_file:bytes(Fd), + etap:is_greater(Size1, 0, + "Writing a header allocates space in the file."), + + etap:is({ok, {<<"some_data">>, 32}}, couch_file:read_header(Fd), + "Reading the header returns what we wrote."), + + etap:is(ok, couch_file:write_header(Fd, [foo, <<"more">>]), + "Writing a second header succeeds."), + + {ok, Size2} = couch_file:bytes(Fd), + etap:is_greater(Size2, Size1, + "Writing a second header allocates more space."), + + etap:is({ok, [foo, <<"more">>]}, couch_file:read_header(Fd), + "Reading the second header does not return the first header."), + + % Delete the second header. + ok = couch_file:truncate(Fd, Size1), + + etap:is({ok, {<<"some_data">>, 32}}, couch_file:read_header(Fd), + "Reading the header after a truncation returns a previous header."), + + couch_file:write_header(Fd, [foo, <<"more">>]), + etap:is({ok, Size2}, couch_file:bytes(Fd), + "Rewriting the same second header returns the same second size."), + + ok = couch_file:close(Fd), + + % Now for the fun stuff. Try corrupting the second header and see + % if we recover properly. + + % Destroy the 0x1 byte that marks a header + check_header_recovery(fun(CouchFd, RawFd, Expect, HeaderPos) -> + etap:isnt(Expect, couch_file:read_header(CouchFd), + "Should return a different header before corruption."), + file:pwrite(RawFd, HeaderPos, <<0>>), + etap:is(Expect, couch_file:read_header(CouchFd), + "Corrupting the byte marker should read the previous header.") + end), + + % Corrupt the size. + check_header_recovery(fun(CouchFd, RawFd, Expect, HeaderPos) -> + etap:isnt(Expect, couch_file:read_header(CouchFd), + "Should return a different header before corruption."), + % +1 for 0x1 byte marker + file:pwrite(RawFd, HeaderPos+1, <<10/integer>>), + etap:is(Expect, couch_file:read_header(CouchFd), + "Corrupting the size should read the previous header.") + end), + + % Corrupt the MD5 signature + check_header_recovery(fun(CouchFd, RawFd, Expect, HeaderPos) -> + etap:isnt(Expect, couch_file:read_header(CouchFd), + "Should return a different header before corruption."), + % +5 = +1 for 0x1 byte and +4 for term size. + file:pwrite(RawFd, HeaderPos+5, <<"F01034F88D320B22">>), + etap:is(Expect, couch_file:read_header(CouchFd), + "Corrupting the MD5 signature should read the previous header.") + end), + + % Corrupt the data + check_header_recovery(fun(CouchFd, RawFd, Expect, HeaderPos) -> + etap:isnt(Expect, couch_file:read_header(CouchFd), + "Should return a different header before corruption."), + % +21 = +1 for 0x1 byte, +4 for term size and +16 for MD5 sig + file:pwrite(RawFd, HeaderPos+21, <<"some data goes here!">>), + etap:is(Expect, couch_file:read_header(CouchFd), + "Corrupting the header data should read the previous header.") + end), + + ok. 
+ +check_header_recovery(CheckFun) -> + {ok, Fd} = couch_file:open(filename(), [create,overwrite]), + {ok, RawFd} = file:open(filename(), [read, write, raw, binary]), + + {ok, _} = write_random_data(Fd), + ExpectHeader = {some_atom, <<"a binary">>, 756}, + ok = couch_file:write_header(Fd, ExpectHeader), + + {ok, HeaderPos} = write_random_data(Fd), + ok = couch_file:write_header(Fd, {2342, <<"corruption! greed!">>}), + + CheckFun(Fd, RawFd, {ok, ExpectHeader}, HeaderPos), + + ok = file:close(RawFd), + ok = couch_file:close(Fd), + ok. + +write_random_data(Fd) -> + write_random_data(Fd, 100 + random:uniform(1000)). + +write_random_data(Fd, 0) -> + {ok, Bytes} = couch_file:bytes(Fd), + {ok, (1 + Bytes div sizeblock()) * sizeblock()}; +write_random_data(Fd, N) -> + Choices = [foo, bar, <<"bizzingle">>, "bank", ["rough", stuff]], + Term = lists:nth(random:uniform(4) + 1, Choices), + {ok, _} = couch_file:append_term(Fd, Term), + write_random_data(Fd, N-1). + diff --git a/apps/couch/test/etap/020-btree-basics.t b/apps/couch/test/etap/020-btree-basics.t new file mode 100755 index 00000000..18c4a836 --- /dev/null +++ b/apps/couch/test/etap/020-btree-basics.t @@ -0,0 +1,205 @@ +#!/usr/bin/env escript +%% -*- erlang -*- +%%! -pa ./src/couchdb -sasl errlog_type error -boot start_sasl -noshell + +% Licensed under the Apache License, Version 2.0 (the "License"); you may not +% use this file except in compliance with the License. You may obtain a copy of +% the License at +% +% http://www.apache.org/licenses/LICENSE-2.0 +% +% Unless required by applicable law or agreed to in writing, software +% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +% License for the specific language governing permissions and limitations under +% the License. + +filename() -> test_util:build_file("test/etap/temp.020"). +rows() -> 250. + +-record(btree, {fd, root, extract_kv, assemble_kv, less, reduce}). + +main(_) -> + test_util:init_code_path(), + etap:plan(48), + case (catch test()) of + ok -> + etap:end_tests(); + Other -> + etap:diag(io_lib:format("Test died abnormally: ~p", [Other])), + etap:bail() + end, + ok. + +%% @todo Determine if this number should be greater to see if the btree was +%% broken into multiple nodes. AKA "How do we appropiately detect if multiple +%% nodes were created." +test()-> + Sorted = [{Seq, random:uniform()} || Seq <- lists:seq(1, rows())], + etap:ok(test_kvs(Sorted), "Testing sorted keys"), + etap:ok(test_kvs(lists:reverse(Sorted)), "Testing reversed sorted keys"), + etap:ok(test_kvs(shuffle(Sorted)), "Testing shuffled keys."), + ok. 
+ +test_kvs(KeyValues) -> + ReduceFun = fun + (reduce, KVs) -> + length(KVs); + (rereduce, Reds) -> + lists:sum(Reds) + end, + + Keys = [K || {K, _} <- KeyValues], + + {ok, Fd} = couch_file:open(filename(), [create,overwrite]), + {ok, Btree} = couch_btree:open(nil, Fd), + etap:ok(is_record(Btree, btree), "Created btree is really a btree record"), + etap:is(Btree#btree.fd, Fd, "Btree#btree.fd is set correctly."), + etap:is(Btree#btree.root, nil, "Btree#btree.root is set correctly."), + + Btree1 = couch_btree:set_options(Btree, [{reduce, ReduceFun}]), + etap:is(Btree1#btree.reduce, ReduceFun, "Reduce function was set"), + {ok, _, EmptyRes} = couch_btree:foldl(Btree1, fun(_, X) -> {ok, X+1} end, 0), + etap:is(EmptyRes, 0, "Folding over an empty btree"), + + {ok, Btree2} = couch_btree:add_remove(Btree1, KeyValues, []), + etap:ok(test_btree(Btree2, KeyValues), + "Adding all keys at once returns a complete btree."), + + etap:fun_is( + fun + ({ok, {kp_node, _}}) -> true; + (_) -> false + end, + couch_file:pread_term(Fd, element(1, Btree2#btree.root)), + "Btree root pointer is a kp_node." + ), + + {ok, Btree3} = couch_btree:add_remove(Btree2, [], Keys), + etap:ok(test_btree(Btree3, []), + "Removing all keys at once returns an empty btree."), + + Btree4 = lists:foldl(fun(KV, BtAcc) -> + {ok, BtAcc2} = couch_btree:add_remove(BtAcc, [KV], []), + BtAcc2 + end, Btree3, KeyValues), + etap:ok(test_btree(Btree4, KeyValues), + "Adding all keys one at a time returns a complete btree."), + + Btree5 = lists:foldl(fun({K, _}, BtAcc) -> + {ok, BtAcc2} = couch_btree:add_remove(BtAcc, [], [K]), + BtAcc2 + end, Btree4, KeyValues), + etap:ok(test_btree(Btree5, []), + "Removing all keys one at a time returns an empty btree."), + + KeyValuesRev = lists:reverse(KeyValues), + Btree6 = lists:foldl(fun(KV, BtAcc) -> + {ok, BtAcc2} = couch_btree:add_remove(BtAcc, [KV], []), + BtAcc2 + end, Btree5, KeyValuesRev), + etap:ok(test_btree(Btree6, KeyValues), + "Adding all keys in reverse order returns a complete btree."), + + {_, Rem2Keys0, Rem2Keys1} = lists:foldl(fun(X, {Count, Left, Right}) -> + case Count rem 2 == 0 of + true-> {Count+1, [X | Left], Right}; + false -> {Count+1, Left, [X | Right]} + end + end, {0, [], []}, KeyValues), + + etap:ok(test_add_remove(Btree6, Rem2Keys0, Rem2Keys1), + "Add/Remove every other key."), + + etap:ok(test_add_remove(Btree6, Rem2Keys1, Rem2Keys0), + "Add/Remove opposite every other key."), + + {ok, Btree7} = couch_btree:add_remove(Btree6, [], [K||{K,_}<-Rem2Keys1]), + {ok, Btree8} = couch_btree:add_remove(Btree7, [], [K||{K,_}<-Rem2Keys0]), + etap:ok(test_btree(Btree8, []), + "Removing both halves of every other key returns an empty btree."), + + %% Third chunk (close out) + etap:is(couch_file:close(Fd), ok, "closing out"), + true. + +test_btree(Btree, KeyValues) -> + ok = test_key_access(Btree, KeyValues), + ok = test_lookup_access(Btree, KeyValues), + ok = test_final_reductions(Btree, KeyValues), + true. + +test_add_remove(Btree, OutKeyValues, RemainingKeyValues) -> + Btree2 = lists:foldl(fun({K, _}, BtAcc) -> + {ok, BtAcc2} = couch_btree:add_remove(BtAcc, [], [K]), + BtAcc2 + end, Btree, OutKeyValues), + true = test_btree(Btree2, RemainingKeyValues), + + Btree3 = lists:foldl(fun(KV, BtAcc) -> + {ok, BtAcc2} = couch_btree:add_remove(BtAcc, [KV], []), + BtAcc2 + end, Btree2, OutKeyValues), + true = test_btree(Btree3, OutKeyValues ++ RemainingKeyValues). 
+ +test_key_access(Btree, List) -> + FoldFun = fun(Element, {[HAcc|TAcc], Count}) -> + case Element == HAcc of + true -> {ok, {TAcc, Count + 1}}; + _ -> {ok, {TAcc, Count + 1}} + end + end, + Length = length(List), + Sorted = lists:sort(List), + {ok, _, {[], Length}} = couch_btree:foldl(Btree, FoldFun, {Sorted, 0}), + {ok, _, {[], Length}} = couch_btree:fold(Btree, FoldFun, {Sorted, 0}, [{dir, rev}]), + ok. + +test_lookup_access(Btree, KeyValues) -> + FoldFun = fun({Key, Value}, {Key, Value}) -> {stop, true} end, + lists:foreach(fun({Key, Value}) -> + [{ok, {Key, Value}}] = couch_btree:lookup(Btree, [Key]), + {ok, _, true} = couch_btree:foldl(Btree, FoldFun, {Key, Value}, [{start_key, Key}]) + end, KeyValues). + +test_final_reductions(Btree, KeyValues) -> + KVLen = length(KeyValues), + FoldLFun = fun(_X, LeadingReds, Acc) -> + CountToStart = KVLen div 3 + Acc, + CountToStart = couch_btree:final_reduce(Btree, LeadingReds), + {ok, Acc+1} + end, + FoldRFun = fun(_X, LeadingReds, Acc) -> + CountToEnd = KVLen - KVLen div 3 + Acc, + CountToEnd = couch_btree:final_reduce(Btree, LeadingReds), + {ok, Acc+1} + end, + {LStartKey, _} = case KVLen of + 0 -> {nil, nil}; + _ -> lists:nth(KVLen div 3 + 1, lists:sort(KeyValues)) + end, + {RStartKey, _} = case KVLen of + 0 -> {nil, nil}; + _ -> lists:nth(KVLen div 3, lists:sort(KeyValues)) + end, + {ok, _, FoldLRed} = couch_btree:foldl(Btree, FoldLFun, 0, [{start_key, LStartKey}]), + {ok, _, FoldRRed} = couch_btree:fold(Btree, FoldRFun, 0, [{dir, rev}, {start_key, RStartKey}]), + KVLen = FoldLRed + FoldRRed, + ok. + +shuffle(List) -> + randomize(round(math:log(length(List)) + 0.5), List). + +randomize(1, List) -> + randomize(List); +randomize(T, List) -> + lists:foldl(fun(_E, Acc) -> + randomize(Acc) + end, randomize(List), lists:seq(1, (T - 1))). + +randomize(List) -> + D = lists:map(fun(A) -> + {random:uniform(), A} + end, List), + {_, D1} = lists:unzip(lists:keysort(1, D)), + D1. diff --git a/apps/couch/test/etap/021-btree-reductions.t b/apps/couch/test/etap/021-btree-reductions.t new file mode 100755 index 00000000..3e19c767 --- /dev/null +++ b/apps/couch/test/etap/021-btree-reductions.t @@ -0,0 +1,141 @@ +#!/usr/bin/env escript +%% -*- erlang -*- +%%! -pa ./src/couchdb -sasl errlog_type error -boot start_sasl -noshell + +% Licensed under the Apache License, Version 2.0 (the "License"); you may not +% use this file except in compliance with the License. You may obtain a copy of +% the License at +% +% http://www.apache.org/licenses/LICENSE-2.0 +% +% Unless required by applicable law or agreed to in writing, software +% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +% License for the specific language governing permissions and limitations under +% the License. + +filename() -> "./test/etap/temp.021". +rows() -> 1000. + +main(_) -> + test_util:init_code_path(), + etap:plan(8), + case (catch test()) of + ok -> + etap:end_tests(); + Other -> + etap:diag(io_lib:format("Test died abnormally: ~p", [Other])), + etap:bail() + end, + ok. + +test()-> + ReduceFun = fun + (reduce, KVs) -> length(KVs); + (rereduce, Reds) -> lists:sum(Reds) + end, + + {ok, Fd} = couch_file:open(filename(), [create,overwrite]), + {ok, Btree} = couch_btree:open(nil, Fd, [{reduce, ReduceFun}]), + + % Create a list, of {"even", Value} or {"odd", Value} pairs. 
+ {_, EvenOddKVs} = lists:foldl(fun(Idx, {Key, Acc}) -> + case Key of + "even" -> {"odd", [{{Key, Idx}, 1} | Acc]}; + _ -> {"even", [{{Key, Idx}, 1} | Acc]} + end + end, {"odd", []}, lists:seq(1, rows())), + + {ok, Btree2} = couch_btree:add_remove(Btree, EvenOddKVs, []), + + GroupFun = fun({K1, _}, {K2, _}) -> K1 == K2 end, + FoldFun = fun(GroupedKey, Unreduced, Acc) -> + {ok, [{GroupedKey, couch_btree:final_reduce(Btree2, Unreduced)} | Acc]} + end, + + {SK1, EK1} = {{"even", -1}, {"even", foo}}, + {SK2, EK2} = {{"odd", -1}, {"odd", foo}}, + + etap:fun_is( + fun + ({ok, [{{"odd", _}, 500}, {{"even", _}, 500}]}) -> + true; + (_) -> + false + end, + couch_btree:fold_reduce(Btree2, FoldFun, [], [{key_group_fun, GroupFun}]), + "Reduction works with no specified direction, startkey, or endkey." + ), + + etap:fun_is( + fun + ({ok, [{{"odd", _}, 500}, {{"even", _}, 500}]}) -> + true; + (_) -> + false + end, + couch_btree:fold_reduce(Btree2, FoldFun, [], [{key_group_fun, GroupFun}, {dir, fwd}]), + "Reducing forward works with no startkey or endkey." + ), + + etap:fun_is( + fun + ({ok, [{{"even", _}, 500}, {{"odd", _}, 500}]}) -> + true; + (_) -> + false + end, + couch_btree:fold_reduce(Btree2, FoldFun, [], [{key_group_fun, GroupFun}, {dir, rev}]), + "Reducing backwards works with no startkey or endkey." + ), + + etap:fun_is( + fun + ({ok, [{{"odd", _}, 500}, {{"even", _}, 500}]}) -> + true; + (_) -> + false + end, + couch_btree:fold_reduce(Btree2, FoldFun, [], [{dir, fwd}, {key_group_fun, GroupFun}, {start_key, SK1}, {end_key, EK2}]), + "Reducing works over the entire range with startkey and endkey set." + ), + + etap:fun_is( + fun + ({ok, [{{"even", _}, 500}]}) -> true; + (_) -> false + end, + couch_btree:fold_reduce(Btree2, FoldFun, [], [{dir, fwd}, {key_group_fun, GroupFun}, {start_key, SK1}, {end_key, EK1}]), + "Reducing foward over first half works with a startkey and endkey." + ), + + etap:fun_is( + fun + ({ok, [{{"odd", _}, 500}]}) -> true; + (_) -> false + end, + couch_btree:fold_reduce(Btree2, FoldFun, [], [{dir, fwd}, {key_group_fun, GroupFun}, {start_key, SK2}, {end_key, EK2}]), + "Reducing foward over second half works with second startkey and endkey" + ), + + etap:fun_is( + fun + ({ok, [{{"odd", _}, 500}]}) -> true; + (_) -> false + end, + couch_btree:fold_reduce(Btree2, FoldFun, [], [{dir, rev}, {key_group_fun, GroupFun}, {start_key, EK2}, {end_key, SK2}]), + "Reducing in reverse works after swapping the startkey and endkey." + ), + + etap:fun_is( + fun + ({ok, [{{"even", _}, 500}, {{"odd", _}, 500}]}) -> + true; + (_) -> + false + end, + couch_btree:fold_reduce(Btree2, FoldFun, [], [{dir, rev}, {key_group_fun, GroupFun}, {start_key, EK2}, {end_key, SK1}]), + "Reducing in reverse results in reversed accumulator." + ), + + couch_file:close(Fd). diff --git a/apps/couch/test/etap/030-doc-from-json.t b/apps/couch/test/etap/030-doc-from-json.t new file mode 100755 index 00000000..c4ef649a --- /dev/null +++ b/apps/couch/test/etap/030-doc-from-json.t @@ -0,0 +1,239 @@ +#!/usr/bin/env escript +%% -*- erlang -*- +%%! -pa ./src/couchdb -pa ./src/mochiweb -sasl errlog_type false -noshell + +% Licensed under the Apache License, Version 2.0 (the "License"); you may not +% use this file except in compliance with the License. 
You may obtain a copy of +% the License at +% +% http://www.apache.org/licenses/LICENSE-2.0 +% +% Unless required by applicable law or agreed to in writing, software +% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +% License for the specific language governing permissions and limitations under +% the License. + +%% XXX: Figure out how to -include("couch_db.hrl") +-record(doc, {id= <<"">>, revs={0, []}, body={[]}, + atts=[], deleted=false, meta=[]}). +-record(att, {name, type, att_len, disk_len, md5= <<>>, revpos=0, data, + encoding=identity}). + +default_config() -> + test_util:build_file("etc/couchdb/default_dev.ini"). + +main(_) -> + test_util:init_code_path(), + etap:plan(26), + case (catch test()) of + ok -> + etap:end_tests(); + Other -> + etap:diag(io_lib:format("Test died abnormally: ~p", [Other])), + etap:bail() + end, + ok. + +test() -> + couch_config:start_link([default_config()]), + couch_config:set("attachments", "compression_level", "0"), + ok = test_from_json_success(), + ok = test_from_json_errors(), + ok. + +test_from_json_success() -> + Cases = [ + { + {[]}, + #doc{}, + "Return an empty document for an empty JSON object." + }, + { + {[{<<"_id">>, <<"zing!">>}]}, + #doc{id= <<"zing!">>}, + "Parses document ids." + }, + { + {[{<<"_id">>, <<"_design/foo">>}]}, + #doc{id= <<"_design/foo">>}, + "_design/document ids." + }, + { + {[{<<"_id">>, <<"_local/bam">>}]}, + #doc{id= <<"_local/bam">>}, + "_local/document ids." + }, + { + {[{<<"_rev">>, <<"4-230234">>}]}, + #doc{revs={4, [<<"230234">>]}}, + "_rev stored in revs." + }, + { + {[{<<"soap">>, 35}]}, + #doc{body={[{<<"soap">>, 35}]}}, + "Non underscore prefixed fields stored in body." + }, + { + {[{<<"_attachments">>, {[ + {<<"my_attachment.fu">>, {[ + {<<"stub">>, true}, + {<<"content_type">>, <<"application/awesome">>}, + {<<"length">>, 45} + ]}}, + {<<"noahs_private_key.gpg">>, {[ + {<<"data">>, <<"SSBoYXZlIGEgcGV0IGZpc2gh">>}, + {<<"content_type">>, <<"application/pgp-signature">>} + ]}} + ]}}]}, + #doc{atts=[ + #att{ + name = <<"my_attachment.fu">>, + data = stub, + type = <<"application/awesome">>, + att_len = 45, + disk_len = 45, + revpos = nil + }, + #att{ + name = <<"noahs_private_key.gpg">>, + data = <<"I have a pet fish!">>, + type = <<"application/pgp-signature">>, + att_len = 18, + disk_len = 18, + revpos = 0 + } + ]}, + "Attachments are parsed correctly." + }, + { + {[{<<"_deleted">>, true}]}, + #doc{deleted=true}, + "_deleted controls the deleted field." + }, + { + {[{<<"_deleted">>, false}]}, + #doc{}, + "{\"_deleted\": false} is ok." + }, + { + {[ + {<<"_revisions">>, {[ + {<<"start">>, 4}, + {<<"ids">>, [<<"foo1">>, <<"phi3">>, <<"omega">>]} + ]}}, + {<<"_rev">>, <<"6-something">>} + ]}, + #doc{revs={4, [<<"foo1">>, <<"phi3">>, <<"omega">>]}}, + "_revisions attribute are preferred to _rev." + }, + { + {[{<<"_revs_info">>, dropping}]}, + #doc{}, + "Drops _revs_info." + }, + { + {[{<<"_local_seq">>, dropping}]}, + #doc{}, + "Drops _local_seq." + }, + { + {[{<<"_conflicts">>, dropping}]}, + #doc{}, + "Drops _conflicts." + }, + { + {[{<<"_deleted_conflicts">>, dropping}]}, + #doc{}, + "Drops _deleted_conflicts." + } + ], + + lists:foreach(fun({EJson, Expect, Mesg}) -> + etap:is(couch_doc:from_json_obj(EJson), Expect, Mesg) + end, Cases), + ok. 
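[Editorial note, not part of the patch: the Cases tables in these doc tests use CouchDB's EJSON convention for decoded JSON, which is worth spelling out once: objects are a one-element tuple wrapping a proplist, arrays are plain lists, and strings are binaries.]

    %% {"_id": "zing!", "tags": ["a", "b"]}   decodes to
    %% {[{<<"_id">>, <<"zing!">>}, {<<"tags">>, [<<"a">>, <<"b">>]}]}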
+ +test_from_json_errors() -> + Cases = [ + { + [], + {bad_request, "Document must be a JSON object"}, + "arrays are invalid" + }, + { + 4, + {bad_request, "Document must be a JSON object"}, + "integers are invalid" + }, + { + true, + {bad_request, "Document must be a JSON object"}, + "literals are invalid" + }, + { + {[{<<"_id">>, {[{<<"foo">>, 5}]}}]}, + {bad_request, <<"Document id must be a string">>}, + "Document id must be a string." + }, + { + {[{<<"_id">>, <<"_random">>}]}, + {bad_request, + <<"Only reserved document ids may start with underscore.">>}, + "Disallow arbitrary underscore prefixed docids." + }, + { + {[{<<"_rev">>, 5}]}, + {bad_request, <<"Invalid rev format">>}, + "_rev must be a string" + }, + { + {[{<<"_rev">>, "foobar"}]}, + {bad_request, <<"Invalid rev format">>}, + "_rev must be %d-%s" + }, + { + {[{<<"_rev">>, "foo-bar"}]}, + "Error if _rev's integer expection is broken." + }, + { + {[{<<"_revisions">>, {[{<<"start">>, true}]}}]}, + {doc_validation, "_revisions.start isn't an integer."}, + "_revisions.start must be an integer." + }, + { + {[{<<"_revisions">>, {[ + {<<"start">>, 0}, + {<<"ids">>, 5} + ]}}]}, + {doc_validation, "_revisions.ids isn't a array."}, + "_revions.ids must be a list." + }, + { + {[{<<"_revisions">>, {[ + {<<"start">>, 0}, + {<<"ids">>, [5]} + ]}}]}, + {doc_validation, "RevId isn't a string"}, + "Revision ids must be strings." + }, + { + {[{<<"_something">>, 5}]}, + {doc_validation, <<"Bad special document member: _something">>}, + "Underscore prefix fields are reserved." + } + ], + + lists:foreach(fun + ({EJson, Expect, Mesg}) -> + Error = (catch couch_doc:from_json_obj(EJson)), + etap:is(Error, Expect, Mesg); + ({EJson, Mesg}) -> + try + couch_doc:from_json_obj(EJson), + etap:ok(false, "Conversion failed to raise an exception.") + catch + _:_ -> etap:ok(true, Mesg) + end + end, Cases), + ok. diff --git a/apps/couch/test/etap/031-doc-to-json.t b/apps/couch/test/etap/031-doc-to-json.t new file mode 100755 index 00000000..605a6d00 --- /dev/null +++ b/apps/couch/test/etap/031-doc-to-json.t @@ -0,0 +1,200 @@ +#!/usr/bin/env escript +%% -*- erlang -*- +%%! -pa ./src/couchdb -pa ./src/mochiweb -sasl errlog_type false -noshell + +% Licensed under the Apache License, Version 2.0 (the "License"); you may not +% use this file except in compliance with the License. You may obtain a copy of +% the License at +% +% http://www.apache.org/licenses/LICENSE-2.0 +% +% Unless required by applicable law or agreed to in writing, software +% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +% License for the specific language governing permissions and limitations under +% the License. + +%% XXX: Figure out how to -include("couch_db.hrl") +-record(doc, {id= <<"">>, revs={0, []}, body={[]}, + atts=[], deleted=false, meta=[]}). +-record(att, {name, type, att_len, disk_len, md5= <<>>, revpos=0, data, + encoding=identity}). + +default_config() -> + test_util:build_file("etc/couchdb/default_dev.ini"). + +main(_) -> + test_util:init_code_path(), + etap:plan(12), + case (catch test()) of + ok -> + etap:end_tests(); + Other -> + etap:diag(io_lib:format("Test died abnormally: ~p", [Other])), + etap:bail() + end, + ok. + +test() -> + couch_config:start_link([default_config()]), + couch_config:set("attachments", "compression_level", "0"), + ok = test_to_json_success(), + ok. 
+ +test_to_json_success() -> + Cases = [ + { + #doc{}, + {[{<<"_id">>, <<"">>}]}, + "Empty docs are {\"_id\": \"\"}" + }, + { + #doc{id= <<"foo">>}, + {[{<<"_id">>, <<"foo">>}]}, + "_id is added." + }, + { + #doc{revs={5, ["foo"]}}, + {[{<<"_id">>, <<>>}, {<<"_rev">>, <<"5-foo">>}]}, + "_rev is added." + }, + { + [revs], + #doc{revs={5, [<<"first">>, <<"second">>]}}, + {[ + {<<"_id">>, <<>>}, + {<<"_rev">>, <<"5-first">>}, + {<<"_revisions">>, {[ + {<<"start">>, 5}, + {<<"ids">>, [<<"first">>, <<"second">>]} + ]}} + ]}, + "_revisions include with revs option" + }, + { + #doc{body={[{<<"foo">>, <<"bar">>}]}}, + {[{<<"_id">>, <<>>}, {<<"foo">>, <<"bar">>}]}, + "Arbitrary fields are added." + }, + { + #doc{deleted=true, body={[{<<"foo">>, <<"bar">>}]}}, + {[{<<"_id">>, <<>>}, {<<"foo">>, <<"bar">>}, {<<"_deleted">>, true}]}, + "Deleted docs no longer drop body members." + }, + { + #doc{meta=[ + {revs_info, 4, [{<<"fin">>, deleted}, {<<"zim">>, missing}]} + ]}, + {[ + {<<"_id">>, <<>>}, + {<<"_revs_info">>, [ + {[{<<"rev">>, <<"4-fin">>}, {<<"status">>, <<"deleted">>}]}, + {[{<<"rev">>, <<"3-zim">>}, {<<"status">>, <<"missing">>}]} + ]} + ]}, + "_revs_info field is added correctly." + }, + { + #doc{meta=[{local_seq, 5}]}, + {[{<<"_id">>, <<>>}, {<<"_local_seq">>, 5}]}, + "_local_seq is added as an integer." + }, + { + #doc{meta=[{conflicts, [{3, <<"yep">>}, {1, <<"snow">>}]}]}, + {[ + {<<"_id">>, <<>>}, + {<<"_conflicts">>, [<<"3-yep">>, <<"1-snow">>]} + ]}, + "_conflicts is added as an array of strings." + }, + { + #doc{meta=[{deleted_conflicts, [{10923, <<"big_cowboy_hat">>}]}]}, + {[ + {<<"_id">>, <<>>}, + {<<"_deleted_conflicts">>, [<<"10923-big_cowboy_hat">>]} + ]}, + "_deleted_conflicsts is added as an array of strings." + }, + { + #doc{atts=[ + #att{ + name = <<"big.xml">>, + type = <<"xml/sucks">>, + data = fun() -> ok end, + revpos = 1, + att_len = 400, + disk_len = 400 + }, + #att{ + name = <<"fast.json">>, + type = <<"json/ftw">>, + data = <<"{\"so\": \"there!\"}">>, + revpos = 1, + att_len = 16, + disk_len = 16 + } + ]}, + {[ + {<<"_id">>, <<>>}, + {<<"_attachments">>, {[ + {<<"big.xml">>, {[ + {<<"content_type">>, <<"xml/sucks">>}, + {<<"revpos">>, 1}, + {<<"length">>, 400}, + {<<"stub">>, true} + ]}}, + {<<"fast.json">>, {[ + {<<"content_type">>, <<"json/ftw">>}, + {<<"revpos">>, 1}, + {<<"length">>, 16}, + {<<"stub">>, true} + ]}} + ]}} + ]}, + "Attachments attached as stubs only include a length." + }, + { + [attachments], + #doc{atts=[ + #att{ + name = <<"stuff.txt">>, + type = <<"text/plain">>, + data = fun() -> <<"diet pepsi">> end, + revpos = 1, + att_len = 10, + disk_len = 10 + }, + #att{ + name = <<"food.now">>, + type = <<"application/food">>, + revpos = 1, + data = <<"sammich">> + } + ]}, + {[ + {<<"_id">>, <<>>}, + {<<"_attachments">>, {[ + {<<"stuff.txt">>, {[ + {<<"content_type">>, <<"text/plain">>}, + {<<"revpos">>, 1}, + {<<"data">>, <<"ZGlldCBwZXBzaQ==">>} + ]}}, + {<<"food.now">>, {[ + {<<"content_type">>, <<"application/food">>}, + {<<"revpos">>, 1}, + {<<"data">>, <<"c2FtbWljaA==">>} + ]}} + ]}} + ]}, + "Attachments included inline with attachments option." + } + ], + + lists:foreach(fun + ({Doc, EJson, Mesg}) -> + etap:is(couch_doc:to_json_obj(Doc, []), EJson, Mesg); + ({Options, Doc, EJson, Mesg}) -> + etap:is(couch_doc:to_json_obj(Doc, Options), EJson, Mesg) + end, Cases), + ok. 
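[Editorial note, not part of the patch: with the attachments option, to_json_obj inlines attachment bodies base64-encoded, which is where the literals in the last case above come from. A quick check of the two expected values:]

    <<"ZGlldCBwZXBzaQ==">> = base64:encode(<<"diet pepsi">>),
    <<"c2FtbWljaA==">> = base64:encode(<<"sammich">>).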
+ diff --git a/apps/couch/test/etap/040-util.t b/apps/couch/test/etap/040-util.t new file mode 100755 index 00000000..8f80db87 --- /dev/null +++ b/apps/couch/test/etap/040-util.t @@ -0,0 +1,80 @@ +#!/usr/bin/env escript +%% -*- erlang -*- + +% Licensed under the Apache License, Version 2.0 (the "License"); you may not +% use this file except in compliance with the License. You may obtain a copy of +% the License at +% +% http://www.apache.org/licenses/LICENSE-2.0 +% +% Unless required by applicable law or agreed to in writing, software +% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +% License for the specific language governing permissions and limitations under +% the License. + +main(_) -> + test_util:init_code_path(), + application:start(crypto), + + etap:plan(14), + case (catch test()) of + ok -> + etap:end_tests(); + Other -> + etap:diag(io_lib:format("Test died abnormally: ~p", [Other])), + etap:bail(Other) + end, + ok. + +test() -> + % to_existing_atom + etap:is(true, couch_util:to_existing_atom(true), "An atom is an atom."), + etap:is(foo, couch_util:to_existing_atom(<<"foo">>), + "A binary foo is the atom foo."), + etap:is(foobarbaz, couch_util:to_existing_atom("foobarbaz"), + "A list of atoms is one munged atom."), + + % implode + etap:is([1, 38, 2, 38, 3], couch_util:implode([1,2,3],"&"), + "use & as separator in list."), + + % trim + Strings = [" foo", "foo ", "\tfoo", " foo ", "foo\t", "foo\n", "\nfoo"], + etap:ok(lists:all(fun(S) -> couch_util:trim(S) == "foo" end, Strings), + "everything here trimmed should be foo."), + + % abs_pathname + {ok, Cwd} = file:get_cwd(), + etap:is(Cwd ++ "/foo", couch_util:abs_pathname("./foo"), + "foo is in this directory."), + + % should_flush + etap:ok(not couch_util:should_flush(), + "Not using enough memory to flush."), + AcquireMem = fun() -> + IntsToAGazillion = lists:seq(1, 200000), + LotsOfData = lists:map( + fun(Int) -> {Int, <<"foobar">>} end, + lists:seq(1, 500000)), + etap:ok(couch_util:should_flush(), + "Allocation 200K tuples puts us above the memory threshold.") + end, + AcquireMem(), + + etap:ok(not couch_util:should_flush(), + "Checking to flush invokes GC."), + + % verify + etap:is(true, couch_util:verify("It4Vooya", "It4Vooya"), + "String comparison."), + etap:is(false, couch_util:verify("It4VooyaX", "It4Vooya"), + "String comparison (unequal lengths)."), + etap:is(true, couch_util:verify(<<"ahBase3r">>, <<"ahBase3r">>), + "Binary comparison."), + etap:is(false, couch_util:verify(<<"ahBase3rX">>, <<"ahBase3r">>), + "Binary comparison (unequal lengths)."), + etap:is(false, couch_util:verify(nil, <<"ahBase3r">>), + "Binary comparison with atom."), + + ok. diff --git a/apps/couch/test/etap/041-uuid-gen-seq.ini b/apps/couch/test/etap/041-uuid-gen-seq.ini new file mode 100644 index 00000000..94cebc6f --- /dev/null +++ b/apps/couch/test/etap/041-uuid-gen-seq.ini @@ -0,0 +1,19 @@ +; Licensed to the Apache Software Foundation (ASF) under one +; or more contributor license agreements. See the NOTICE file +; distributed with this work for additional information +; regarding copyright ownership. The ASF licenses this file +; to you under the Apache License, Version 2.0 (the +; "License"); you may not use this file except in compliance +; with the License. 
You may obtain a copy of the License at +; +; http://www.apache.org/licenses/LICENSE-2.0 +; +; Unless required by applicable law or agreed to in writing, +; software distributed under the License is distributed on an +; "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +; KIND, either express or implied. See the License for the +; specific language governing permissions and limitations +; under the License. + +[uuids] +algorithm = sequential diff --git a/apps/couch/test/etap/041-uuid-gen-utc.ini b/apps/couch/test/etap/041-uuid-gen-utc.ini new file mode 100644 index 00000000..c2b83831 --- /dev/null +++ b/apps/couch/test/etap/041-uuid-gen-utc.ini @@ -0,0 +1,19 @@ +; Licensed to the Apache Software Foundation (ASF) under one +; or more contributor license agreements. See the NOTICE file +; distributed with this work for additional information +; regarding copyright ownership. The ASF licenses this file +; to you under the Apache License, Version 2.0 (the +; "License"); you may not use this file except in compliance +; with the License. You may obtain a copy of the License at +; +; http://www.apache.org/licenses/LICENSE-2.0 +; +; Unless required by applicable law or agreed to in writing, +; software distributed under the License is distributed on an +; "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +; KIND, either express or implied. See the License for the +; specific language governing permissions and limitations +; under the License. + +[uuids] +algorithm = utc_random diff --git a/apps/couch/test/etap/041-uuid-gen.t b/apps/couch/test/etap/041-uuid-gen.t new file mode 100755 index 00000000..1e6aa9ee --- /dev/null +++ b/apps/couch/test/etap/041-uuid-gen.t @@ -0,0 +1,118 @@ +#!/usr/bin/env escript +%% -*- erlang -*- + +% Licensed under the Apache License, Version 2.0 (the "License"); you may not +% use this file except in compliance with the License. You may obtain a copy of +% the License at +% +% http://www.apache.org/licenses/LICENSE-2.0 +% +% Unless required by applicable law or agreed to in writing, software +% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +% License for the specific language governing permissions and limitations under +% the License. + +default_config() -> + test_util:build_file("etc/couchdb/default_dev.ini"). + +seq_alg_config() -> + test_util:source_file("test/etap/041-uuid-gen-seq.ini"). + +utc_alg_config() -> + test_util:source_file("test/etap/041-uuid-gen-utc.ini"). + +% Run tests and wait for the gen_servers to shutdown +run_test(IniFiles, Test) -> + {ok, Pid} = couch_config:start_link(IniFiles), + erlang:monitor(process, Pid), + couch_uuids:start(), + Test(), + couch_uuids:stop(), + couch_config:stop(), + receive + {'DOWN', _, _, Pid, _} -> ok; + _Other -> etap:diag("OTHER: ~p~n", [_Other]) + after + 1000 -> throw({timeout_error, config_stop}) + end. + +main(_) -> + test_util:init_code_path(), + application:start(crypto), + etap:plan(6), + + case (catch test()) of + ok -> + etap:end_tests(); + Other -> + etap:diag(io_lib:format("Test died abnormally: ~p", [Other])), + etap:bail(Other) + end, + ok. 
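[Editorial note, not part of the patch: the rollover test in test() below leans on an estimate of roughly 8194 draws per prefix change. Assuming the sequential algorithm advances a counter by a uniform random step averaging about 16#7ff and re-prefixes once the counter passes 16#fff000, the two figures quoted in the test's own comment, the arithmetic works out as follows.]

    Threshold = 16#fff000,                   %% 16,773,120
    MeanStep = 16#7ff,                       %% 2,047, the assumed average increment
    8194 = round(Threshold / MeanStep),      %% expected UUIDs before the prefix changes
    %% the 5000..11000 bounds used by the test allow a generous margin around that.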
+ +test() -> + + TestUnique = fun() -> + etap:is( + test_unique(10000, couch_uuids:new()), + true, + "Can generate 10K unique IDs" + ) + end, + run_test([default_config()], TestUnique), + run_test([default_config(), seq_alg_config()], TestUnique), + run_test([default_config(), utc_alg_config()], TestUnique), + + TestMonotonic = fun () -> + etap:is( + couch_uuids:new() < couch_uuids:new(), + true, + "should produce monotonically increasing ids" + ) + end, + run_test([default_config(), seq_alg_config()], TestMonotonic), + run_test([default_config(), utc_alg_config()], TestMonotonic), + + % Pretty sure that the average of a uniform distribution is the + % midpoint of the range. Thus, to exceed a threshold, we need + % approximately Total / (Range/2 + RangeMin) samples. + % + % In our case this works out to be 8194. (0xFFF000 / 0x7FF) + % These tests just fudge the limits for a good generator at 25% + % in either direction. Technically it should be possible to generate + % bounds that will show if your random number generator is not + % sufficiently random but I hated statistics in school. + TestRollOver = fun() -> + UUID = binary_to_list(couch_uuids:new()), + Prefix = element(1, lists:split(26, UUID)), + N = gen_until_pref_change(Prefix,0), + etap:diag("N is: ~p~n",[N]), + etap:is( + N >= 5000 andalso N =< 11000, + true, + "should roll over every so often." + ) + end, + run_test([default_config(), seq_alg_config()], TestRollOver). + +test_unique(0, _) -> + true; +test_unique(N, UUID) -> + case couch_uuids:new() of + UUID -> + etap:diag("N: ~p~n", [N]), + false; + Else -> test_unique(N-1, Else) + end. + +get_prefix(UUID) -> + element(1, lists:split(26, binary_to_list(UUID))). + +gen_until_pref_change(_, Count) when Count > 8251 -> + Count; +gen_until_pref_change(Prefix, N) -> + case get_prefix(couch_uuids:new()) of + Prefix -> gen_until_pref_change(Prefix, N+1); + _ -> N + end. diff --git a/apps/couch/test/etap/050-stream.t b/apps/couch/test/etap/050-stream.t new file mode 100755 index 00000000..545dd524 --- /dev/null +++ b/apps/couch/test/etap/050-stream.t @@ -0,0 +1,87 @@ +#!/usr/bin/env escript +%% -*- erlang -*- + +% Licensed under the Apache License, Version 2.0 (the "License"); you may not +% use this file except in compliance with the License. You may obtain a copy of +% the License at +% +% http://www.apache.org/licenses/LICENSE-2.0 +% +% Unless required by applicable law or agreed to in writing, software +% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +% License for the specific language governing permissions and limitations under +% the License. + +main(_) -> + test_util:init_code_path(), + etap:plan(13), + case (catch test()) of + ok -> + etap:end_tests(); + Other -> + etap:diag(io_lib:format("Test died abnormally: ~p", [Other])), + etap:bail(Other) + end, + ok. + +read_all(Fd, PosList) -> + Data = couch_stream:foldl(Fd, PosList, fun(Bin, Acc) -> [Bin, Acc] end, []), + iolist_to_binary(Data). 
+
+test() ->
+    {ok, Fd} = couch_file:open("test/etap/temp.050", [create,overwrite]),
+    {ok, Stream} = couch_stream:open(Fd),
+
+    etap:is(ok, couch_stream:write(Stream, <<"food">>),
+        "Writing to streams works."),
+
+    etap:is(ok, couch_stream:write(Stream, <<"foob">>),
+        "Consecutive writing to streams works."),
+
+    etap:is(ok, couch_stream:write(Stream, <<>>),
+        "Writing an empty binary does nothing."),
+
+    {Ptrs, Length, _, _, _} = couch_stream:close(Stream),
+    etap:is(Ptrs, [0], "Close returns the file pointers."),
+    etap:is(Length, 8, "Close also returns the number of bytes written."),
+    etap:is(<<"foodfoob">>, read_all(Fd, Ptrs), "Returned pointers are valid."),
+
+    % Remember where we expect the pointer to be.
+    {ok, ExpPtr} = couch_file:bytes(Fd),
+    {ok, Stream2} = couch_stream:open(Fd),
+    OneBits = <<1:(8*10)>>,
+    etap:is(ok, couch_stream:write(Stream2, OneBits),
+        "Successfully wrote 80 1 bits."),
+
+    ZeroBits = <<0:(8*10)>>,
+    etap:is(ok, couch_stream:write(Stream2, ZeroBits),
+        "Successfully wrote 80 0 bits."),
+
+    {Ptrs2, Length2, _, _, _} = couch_stream:close(Stream2),
+    etap:is(Ptrs2, [ExpPtr], "Closing stream returns the file pointers."),
+    etap:is(Length2, 20, "Length written is 20 bytes."),
+
+    AllBits = iolist_to_binary([OneBits,ZeroBits]),
+    etap:is(AllBits, read_all(Fd, Ptrs2), "Returned pointers are valid."),
+
+    % Stream more than the 4K chunk size.
+    {ok, ExpPtr2} = couch_file:bytes(Fd),
+    {ok, Stream3} = couch_stream:open(Fd),
+    Acc2 = lists:foldl(fun(_, Acc) ->
+        Data = <<"a1b2c">>,
+        couch_stream:write(Stream3, Data),
+        [Data | Acc]
+    end, [], lists:seq(1, 1024)),
+    {Ptrs3, Length3, _, _, _} = couch_stream:close(Stream3),
+
+    % 4095 = 4096 - (4096 rem 5), the most the 5-byte writes can buffer before
+    %   exceeding the threshold
+    % + 5 for the write that pushes us over the threshold
+    % + 4 bytes for the length header that term_to_binary adds
+    % + 1 marker byte every 4K for tail append headers
+    SecondPtr = ExpPtr2 + 4095 + 5 + 4 + 1,
+    etap:is(Ptrs3, [ExpPtr2, SecondPtr], "Pointers every 4K bytes."),
+    etap:is(Length3, 5120, "Wrote the expected 5K bytes."),
+
+    couch_file:close(Fd),
+    ok.
diff --git a/apps/couch/test/etap/060-kt-merging.t b/apps/couch/test/etap/060-kt-merging.t
new file mode 100755
index 00000000..d6b13d6d
--- /dev/null
+++ b/apps/couch/test/etap/060-kt-merging.t
@@ -0,0 +1,140 @@
+#!/usr/bin/env escript
+%% -*- erlang -*-
+
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+%     http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+main(_) ->
+    test_util:init_code_path(),
+    etap:plan(16),
+    case (catch test()) of
+        ok ->
+            etap:end_tests();
+        Other ->
+            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
+            etap:bail(Other)
+    end,
+    ok.
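[Editorial note, not part of the patch: the fixtures in test() below, and in the other kt-* tests that follow, encode revision trees as lists of {StartDepth, {Key, Value, ChildTrees}}. A stemmed tree is one whose StartDepth is greater than zero, meaning that many ancestors have already been trimmed away. The shape is inferred from the fixtures themselves; FullPath and Stemmed are illustrative names.]

    %% a full two-level path and the same path stemmed down to its deepest node:
    FullPath = [{0, {"1", "foo", [{"1a", "bar", [{"1aa", "bar", []}]}]}}],
    Stemmed = [{2, {"1aa", "bar", []}}],   %% depth 2: "1" and "1a" trimmed away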
+ +test() -> + EmptyTree = [], + One = [{0, {"1","foo",[]}}], + TwoSibs = [{0, {"1","foo",[]}}, + {0, {"2","foo",[]}}], + OneChild = [{0, {"1","foo",[{"1a", "bar", []}]}}], + TwoChild = [{0, {"1","foo", [{"1a", "bar", [{"1aa", "bar", []}]}]}}], + TwoChildSibs = [{0, {"1","foo", [{"1a", "bar", []}, + {"1b", "bar", []}]}}], + TwoChildSibs2 = [{0, {"1","foo", [{"1a", "bar", []}, + {"1b", "bar", [{"1bb", "boo", []}]}]}}], + Stemmed1b = [{1, {"1a", "bar", []}}], + Stemmed1a = [{1, {"1a", "bar", [{"1aa", "bar", []}]}}], + Stemmed1aa = [{2, {"1aa", "bar", []}}], + Stemmed1bb = [{2, {"1bb", "boo", []}}], + + etap:is( + {EmptyTree, no_conflicts}, + couch_key_tree:merge(EmptyTree, EmptyTree), + "Merging two empty trees yields an empty tree." + ), + + etap:is( + {One, no_conflicts}, + couch_key_tree:merge(EmptyTree, One), + "The empty tree is the identity for merge." + ), + + etap:is( + {One, no_conflicts}, + couch_key_tree:merge(One, EmptyTree), + "Merging is commutative." + ), + + etap:is( + {TwoSibs, no_conflicts}, + couch_key_tree:merge(One, TwoSibs), + "Merging a prefix of a tree with the tree yields the tree." + ), + + etap:is( + {One, no_conflicts}, + couch_key_tree:merge(One, One), + "Merging is reflexive." + ), + + etap:is( + {TwoChild, no_conflicts}, + couch_key_tree:merge(TwoChild, TwoChild), + "Merging two children is still reflexive." + ), + + etap:is( + {TwoChildSibs, no_conflicts}, + couch_key_tree:merge(TwoChildSibs, TwoChildSibs), + "Merging a tree to itself is itself."), + + etap:is( + {TwoChildSibs, no_conflicts}, + couch_key_tree:merge(TwoChildSibs, Stemmed1b), + "Merging a tree with a stem." + ), + + etap:is( + {TwoChildSibs, no_conflicts}, + couch_key_tree:merge(Stemmed1b, TwoChildSibs), + "Merging in the opposite direction." + ), + + etap:is( + {TwoChildSibs2, no_conflicts}, + couch_key_tree:merge(TwoChildSibs2, Stemmed1bb), + "Merging a stem at a deeper level." + ), + + etap:is( + {TwoChildSibs2, no_conflicts}, + couch_key_tree:merge(Stemmed1bb, TwoChildSibs2), + "Merging a deeper level in opposite order." + ), + + etap:is( + {TwoChild, no_conflicts}, + couch_key_tree:merge(TwoChild, Stemmed1aa), + "Merging a single tree with a deeper stem." + ), + + etap:is( + {TwoChild, no_conflicts}, + couch_key_tree:merge(TwoChild, Stemmed1a), + "Merging a larger stem." + ), + + etap:is( + {Stemmed1a, no_conflicts}, + couch_key_tree:merge(Stemmed1a, Stemmed1aa), + "More merging." + ), + + Expect1 = OneChild ++ Stemmed1aa, + etap:is( + {Expect1, conflicts}, + couch_key_tree:merge(OneChild, Stemmed1aa), + "Merging should create conflicts." + ), + + etap:is( + {TwoChild, no_conflicts}, + couch_key_tree:merge(Expect1, TwoChild), + "Merge should have no conflicts." + ), + + ok. diff --git a/apps/couch/test/etap/061-kt-missing-leaves.t b/apps/couch/test/etap/061-kt-missing-leaves.t new file mode 100755 index 00000000..d60b4db8 --- /dev/null +++ b/apps/couch/test/etap/061-kt-missing-leaves.t @@ -0,0 +1,65 @@ +#!/usr/bin/env escript +%% -*- erlang -*- + +% Licensed under the Apache License, Version 2.0 (the "License"); you may not +% use this file except in compliance with the License. You may obtain a copy of +% the License at +% +% http://www.apache.org/licenses/LICENSE-2.0 +% +% Unless required by applicable law or agreed to in writing, software +% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +% License for the specific language governing permissions and limitations under +% the License. 
+ +main(_) -> + test_util:init_code_path(), + etap:plan(4), + case (catch test()) of + ok -> + etap:end_tests(); + Other -> + etap:diag(io_lib:format("Test died abnormally: ~p", [Other])), + etap:bail(Other) + end, + ok. + +test() -> + TwoChildSibs = [{0, {"1","foo", [{"1a", "bar", []}, {"1b", "bar", []}]}}], + Stemmed1 = [{1, {"1a", "bar", [{"1aa", "bar", []}]}}], + Stemmed2 = [{2, {"1aa", "bar", []}}], + + etap:is( + [], + couch_key_tree:find_missing(TwoChildSibs, [{0,"1"}, {1,"1a"}]), + "Look for missing keys." + ), + + etap:is( + [{0, "10"}, {100, "x"}], + couch_key_tree:find_missing( + TwoChildSibs, + [{0,"1"}, {0, "10"}, {1,"1a"}, {100, "x"}] + ), + "Look for missing keys." + ), + + etap:is( + [{0, "1"}, {100, "x"}], + couch_key_tree:find_missing( + Stemmed1, + [{0,"1"}, {1,"1a"}, {100, "x"}] + ), + "Look for missing keys." + ), + etap:is( + [{0, "1"}, {1,"1a"}, {100, "x"}], + couch_key_tree:find_missing( + Stemmed2, + [{0,"1"}, {1,"1a"}, {100, "x"}] + ), + "Look for missing keys." + ), + + ok. diff --git a/apps/couch/test/etap/062-kt-remove-leaves.t b/apps/couch/test/etap/062-kt-remove-leaves.t new file mode 100755 index 00000000..745a00be --- /dev/null +++ b/apps/couch/test/etap/062-kt-remove-leaves.t @@ -0,0 +1,69 @@ +#!/usr/bin/env escript +%% -*- erlang -*- + +% Licensed under the Apache License, Version 2.0 (the "License"); you may not +% use this file except in compliance with the License. You may obtain a copy of +% the License at +% +% http://www.apache.org/licenses/LICENSE-2.0 +% +% Unless required by applicable law or agreed to in writing, software +% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +% License for the specific language governing permissions and limitations under +% the License. + +main(_) -> + test_util:init_code_path(), + etap:plan(6), + case (catch test()) of + ok -> + etap:end_tests(); + Other -> + etap:diag(io_lib:format("Test died abnormally: ~p", [Other])), + etap:bail(Other) + end, + ok. + +test() -> + OneChild = [{0, {"1","foo",[{"1a", "bar", []}]}}], + TwoChildSibs = [{0, {"1","foo", [{"1a", "bar", []}, {"1b", "bar", []}]}}], + Stemmed = [{1, {"1a", "bar", [{"1aa", "bar", []}]}}], + + etap:is( + {TwoChildSibs, []}, + couch_key_tree:remove_leafs(TwoChildSibs, []), + "Removing no leaves has no effect on the tree." + ), + + etap:is( + {TwoChildSibs, []}, + couch_key_tree:remove_leafs(TwoChildSibs, [{0, "1"}]), + "Removing a non-existant branch has no effect." + ), + + etap:is( + {OneChild, [{1, "1b"}]}, + couch_key_tree:remove_leafs(TwoChildSibs, [{1, "1b"}]), + "Removing a leaf removes the leaf." + ), + + etap:is( + {[], [{1, "1b"},{1, "1a"}]}, + couch_key_tree:remove_leafs(TwoChildSibs, [{1, "1a"}, {1, "1b"}]), + "Removing all leaves returns an empty tree." + ), + + etap:is( + {Stemmed, []}, + couch_key_tree:remove_leafs(Stemmed, [{1, "1a"}]), + "Removing a non-existant node has no effect." + ), + + etap:is( + {[], [{2, "1aa"}]}, + couch_key_tree:remove_leafs(Stemmed, [{2, "1aa"}]), + "Removing the last leaf returns an empty tree." + ), + + ok. diff --git a/apps/couch/test/etap/063-kt-get-leaves.t b/apps/couch/test/etap/063-kt-get-leaves.t new file mode 100755 index 00000000..6d4e8007 --- /dev/null +++ b/apps/couch/test/etap/063-kt-get-leaves.t @@ -0,0 +1,98 @@ +#!/usr/bin/env escript +%% -*- erlang -*- + +% Licensed under the Apache License, Version 2.0 (the "License"); you may not +% use this file except in compliance with the License. 
You may obtain a copy of +% the License at +% +% http://www.apache.org/licenses/LICENSE-2.0 +% +% Unless required by applicable law or agreed to in writing, software +% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +% License for the specific language governing permissions and limitations under +% the License. + +main(_) -> + test_util:init_code_path(), + etap:plan(11), + case (catch test()) of + ok -> + etap:end_tests(); + Other -> + etap:diag(io_lib:format("Test died abnormally: ~p", [Other])), + etap:bail(Other) + end, + ok. + +test() -> + TwoChildSibs = [{0, {"1","foo", [{"1a", "bar", []}, {"1b", "bar", []}]}}], + Stemmed = [{1, {"1a", "bar", [{"1aa", "bar", []}]}}], + + etap:is( + {[{"foo", {0, ["1"]}}],[]}, + couch_key_tree:get(TwoChildSibs, [{0, "1"}]), + "extract a subtree." + ), + + etap:is( + {[{"bar", {1, ["1a", "1"]}}],[]}, + couch_key_tree:get(TwoChildSibs, [{1, "1a"}]), + "extract a subtree." + ), + + etap:is( + {[],[{0,"x"}]}, + couch_key_tree:get_key_leafs(TwoChildSibs, [{0, "x"}]), + "gather up the leaves." + ), + + etap:is( + {[{"bar", {1, ["1a","1"]}}],[]}, + couch_key_tree:get_key_leafs(TwoChildSibs, [{1, "1a"}]), + "gather up the leaves." + ), + + etap:is( + {[{"bar", {1, ["1a","1"]}},{"bar",{1, ["1b","1"]}}],[]}, + couch_key_tree:get_key_leafs(TwoChildSibs, [{0, "1"}]), + "gather up the leaves." + ), + + etap:is( + {[{0,[{"1", "foo"}]}],[]}, + couch_key_tree:get_full_key_paths(TwoChildSibs, [{0, "1"}]), + "retrieve full key paths." + ), + + etap:is( + {[{1,[{"1a", "bar"},{"1", "foo"}]}],[]}, + couch_key_tree:get_full_key_paths(TwoChildSibs, [{1, "1a"}]), + "retrieve full key paths." + ), + + etap:is( + [{2, [{"1aa", "bar"},{"1a", "bar"}]}], + couch_key_tree:get_all_leafs_full(Stemmed), + "retrieve all leaves." + ), + + etap:is( + [{1, [{"1a", "bar"},{"1", "foo"}]}, {1, [{"1b", "bar"},{"1", "foo"}]}], + couch_key_tree:get_all_leafs_full(TwoChildSibs), + "retrieve all the leaves." + ), + + etap:is( + [{"bar", {2, ["1aa","1a"]}}], + couch_key_tree:get_all_leafs(Stemmed), + "retrieve all leaves." + ), + + etap:is( + [{"bar", {1, ["1a", "1"]}}, {"bar", {1, ["1b","1"]}}], + couch_key_tree:get_all_leafs(TwoChildSibs), + "retrieve all the leaves." + ), + + ok. diff --git a/apps/couch/test/etap/064-kt-counting.t b/apps/couch/test/etap/064-kt-counting.t new file mode 100755 index 00000000..f182d287 --- /dev/null +++ b/apps/couch/test/etap/064-kt-counting.t @@ -0,0 +1,46 @@ +#!/usr/bin/env escript +%% -*- erlang -*- + +% Licensed under the Apache License, Version 2.0 (the "License"); you may not +% use this file except in compliance with the License. You may obtain a copy of +% the License at +% +% http://www.apache.org/licenses/LICENSE-2.0 +% +% Unless required by applicable law or agreed to in writing, software +% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +% License for the specific language governing permissions and limitations under +% the License. + +main(_) -> + test_util:init_code_path(), + etap:plan(4), + case (catch test()) of + ok -> + etap:end_tests(); + Other -> + etap:diag(io_lib:format("Test died abnormally: ~p", [Other])), + etap:bail(Other) + end, + ok. 
+ +test() -> + EmptyTree = [], + One = [{0, {"1","foo",[]}}], + TwoChildSibs = [{0, {"1","foo", [{"1a", "bar", []}, {"1b", "bar", []}]}}], + Stemmed = [{2, {"1bb", "boo", []}}], + + etap:is(0, couch_key_tree:count_leafs(EmptyTree), + "Empty trees have no leaves."), + + etap:is(1, couch_key_tree:count_leafs(One), + "Single node trees have a single leaf."), + + etap:is(2, couch_key_tree:count_leafs(TwoChildSibs), + "Two children siblings counted as two leaves."), + + etap:is(1, couch_key_tree:count_leafs(Stemmed), + "Stemming does not affect leaf counting."), + + ok. diff --git a/apps/couch/test/etap/065-kt-stemming.t b/apps/couch/test/etap/065-kt-stemming.t new file mode 100755 index 00000000..6e781c1d --- /dev/null +++ b/apps/couch/test/etap/065-kt-stemming.t @@ -0,0 +1,42 @@ +#!/usr/bin/env escript +%% -*- erlang -*- + +% Licensed under the Apache License, Version 2.0 (the "License"); you may not +% use this file except in compliance with the License. You may obtain a copy of +% the License at +% +% http://www.apache.org/licenses/LICENSE-2.0 +% +% Unless required by applicable law or agreed to in writing, software +% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +% License for the specific language governing permissions and limitations under +% the License. + +main(_) -> + test_util:init_code_path(), + etap:plan(3), + case (catch test()) of + ok -> + etap:end_tests(); + Other -> + etap:diag(io_lib:format("Test died abnormally: ~p", [Other])), + etap:bail(Other) + end, + ok. + +test() -> + TwoChild = [{0, {"1","foo", [{"1a", "bar", [{"1aa", "bar", []}]}]}}], + Stemmed1 = [{1, {"1a", "bar", [{"1aa", "bar", []}]}}], + Stemmed2 = [{2, {"1aa", "bar", []}}], + + etap:is(TwoChild, couch_key_tree:stem(TwoChild, 3), + "Stemming more levels than what exists does nothing."), + + etap:is(Stemmed1, couch_key_tree:stem(TwoChild, 2), + "Stemming with a depth of two returns the deepest two nodes."), + + etap:is(Stemmed2, couch_key_tree:stem(TwoChild, 1), + "Stemming to a depth of one returns the deepest node."), + + ok. diff --git a/apps/couch/test/etap/070-couch-db.t b/apps/couch/test/etap/070-couch-db.t new file mode 100755 index 00000000..4b14aba6 --- /dev/null +++ b/apps/couch/test/etap/070-couch-db.t @@ -0,0 +1,75 @@ +#!/usr/bin/env escript +%% -*- erlang -*- + +% Licensed under the Apache License, Version 2.0 (the "License"); you may not +% use this file except in compliance with the License. You may obtain a copy of +% the License at +% +% http://www.apache.org/licenses/LICENSE-2.0 +% +% Unless required by applicable law or agreed to in writing, software +% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +% License for the specific language governing permissions and limitations under +% the License. + +main(_) -> + test_util:init_code_path(), + + etap:plan(4), + case (catch test()) of + ok -> + etap:end_tests(); + Other -> + etap:diag(io_lib:format("Test died abnormally: ~p", [Other])), + etap:bail(Other) + end, + ok. 
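[Editorial note, not part of the patch: the loop in test() below lowers the open-database limit to 3 and then creates six lru-N databases, so the point of the assertions is that couch_server keeps serving opens past the limit, presumably by closing the least recently used idle handles rather than refusing new databases, as the lru- names hint. The same limit is the [couchdb] max_dbs_open setting that the 081-config-override tests later read back as 100 by default and override to 10. A minimal sketch, assuming couch_config was started with the default ini:]

    gen_server:call(couch_server, {set_max_dbs_open, 3}),     %% what the test below does at runtime
    "100" = couch_config:get("couchdb", "max_dbs_open"),      %% the shipped default, per 081-config-override.t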
+ +test() -> + + couch_server_sup:start_link( + ["etc/couchdb/default_dev.ini", "etc/couchdb/local_dev.ini"] + ), + + couch_db:create(<<"etap-test-db">>, []), + {ok, AllDbs} = couch_server:all_databases(), + etap:ok(lists:member(<<"etap-test-db">>, AllDbs), "Database was created."), + + couch_server:delete(<<"etap-test-db">>, []), + {ok, AllDbs2} = couch_server:all_databases(), + etap:ok(not lists:member(<<"etap-test-db">>, AllDbs2), + "Database was deleted."), + + gen_server:call(couch_server, {set_max_dbs_open, 3}), + MkDbName = fun(Int) -> list_to_binary("lru-" ++ integer_to_list(Int)) end, + + lists:foreach(fun(Int) -> + {ok, TestDbs} = couch_server:all_databases(), + ok = case lists:member(MkDbName(Int), TestDbs) of + true -> couch_server:delete(MkDbName(Int), []); + _ -> ok + end, + {ok, Db} = couch_db:create(MkDbName(Int), []), + ok = couch_db:close(Db) + end, lists:seq(1, 6)), + + {ok, AllDbs3} = couch_server:all_databases(), + NumCreated = lists:foldl(fun(Int, Acc) -> + true = lists:member(MkDbName(Int), AllDbs3), + Acc+1 + end, 0, lists:seq(1, 6)), + etap:is(6, NumCreated, "Created all databases."), + + lists:foreach(fun(Int) -> + ok = couch_server:delete(MkDbName(Int), []) + end, lists:seq(1, 6)), + + {ok, AllDbs4} = couch_server:all_databases(), + NumDeleted = lists:foldl(fun(Int, Acc) -> + false = lists:member(MkDbName(Int), AllDbs4), + Acc+1 + end, 0, lists:seq(1, 6)), + etap:is(6, NumDeleted, "Deleted all databases."), + + ok. diff --git a/apps/couch/test/etap/080-config-get-set.t b/apps/couch/test/etap/080-config-get-set.t new file mode 100755 index 00000000..a4a8577a --- /dev/null +++ b/apps/couch/test/etap/080-config-get-set.t @@ -0,0 +1,128 @@ +#!/usr/bin/env escript +%% -*- erlang -*- + +% Licensed under the Apache License, Version 2.0 (the "License"); you may not +% use this file except in compliance with the License. You may obtain a copy of +% the License at +% +% http://www.apache.org/licenses/LICENSE-2.0 +% +% Unless required by applicable law or agreed to in writing, software +% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +% License for the specific language governing permissions and limitations under +% the License. + +default_config() -> + test_util:build_file("etc/couchdb/default_dev.ini"). + +main(_) -> + test_util:init_code_path(), + etap:plan(12), + case (catch test()) of + ok -> + etap:end_tests(); + Other -> + etap:diag(io_lib:format("Test died abnormally: ~p", [Other])), + etap:bail(Other) + end, + ok. + +test() -> + % start couch_config with default + couch_config:start_link([default_config()]), + + + % Check that we can get values + + + etap:fun_is( + fun(List) -> length(List) > 0 end, + couch_config:all(), + "Data was loaded from the INI file." + ), + + etap:fun_is( + fun(List) -> length(List) > 0 end, + couch_config:get("daemons"), + "There are settings in the [daemons] section of the INI file." + ), + + etap:is( + couch_config:get("httpd_design_handlers", "_view"), + "{couch_httpd_view, handle_view_req}", + "The {httpd_design_handlers, view} is the expected default." + ), + + etap:is( + couch_config:get("httpd", "foo", "bar"), + "bar", + "Returns the default when key doesn't exist in config." + ), + + etap:is( + couch_config:get("httpd", "foo"), + undefined, + "The default default is the atom 'undefined'." + ), + + etap:is( + couch_config:get("httpd", "port", "bar"), + "5984", + "Only returns the default when the config setting does not exist." 
+ ), + + + % Check that setting values works. + + + ok = couch_config:set("log", "level", "severe", false), + + etap:is( + couch_config:get("log", "level"), + "severe", + "Non persisted changes take effect." + ), + + etap:is( + couch_config:get("new_section", "bizzle"), + undefined, + "Section 'new_section' does not exist." + ), + + ok = couch_config:set("new_section", "bizzle", "bang", false), + + etap:is( + couch_config:get("new_section", "bizzle"), + "bang", + "New section 'new_section' was created for a new key/value pair." + ), + + + % Check that deleting works + + + ok = couch_config:delete("new_section", "bizzle", false), + etap:is( + couch_config:get("new_section", "bizzle"), + undefined, + "Deleting sets the value to \"\"" + ), + + + % Check ge/set/delete binary strings + + ok = couch_config:set(<<"foo">>, <<"bar">>, <<"baz">>, false), + etap:is( + couch_config:get(<<"foo">>, <<"bar">>), + <<"baz">>, + "Can get and set with binary section and key values." + ), + ok = couch_config:delete(<<"foo">>, <<"bar">>, false), + etap:is( + couch_config:get(<<"foo">>, <<"bar">>), + undefined, + "Deleting with binary section/key pairs sets the value to \"\"" + ), + + ok. diff --git a/apps/couch/test/etap/081-config-override.1.ini b/apps/couch/test/etap/081-config-override.1.ini new file mode 100644 index 00000000..55451dad --- /dev/null +++ b/apps/couch/test/etap/081-config-override.1.ini @@ -0,0 +1,22 @@ +; Licensed to the Apache Software Foundation (ASF) under one +; or more contributor license agreements. See the NOTICE file +; distributed with this work for additional information +; regarding copyright ownership. The ASF licenses this file +; to you under the Apache License, Version 2.0 (the +; "License"); you may not use this file except in compliance +; with the License. You may obtain a copy of the License at +; +; http://www.apache.org/licenses/LICENSE-2.0 +; +; Unless required by applicable law or agreed to in writing, +; software distributed under the License is distributed on an +; "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +; KIND, either express or implied. See the License for the +; specific language governing permissions and limitations +; under the License. + +[couchdb] +max_dbs_open=10 + +[httpd] +port=4895 diff --git a/apps/couch/test/etap/081-config-override.2.ini b/apps/couch/test/etap/081-config-override.2.ini new file mode 100644 index 00000000..5f46357f --- /dev/null +++ b/apps/couch/test/etap/081-config-override.2.ini @@ -0,0 +1,22 @@ +; Licensed to the Apache Software Foundation (ASF) under one +; or more contributor license agreements. See the NOTICE file +; distributed with this work for additional information +; regarding copyright ownership. The ASF licenses this file +; to you under the Apache License, Version 2.0 (the +; "License"); you may not use this file except in compliance +; with the License. You may obtain a copy of the License at +; +; http://www.apache.org/licenses/LICENSE-2.0 +; +; Unless required by applicable law or agreed to in writing, +; software distributed under the License is distributed on an +; "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +; KIND, either express or implied. See the License for the +; specific language governing permissions and limitations +; under the License. 
+ +[httpd] +port = 80 + +[fizbang] +unicode = normalized diff --git a/apps/couch/test/etap/081-config-override.t b/apps/couch/test/etap/081-config-override.t new file mode 100755 index 00000000..01f8b4c2 --- /dev/null +++ b/apps/couch/test/etap/081-config-override.t @@ -0,0 +1,212 @@ +#!/usr/bin/env escript +%% -*- erlang -*- + +% Licensed under the Apache License, Version 2.0 (the "License"); you may not +% use this file except in compliance with the License. You may obtain a copy of +% the License at +% +% http://www.apache.org/licenses/LICENSE-2.0 +% +% Unless required by applicable law or agreed to in writing, software +% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +% License for the specific language governing permissions and limitations under +% the License. + +default_config() -> + test_util:build_file("etc/couchdb/default_dev.ini"). + +local_config_1() -> + test_util:source_file("test/etap/081-config-override.1.ini"). + +local_config_2() -> + test_util:source_file("test/etap/081-config-override.2.ini"). + +local_config_write() -> + test_util:build_file("test/etap/temp.081"). + +% Run tests and wait for the config gen_server to shutdown. +run_tests(IniFiles, Tests) -> + {ok, Pid} = couch_config:start_link(IniFiles), + erlang:monitor(process, Pid), + Tests(), + couch_config:stop(), + receive + {'DOWN', _, _, Pid, _} -> ok; + _Other -> etap:diag("OTHER: ~p~n", [_Other]) + after + 1000 -> throw({timeout_error, config_stop}) + end. + +main(_) -> + test_util:init_code_path(), + etap:plan(17), + + case (catch test()) of + ok -> + etap:end_tests(); + Other -> + etap:diag(io_lib:format("Test died abnormally: ~p", [Other])), + etap:bail(Other) + end, + ok. + +test() -> + + CheckStartStop = fun() -> ok end, + run_tests([default_config()], CheckStartStop), + + CheckDefaults = fun() -> + etap:is( + couch_config:get("couchdb", "max_dbs_open"), + "100", + "{couchdb, max_dbs_open} is 100 by defualt." + ), + + etap:is( + couch_config:get("httpd","port"), + "5984", + "{httpd, port} is 5984 by default" + ), + + etap:is( + couch_config:get("fizbang", "unicode"), + undefined, + "{fizbang, unicode} is undefined by default" + ) + end, + + run_tests([default_config()], CheckDefaults), + + + % Check that subsequent files override values appropriately + + CheckOverride = fun() -> + etap:is( + couch_config:get("couchdb", "max_dbs_open"), + "10", + "{couchdb, max_dbs_open} was overriden with the value 10" + ), + + etap:is( + couch_config:get("httpd", "port"), + "4895", + "{httpd, port} was overriden with the value 4895" + ) + end, + + run_tests([default_config(), local_config_1()], CheckOverride), + + + % Check that overrides can create new sections + + CheckOverride2 = fun() -> + etap:is( + couch_config:get("httpd", "port"), + "80", + "{httpd, port} is overriden with the value 80" + ), + + etap:is( + couch_config:get("fizbang", "unicode"), + "normalized", + "{fizbang, unicode} was created by override INI file" + ) + end, + + run_tests([default_config(), local_config_2()], CheckOverride2), + + + % Check that values can be overriden multiple times + + CheckOverride3 = fun() -> + etap:is( + couch_config:get("httpd", "port"), + "80", + "{httpd, port} value was taken from the last specified INI file." + ) + end, + + run_tests( + [default_config(), local_config_1(), local_config_2()], + CheckOverride3 + ), + + % Check persistence to last file. + + % Empty the file in case it exists. 
+ {ok, Fd} = file:open(local_config_write(), write), + ok = file:truncate(Fd), + ok = file:close(Fd), + + % Open and write a value + CheckCanWrite = fun() -> + etap:is( + couch_config:get("httpd", "port"), + "5984", + "{httpd, port} is still 5984 by default" + ), + + etap:is( + couch_config:set("httpd", "port", "8080"), + ok, + "Writing {httpd, port} is kosher." + ), + + etap:is( + couch_config:get("httpd", "port"), + "8080", + "{httpd, port} was updated to 8080 successfully." + ), + + etap:is( + couch_config:delete("httpd", "bind_address"), + ok, + "Deleting {httpd, bind_address} succeeds" + ), + + etap:is( + couch_config:get("httpd", "bind_address"), + undefined, + "{httpd, bind_address} was actually deleted." + ) + end, + + run_tests([default_config(), local_config_write()], CheckCanWrite), + + % Open and check where we don't expect persistence. + + CheckDidntWrite = fun() -> + etap:is( + couch_config:get("httpd", "port"), + "5984", + "{httpd, port} was not persisted to the primary INI file." + ), + + etap:is( + couch_config:get("httpd", "bind_address"), + "127.0.0.1", + "{httpd, bind_address} was not deleted form the primary INI file." + ) + end, + + run_tests([default_config()], CheckDidntWrite), + + % Open and check we have only the persistence we expect. + CheckDidWrite = fun() -> + etap:is( + couch_config:get("httpd", "port"), + "8080", + "{httpd, port} is still 8080 after reopening the config." + ), + + etap:is( + couch_config:get("httpd", "bind_address"), + undefined, + "{httpd, bind_address} is still \"\" after reopening." + ) + end, + + run_tests([local_config_write()], CheckDidWrite), + + ok. diff --git a/apps/couch/test/etap/082-config-register.t b/apps/couch/test/etap/082-config-register.t new file mode 100755 index 00000000..191ba8f8 --- /dev/null +++ b/apps/couch/test/etap/082-config-register.t @@ -0,0 +1,94 @@ +#!/usr/bin/env escript +%% -*- erlang -*- + +% Licensed under the Apache License, Version 2.0 (the "License"); you may not +% use this file except in compliance with the License. You may obtain a copy of +% the License at +% +% http://www.apache.org/licenses/LICENSE-2.0 +% +% Unless required by applicable law or agreed to in writing, software +% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +% License for the specific language governing permissions and limitations under +% the License. + +default_config() -> + test_util:build_file("etc/couchdb/default_dev.ini"). + +main(_) -> + test_util:init_code_path(), + etap:plan(5), + case (catch test()) of + ok -> + etap:end_tests(); + Other -> + etap:diag(io_lib:format("Test died abnormally: ~p", [Other])), + etap:bail(Other) + end, + ok. + +test() -> + couch_config:start_link([default_config()]), + + etap:is( + couch_config:get("httpd", "port"), + "5984", + "{httpd, port} is 5984 by default." + ), + + ok = couch_config:set("httpd", "port", "4895", false), + + etap:is( + couch_config:get("httpd", "port"), + "4895", + "{httpd, port} changed to 4895" + ), + + SentinelFunc = fun() -> + % Ping/Pong to make sure we wait for this + % process to die + receive {ping, From} -> From ! 
pong end + end, + SentinelPid = spawn(SentinelFunc), + + couch_config:register( + fun("httpd", "port", Value) -> + etap:is(Value, "8080", "Registered function got notification.") + end, + SentinelPid + ), + + ok = couch_config:set("httpd", "port", "8080", false), + + % Implicitly checking that we *don't* call the function + etap:is( + couch_config:get("httpd", "bind_address"), + "127.0.0.1", + "{httpd, bind_address} is not '0.0.0.0'" + ), + ok = couch_config:set("httpd", "bind_address", "0.0.0.0", false), + + % Ping-Pong kill process + SentinelPid ! {ping, self()}, + receive + _Any -> ok + after 1000 -> + throw({timeout_error, registered_pid}) + end, + + ok = couch_config:set("httpd", "port", "80", false), + etap:is( + couch_config:get("httpd", "port"), + "80", + "Implicitly test that the function got de-registered" + ), + + % test passing of Persist flag + couch_config:register( + fun("httpd", _, _, Persist) -> + etap:is(Persist, false) + end), + ok = couch_config:set("httpd", "port", "80", false), + + ok. diff --git a/apps/couch/test/etap/083-config-no-files.t b/apps/couch/test/etap/083-config-no-files.t new file mode 100755 index 00000000..675feb59 --- /dev/null +++ b/apps/couch/test/etap/083-config-no-files.t @@ -0,0 +1,55 @@ +#!/usr/bin/env escript +%% -*- erlang -*- + +% Licensed under the Apache License, Version 2.0 (the "License"); you may not +% use this file except in compliance with the License. You may obtain a copy of +% the License at +% +% http://www.apache.org/licenses/LICENSE-2.0 +% +% Unless required by applicable law or agreed to in writing, software +% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +% License for the specific language governing permissions and limitations under +% the License. + +default_config() -> + test_util:build_file("etc/couchdb/default_dev.ini"). + +main(_) -> + test_util:init_code_path(), + etap:plan(3), + case (catch test()) of + ok -> + etap:end_tests(); + Other -> + etap:diag(io_lib:format("Test died abnormally: ~p", [Other])), + etap:bail(Other) + end, + ok. + +test() -> + couch_config:start_link([]), + + etap:fun_is( + fun(KVPairs) -> length(KVPairs) == 0 end, + couch_config:all(), + "No INI files specified returns 0 key/value pairs." + ), + + ok = couch_config:set("httpd", "port", "80", false), + + etap:is( + couch_config:get("httpd", "port"), + "80", + "Created a new non-persisted k/v pair." + ), + + ok = couch_config:set("httpd", "bind_address", "127.0.0.1"), + etap:is( + couch_config:get("httpd", "bind_address"), + "127.0.0.1", + "Asking for a persistent key/value pair doesn't choke." + ), + + ok. diff --git a/apps/couch/test/etap/090-task-status.t b/apps/couch/test/etap/090-task-status.t new file mode 100755 index 00000000..b278de7f --- /dev/null +++ b/apps/couch/test/etap/090-task-status.t @@ -0,0 +1,209 @@ +#!/usr/bin/env escript +%% -*- erlang -*- + +% Licensed under the Apache License, Version 2.0 (the "License"); you may not +% use this file except in compliance with the License. You may obtain a copy of +% the License at +% +% http://www.apache.org/licenses/LICENSE-2.0 +% +% Unless required by applicable law or agreed to in writing, software +% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +% License for the specific language governing permissions and limitations under +% the License. 
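The registration mechanism exercised by 082-config-register.t above ties each callback to the process that registered it: when that process dies, the callback is dropped, which is why the test keeps a ping/pong sentinel alive for the duration. A minimal sketch of a long-lived subscriber built on the same calls (the watcher process and the log line are illustrative only):

    start_port_watcher() ->
        Owner = spawn(fun() -> receive stop -> ok end end),
        couch_config:register(
            fun("httpd", "port", NewValue) ->
                io:format("httpd port changed to ~s~n", [NewValue])
            end,
            Owner),
        Owner.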
+ +main(_) -> + test_util:init_code_path(), + etap:plan(16), + case (catch test()) of + ok -> + etap:end_tests(); + Other -> + etap:diag(io_lib:format("Test died abnormally: ~p", [Other])), + etap:bail(Other) + end, + ok. + +check_status(Pid,ListPropLists) -> + From = list_to_binary(pid_to_list(Pid)), + Element = lists:foldl( + fun(PropList,Acc) -> + case couch_util:get_value(pid,PropList) of + From -> + [PropList | Acc]; + _ -> + [] + end + end, + [], ListPropLists + ), + couch_util:get_value(status,hd(Element)). + +loop() -> + receive + {add, From} -> + Resp = couch_task_status:add_task("type", "task", "init"), + From ! {ok, self(), Resp}, + loop(); + {update, Status, From} -> + Resp = couch_task_status:update(Status), + From ! {ok, self(), Resp}, + loop(); + {update_frequency, Msecs, From} -> + Resp = couch_task_status:set_update_frequency(Msecs), + From ! {ok, self(), Resp}, + loop(); + {done, From} -> + From ! {ok, self(), ok} + end. + +call(Pid, Command) -> + Pid ! {Command, self()}, + wait(Pid). + +call(Pid, Command, Arg) -> + Pid ! {Command, Arg, self()}, + wait(Pid). + +wait(Pid) -> + receive + {ok, Pid, Msg} -> Msg + after 1000 -> + throw(timeout_error) + end. + +test() -> + {ok, TaskStatusPid} = couch_task_status:start_link(), + + TaskUpdater = fun() -> loop() end, + % create three updaters + Pid1 = spawn(TaskUpdater), + Pid2 = spawn(TaskUpdater), + Pid3 = spawn(TaskUpdater), + + ok = call(Pid1, add), + etap:is( + length(couch_task_status:all()), + 1, + "Started a task" + ), + + etap:is( + call(Pid1, add), + {add_task_error, already_registered}, + "Unable to register multiple tasks for a single Pid." + ), + + etap:is( + check_status(Pid1, couch_task_status:all()), + <<"init">>, + "Task status was set to 'init'." + ), + + call(Pid1,update,"running"), + etap:is( + check_status(Pid1,couch_task_status:all()), + <<"running">>, + "Status updated to 'running'." + ), + + + call(Pid2,add), + etap:is( + length(couch_task_status:all()), + 2, + "Started a second task." + ), + + etap:is( + check_status(Pid2, couch_task_status:all()), + <<"init">>, + "Second tasks's status was set to 'init'." + ), + + call(Pid2, update, "running"), + etap:is( + check_status(Pid2, couch_task_status:all()), + <<"running">>, + "Second task's status updated to 'running'." + ), + + + call(Pid3, add), + etap:is( + length(couch_task_status:all()), + 3, + "Registered a third task." + ), + + etap:is( + check_status(Pid3, couch_task_status:all()), + <<"init">>, + "Third tasks's status was set to 'init'." + ), + + call(Pid3, update, "running"), + etap:is( + check_status(Pid3, couch_task_status:all()), + <<"running">>, + "Third task's status updated to 'running'." + ), + + + call(Pid3, update_frequency, 500), + call(Pid3, update, "still running"), + etap:is( + check_status(Pid3, couch_task_status:all()), + <<"still running">>, + "Third task's status updated to 'still running'." + ), + + call(Pid3, update, "skip this update"), + etap:is( + check_status(Pid3, couch_task_status:all()), + <<"still running">>, + "Status update dropped because of frequency limit." + ), + + call(Pid3, update_frequency, 0), + call(Pid3, update, "don't skip"), + etap:is( + check_status(Pid3, couch_task_status:all()), + <<"don't skip">>, + "Status updated after reseting frequency limit." + ), + + + call(Pid1, done), + etap:is( + length(couch_task_status:all()), + 2, + "First task finished." + ), + + call(Pid2, done), + etap:is( + length(couch_task_status:all()), + 1, + "Second task finished." 
+ ), + + call(Pid3, done), + etap:is( + length(couch_task_status:all()), + 0, + "Third task finished." + ), + + erlang:monitor(process, TaskStatusPid), + couch_task_status:stop(), + receive + {'DOWN', _, _, TaskStatusPid, _} -> + ok + after + 1000 -> + throw(timeout_error) + end, + + ok. diff --git a/apps/couch/test/etap/100-ref-counter.t b/apps/couch/test/etap/100-ref-counter.t new file mode 100755 index 00000000..8f996d04 --- /dev/null +++ b/apps/couch/test/etap/100-ref-counter.t @@ -0,0 +1,114 @@ +#!/usr/bin/env escript +%% -*- erlang -*- + +% Licensed under the Apache License, Version 2.0 (the "License"); you may not +% use this file except in compliance with the License. You may obtain a copy of +% the License at +% +% http://www.apache.org/licenses/LICENSE-2.0 +% +% Unless required by applicable law or agreed to in writing, software +% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +% License for the specific language governing permissions and limitations under +% the License. + +main(_) -> + test_util:init_code_path(), + etap:plan(8), + case (catch test()) of + ok -> + etap:end_tests(); + Other -> + etap:diag(io_lib:format("Test died abnormally: ~p", [Other])), + etap:bail(Other) + end, + ok. + +loop() -> + receive + close -> ok + end. + +wait() -> + receive + {'DOWN', _, _, _, _} -> ok + after 1000 -> + throw(timeout_error) + end. + +test() -> + {ok, RefCtr} = couch_ref_counter:start([]), + + etap:is( + couch_ref_counter:count(RefCtr), + 1, + "A ref_counter is initialized with the calling process as a referer." + ), + + ChildPid1 = spawn(fun() -> loop() end), + + % This is largely implicit in that nothing else breaks + % as ok is just returned from gen_server:cast() + etap:is( + couch_ref_counter:drop(RefCtr, ChildPid1), + ok, + "Dropping an unknown Pid is ignored." + ), + + couch_ref_counter:add(RefCtr, ChildPid1), + etap:is( + couch_ref_counter:count(RefCtr), + 2, + "Adding a Pid to the ref_counter increases it's count." + ), + + couch_ref_counter:add(RefCtr, ChildPid1), + etap:is( + couch_ref_counter:count(RefCtr), + 2, + "Readding the same Pid maintains the count but increments it's refs." + ), + + couch_ref_counter:drop(RefCtr, ChildPid1), + etap:is( + couch_ref_counter:count(RefCtr), + 2, + "Droping the doubly added Pid only removes a ref, not a referer." + ), + + couch_ref_counter:drop(RefCtr, ChildPid1), + etap:is( + couch_ref_counter:count(RefCtr), + 1, + "Dropping the second ref drops the referer." + ), + + couch_ref_counter:add(RefCtr, ChildPid1), + etap:is( + couch_ref_counter:count(RefCtr), + 2, + "Sanity checking that the Pid was re-added." + ), + + erlang:monitor(process, ChildPid1), + ChildPid1 ! close, + wait(), + + CheckFun = fun + (Iter, nil) -> + case couch_ref_counter:count(RefCtr) of + 1 -> Iter; + _ -> nil + end; + (_, Acc) -> + Acc + end, + Result = lists:foldl(CheckFun, nil, lists:seq(1, 10000)), + etap:isnt( + Result, + nil, + "The referer count was decremented automatically on process exit." + ), + + ok. diff --git a/apps/couch/test/etap/110-replication-httpc.t b/apps/couch/test/etap/110-replication-httpc.t new file mode 100755 index 00000000..b534b648 --- /dev/null +++ b/apps/couch/test/etap/110-replication-httpc.t @@ -0,0 +1,134 @@ +#!/usr/bin/env escript +%% -*- erlang -*- + +% Licensed under the Apache License, Version 2.0 (the "License"); you may not +% use this file except in compliance with the License. 
You may obtain a copy of +% the License at +% +% http://www.apache.org/licenses/LICENSE-2.0 +% +% Unless required by applicable law or agreed to in writing, software +% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +% License for the specific language governing permissions and limitations under +% the License. + +%% XXX: Figure out how to -include("couch_rep.hrl") +-record(http_db, { + url, + auth = [], + resource = "", + headers = [ + {"User-Agent", "CouchDB/"++couch:version()}, + {"Accept", "application/json"}, + {"Accept-Encoding", "gzip"} + ], + qs = [], + method = get, + body = nil, + options = [ + {response_format,binary}, + {inactivity_timeout, 30000} + ], + retries = 10, + pause = 1, + conn = nil +}). + +server() -> "http://127.0.0.1:5984/". +dbname() -> "etap-test-db". + +config_files() -> + lists:map(fun test_util:build_file/1, [ + "etc/couchdb/default_dev.ini", + "etc/couchdb/local_dev.ini" + ]). + +main(_) -> + test_util:init_code_path(), + + etap:plan(6), + case (catch test()) of + ok -> + etap:end_tests(); + Other -> + etap:diag(io_lib:format("Test died abnormally: ~p", [Other])), + etap:bail(Other) + end, + ok. + +test() -> + couch_server_sup:start_link(config_files()), + ibrowse:start(), + crypto:start(), + + couch_server:delete(list_to_binary(dbname()), []), + {ok, Db} = couch_db:create(list_to_binary(dbname()), []), + + test_welcome(), + test_binary_url(), + test_put(), + test_qs(), + test_db_exists(), + + couch_db:close(Db), + couch_server:delete(list_to_binary(dbname()), []), + ok. + +test_welcome() -> + WelcomeReq = #http_db{url=server()}, + Expect = {[ + {<<"couchdb">>, <<"Welcome">>}, + {<<"version">>, list_to_binary(couch:version())} + ]}, + etap:is( + couch_rep_httpc:request(WelcomeReq), + Expect, + "welcome request with url-as-list" + ). + +test_binary_url() -> + Req = #http_db{url=list_to_binary(server())}, + Expect = {[ + {<<"couchdb">>, <<"Welcome">>}, + {<<"version">>, list_to_binary(couch:version())} + ]}, + etap:is( + couch_rep_httpc:request(Req), + Expect, + "welcome request with url-as-binary" + ). + +test_put() -> + Req = #http_db{ + url = server() ++ dbname() ++ "/", + resource = "test_put", + body = {[{<<"foo">>, <<"bar">>}]}, + method = put + }, + {Resp} = couch_rep_httpc:request(Req), + etap:ok(couch_util:get_value(<<"ok">>, Resp), "ok:true on upload"), + etap:is(<<"test_put">>, couch_util:get_value(<<"id">>, Resp), "id is correct"). + +test_qs() -> + Req = #http_db{ + url = server() ++ dbname() ++ "/", + resource = "foo", + qs = [ + {bar, true}, + {baz, 1.03}, + {bif, mochijson2:encode(<<"1-23456">>)} + ] + }, + Expect = server() ++ dbname() ++ "/foo?bar=true&baz=1.03&bif=\"1-23456\"", + etap:is( + couch_rep_httpc:full_url(Req), + Expect, + "query-string proplist encoding ok" + ). + +test_db_exists() -> + Req1 = #http_db{url=server() ++ dbname() ++ "/"}, + Req2 = #http_db{url=server() ++ dbname() ++ "_foo/"}, + etap:is(couch_rep_httpc:db_exists(Req1), Req1, "db_exists true check"). + % etap:is(couch_rep_httpc:db_exists(Req2), false, "db_exists false check"). 
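test_qs/0 above checks couch_rep_httpc:full_url/1, which expands the qs proplist of an #http_db{} record into the request URL. The same encoding can be approximated with plain string handling (a sketch only; the real function also JSON-encodes values such as the "1-23456" revision in the test):

    url_with_qs(Base, Props) ->
        Pairs = [lists:concat([K, "=", V]) || {K, V} <- Props],
        Base ++ "?" ++ string:join(Pairs, "&").

    %% url_with_qs("http://127.0.0.1:5984/etap-test-db/foo",
    %%             [{"bar", "true"}, {"baz", "1.03"}]) returns
    %% "http://127.0.0.1:5984/etap-test-db/foo?bar=true&baz=1.03"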
diff --git a/apps/couch/test/etap/111-replication-changes-feed.t b/apps/couch/test/etap/111-replication-changes-feed.t new file mode 100755 index 00000000..bca12bc7 --- /dev/null +++ b/apps/couch/test/etap/111-replication-changes-feed.t @@ -0,0 +1,254 @@ +#!/usr/bin/env escript +%% -*- erlang -*- + +% Licensed under the Apache License, Version 2.0 (the "License"); you may not +% use this file except in compliance with the License. You may obtain a copy of +% the License at +% +% http://www.apache.org/licenses/LICENSE-2.0 +% +% Unless required by applicable law or agreed to in writing, software +% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +% License for the specific language governing permissions and limitations under +% the License. + +%% XXX: Figure out how to -include("couch_db.hrl") +-record(doc, {id= <<"">>, revs={0, []}, body={[]}, + attachments=[], deleted=false, meta=[]}). + +-record(http_db, { + url, + auth = [], + resource = "", + headers = [ + {"User-Agent", "CouchDB/"++couch:version()}, + {"Accept", "application/json"}, + {"Accept-Encoding", "gzip"} + ], + qs = [], + method = get, + body = nil, + options = [ + {response_format,binary}, + {inactivity_timeout, 30000} + ], + retries = 10, + pause = 1, + conn = nil +}). + +config_files() -> + lists:map(fun test_util:build_file/1, [ + "etc/couchdb/default_dev.ini", + "etc/couchdb/local_dev.ini" + ]). + +main(_) -> + test_util:init_code_path(), + + etap:plan(13), + case (catch test()) of + ok -> + etap:end_tests(); + Other -> + etap:diag(io_lib:format("Test died abnormally: ~p", [Other])), + etap:bail(Other) + end, + ok. + +test() -> + couch_server_sup:start_link(config_files()), + ibrowse:start(), + crypto:start(), + + couch_server:delete(<<"etap-test-db">>, []), + {ok, Db1} = couch_db:create(<<"etap-test-db">>, []), + test_all(local), + couch_db:close(Db1), + couch_server:delete(<<"etap-test-db">>, []), + + couch_server:delete(<<"etap-test-db">>, []), + {ok, Db2} = couch_db:create(<<"etap-test-db">>, []), + test_all(remote), + test_remote_only(), + couch_db:close(Db2), + couch_server:delete(<<"etap-test-db">>, []), + + ok. + +test_all(Type) -> + test_unchanged_db(Type), + test_simple_change(Type), + test_since_parameter(Type), + test_continuous_parameter(Type), + test_conflicts(Type), + test_deleted_conflicts(Type). + +test_remote_only() -> + test_chunk_reassembly(remote). + +test_unchanged_db(Type) -> + {ok, Pid} = start_changes_feed(Type, 0, false), + etap:is( + couch_rep_changes_feed:next(Pid), + complete, + io_lib:format( + "(~p) changes feed for unchanged DB is automatically complete", + [Type]) + ). + +test_simple_change(Type) -> + Expect = generate_change(), + {ok, Pid} = start_changes_feed(Type, 0, false), + etap:is( + {couch_rep_changes_feed:next(Pid), couch_rep_changes_feed:next(Pid)}, + {[Expect], complete}, + io_lib:format("(~p) change one document, get one row", [Type]) + ). + +test_since_parameter(Type) -> + {ok, Pid} = start_changes_feed(Type, get_update_seq(), false), + etap:is( + couch_rep_changes_feed:next(Pid), + complete, + io_lib:format( + "(~p) since query-string parameter allows us to skip changes", + [Type]) + ). + +test_continuous_parameter(Type) -> + {ok, Pid} = start_changes_feed(Type, get_update_seq(), true), + + % make the changes_feed request before the next update + Self = self(), + spawn(fun() -> + Change = couch_rep_changes_feed:next(Pid), + Self ! 
{actual, Change}
+    end),
+
+    Expect = generate_change(),
+    etap:is(
+        receive {actual, Actual} -> Actual end,
+        [Expect],
+        io_lib:format(
+            "(~p) feed=continuous query-string parameter picks up new changes",
+            [Type])
+    ),
+
+    ok = couch_rep_changes_feed:stop(Pid).
+
+test_conflicts(Type) ->
+    Since = get_update_seq(),
+    Expect = generate_conflict(),
+    {ok, Pid} = start_changes_feed(Type, Since, false),
+    etap:is(
+        {couch_rep_changes_feed:next(Pid), couch_rep_changes_feed:next(Pid)},
+        {[Expect], complete},
+        io_lib:format("(~p) conflict revisions show up in feed", [Type])
+    ).
+
+test_deleted_conflicts(Type) ->
+    Since = get_update_seq(),
+    {ExpectProps} = generate_conflict(),
+
+    %% delete the conflict revision
+    Id = couch_util:get_value(<<"id">>, ExpectProps),
+    [Win, {[{<<"rev">>, Lose}]}] = couch_util:get_value(<<"changes">>, ExpectProps),
+    Doc = couch_doc:from_json_obj({[
+        {<<"_id">>, Id},
+        {<<"_rev">>, Lose},
+        {<<"_deleted">>, true}
+    ]}),
+    Db = get_db(),
+    {ok, Rev} = couch_db:update_doc(Db, Doc, [full_commit]),
+    couch_db:close(Db),
+
+    Expect = {[
+        {<<"seq">>, get_update_seq()},
+        {<<"id">>, Id},
+        {<<"changes">>, [Win, {[{<<"rev">>, couch_doc:rev_to_str(Rev)}]}]}
+    ]},
+
+    {ok, Pid} = start_changes_feed(Type, Since, false),
+    etap:is(
+        {couch_rep_changes_feed:next(Pid), couch_rep_changes_feed:next(Pid)},
+        {[Expect], complete},
+        io_lib:format("(~p) deleted conflict revisions show up in feed", [Type])
+    ).
+
+test_chunk_reassembly(Type) ->
+    Since = get_update_seq(),
+    Expect = [generate_change() || _I <- lists:seq(1,30)],
+    {ok, Pid} = start_changes_feed(Type, Since, false),
+    etap:is(
+        get_all_changes(Pid, []),
+        Expect,
+        io_lib:format("(~p) reassembles chunks split across TCP frames",
+            [Type])
+    ).
+
+get_all_changes(Pid, Acc) ->
+    case couch_rep_changes_feed:next(Pid) of
+    complete ->
+        lists:flatten(lists:reverse(Acc));
+    Else ->
+        get_all_changes(Pid, [Else|Acc])
+    end.
+
+generate_change() ->
+    generate_change(couch_uuids:random()).
+
+generate_change(Id) ->
+    generate_change(Id, {[]}).
+
+generate_change(Id, EJson) ->
+    Doc = couch_doc:from_json_obj(EJson),
+    Db = get_db(),
+    {ok, Rev} = couch_db:update_doc(Db, Doc#doc{id = Id}, [full_commit]),
+    couch_db:close(Db),
+    {[
+        {<<"seq">>, get_update_seq()},
+        {<<"id">>, Id},
+        {<<"changes">>, [{[{<<"rev">>, couch_doc:rev_to_str(Rev)}]}]}
+    ]}.
+
+generate_conflict() ->
+    Id = couch_uuids:random(),
+    Db = get_db(),
+    Doc1 = (couch_doc:from_json_obj({[<<"foo">>, <<"bar">>]}))#doc{id = Id},
+    Doc2 = (couch_doc:from_json_obj({[<<"foo">>, <<"baz">>]}))#doc{id = Id},
+    {ok, Rev1} = couch_db:update_doc(Db, Doc1, [full_commit]),
+    {ok, Rev2} = couch_db:update_doc(Db, Doc2, [full_commit, all_or_nothing]),
+
+    %% relies on undocumented CouchDB conflict winner algo and revision sorting!
+    RevList = [{[{<<"rev">>, couch_doc:rev_to_str(R)}]} || R
+        <- lists:sort(fun(A,B) -> B<A end, [Rev1, Rev2])],
+    {[
+        {<<"seq">>, get_update_seq()},
+        {<<"id">>, Id},
+        {<<"changes">>, RevList}
+    ]}.
+
+get_db() ->
+    {ok, Db} = couch_db:open(<<"etap-test-db">>, []),
+    Db.
+
+get_dbname(local) ->
+    "etap-test-db";
+get_dbname(remote) ->
+    "http://127.0.0.1:5984/etap-test-db/".
+
+get_update_seq() ->
+    Db = get_db(),
+    Seq = couch_db:get_update_seq(Db),
+    couch_db:close(Db),
+    Seq.
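The "undocumented conflict winner" remark in generate_conflict/0 comes down to Erlang term ordering on {Position, RevId} revision tuples: sorting the two conflicting revisions in descending order puts the revision the feed is expected to list first at the head of the changes row. A standalone illustration with made-up revision ids:

    Revs = [{1, <<"aaa">>}, {1, <<"zzz">>}],
    %% Descending sort: higher position first, then the lexically greater id.
    [{1, <<"zzz">>}, {1, <<"aaa">>}] =
        lists:sort(fun(A, B) -> B < A end, Revs).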
+ +start_changes_feed(local, Since, Continuous) -> + Props = [{<<"continuous">>, Continuous}], + couch_rep_changes_feed:start_link(self(), get_db(), Since, Props); +start_changes_feed(remote, Since, Continuous) -> + Props = [{<<"continuous">>, Continuous}], + Db = #http_db{url = get_dbname(remote)}, + couch_rep_changes_feed:start_link(self(), Db, Since, Props). diff --git a/apps/couch/test/etap/112-replication-missing-revs.t b/apps/couch/test/etap/112-replication-missing-revs.t new file mode 100755 index 00000000..ea8466f6 --- /dev/null +++ b/apps/couch/test/etap/112-replication-missing-revs.t @@ -0,0 +1,195 @@ +#!/usr/bin/env escript +%% -*- erlang -*- + +% Licensed under the Apache License, Version 2.0 (the "License"); you may not +% use this file except in compliance with the License. You may obtain a copy of +% the License at +% +% http://www.apache.org/licenses/LICENSE-2.0 +% +% Unless required by applicable law or agreed to in writing, software +% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +% License for the specific language governing permissions and limitations under +% the License. + +%% XXX: Figure out how to -include("couch_db.hrl") + +-record(doc, {id= <<"">>, revs={0, []}, body={[]}, + attachments=[], deleted=false, meta=[]}). + +-record(http_db, { + url, + auth = [], + resource = "", + headers = [ + {"User-Agent", "CouchDB/"++couch:version()}, + {"Accept", "application/json"}, + {"Accept-Encoding", "gzip"} + ], + qs = [], + method = get, + body = nil, + options = [ + {response_format,binary}, + {inactivity_timeout, 30000} + ], + retries = 10, + pause = 1, + conn = nil +}). + +config_files() -> + lists:map(fun test_util:build_file/1, [ + "etc/couchdb/default_dev.ini", + "etc/couchdb/local_dev.ini" + ]). + +main(_) -> + test_util:init_code_path(), + + etap:plan(12), + case (catch test()) of + ok -> + etap:end_tests(); + Other -> + etap:diag(io_lib:format("Test died abnormally: ~p", [Other])), + etap:bail(Other) + end, + ok. + +test() -> + couch_server_sup:start_link(config_files()), + ibrowse:start(), + crypto:start(), + + couch_server:delete(<<"etap-test-source">>, []), + couch_server:delete(<<"etap-test-target">>, []), + + Dbs1 = setup(), + test_all(local, local), + ok = teardown(Dbs1), + + Dbs2 = setup(), + test_all(local, remote), + ok = teardown(Dbs2), + + Dbs3 = setup(), + test_all(remote, local), + ok = teardown(Dbs3), + + Dbs4 = setup(), + test_all(remote, remote), + ok = teardown(Dbs4), + + ok. + +test_all(SrcType, TgtType) -> + test_unchanged_db(SrcType, TgtType), + test_multiple_changes(SrcType, TgtType), + test_changes_not_missing(SrcType, TgtType). + +test_unchanged_db(SrcType, TgtType) -> + {ok, Pid1} = start_changes_feed(SrcType, 0, false), + {ok, Pid2} = start_missing_revs(TgtType, Pid1), + etap:is( + couch_rep_missing_revs:next(Pid2), + complete, + io_lib:format( + "(~p, ~p) no missing revs if source is unchanged", + [SrcType, TgtType]) + ). + +test_multiple_changes(SrcType, TgtType) -> + Expect = {2, [generate_change(), generate_change()]}, + {ok, Pid1} = start_changes_feed(SrcType, 0, false), + {ok, Pid2} = start_missing_revs(TgtType, Pid1), + etap:is( + get_all_missing_revs(Pid2, {0, []}), + Expect, + io_lib:format("(~p, ~p) add src docs, get missing tgt revs + high seq", + [SrcType, TgtType]) + ). 
+ +test_changes_not_missing(SrcType, TgtType) -> + %% put identical changes on source and target + Id = couch_uuids:random(), + {Id, _Seq, [Rev]} = Expect = generate_change(Id, {[]}, get_db(source)), + {Id, _, [Rev]} = generate_change(Id, {[]}, get_db(target)), + + %% confirm that this change is not in missing revs feed + {ok, Pid1} = start_changes_feed(SrcType, 0, false), + {ok, Pid2} = start_missing_revs(TgtType, Pid1), + {HighSeq, AllRevs} = get_all_missing_revs(Pid2, {0, []}), + + %% etap:none/3 has a bug, so just define it correctly here + etap:is( + lists:member(Expect, AllRevs), + false, + io_lib:format( + "(~p, ~p) skip revs that already exist on target", + [SrcType, TgtType]) + ). + +generate_change() -> + generate_change(couch_uuids:random()). + +generate_change(Id) -> + generate_change(Id, {[]}). + +generate_change(Id, EJson) -> + generate_change(Id, EJson, get_db(source)). + +generate_change(Id, EJson, Db) -> + Doc = couch_doc:from_json_obj(EJson), + Seq = get_update_seq(), + {ok, Rev} = couch_db:update_doc(Db, Doc#doc{id = Id}, [full_commit]), + couch_db:close(Db), + {Id, Seq+1, [Rev]}. + +get_all_missing_revs(Pid, {HighSeq, Revs}) -> + case couch_rep_missing_revs:next(Pid) of + complete -> + {HighSeq, lists:flatten(lists:reverse(Revs))}; + {Seq, More} -> + get_all_missing_revs(Pid, {Seq, [More|Revs]}) + end. + +get_db(source) -> + {ok, Db} = couch_db:open(<<"etap-test-source">>, []), + Db; +get_db(target) -> + {ok, Db} = couch_db:open(<<"etap-test-target">>, []), + Db. + +get_update_seq() -> + Db = get_db(source), + Seq = couch_db:get_update_seq(Db), + couch_db:close(Db), + Seq. + +setup() -> + {ok, DbA} = couch_db:create(<<"etap-test-source">>, []), + {ok, DbB} = couch_db:create(<<"etap-test-target">>, []), + [DbA, DbB]. + +teardown([DbA, DbB]) -> + couch_db:close(DbA), + couch_db:close(DbB), + couch_server:delete(<<"etap-test-source">>, []), + couch_server:delete(<<"etap-test-target">>, []), + ok. + +start_changes_feed(local, Since, Continuous) -> + Props = [{<<"continuous">>, Continuous}], + couch_rep_changes_feed:start_link(self(), get_db(source), Since, Props); +start_changes_feed(remote, Since, Continuous) -> + Props = [{<<"continuous">>, Continuous}], + Db = #http_db{url = "http://127.0.0.1:5984/etap-test-source/"}, + couch_rep_changes_feed:start_link(self(), Db, Since, Props). + +start_missing_revs(local, Changes) -> + couch_rep_missing_revs:start_link(self(), get_db(target), Changes, []); +start_missing_revs(remote, Changes) -> + Db = #http_db{url = "http://127.0.0.1:5984/etap-test-target/"}, + couch_rep_missing_revs:start_link(self(), Db, Changes, []). diff --git a/apps/couch/test/etap/113-replication-attachment-comp.t b/apps/couch/test/etap/113-replication-attachment-comp.t new file mode 100755 index 00000000..30f602ef --- /dev/null +++ b/apps/couch/test/etap/113-replication-attachment-comp.t @@ -0,0 +1,273 @@ +#!/usr/bin/env escript +%% -*- erlang -*- + +% Licensed under the Apache License, Version 2.0 (the "License"); you may not +% use this file except in compliance with the License. You may obtain a copy of +% the License at +% +% http://www.apache.org/licenses/LICENSE-2.0 +% +% Unless required by applicable law or agreed to in writing, software +% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +% License for the specific language governing permissions and limitations under +% the License. + +-record(user_ctx, { + name = null, + roles = [], + handler +}). 
+ +default_config() -> + test_util:build_file("etc/couchdb/default_dev.ini"). + +test_db_a_name() -> + <<"couch_test_rep_att_comp_a">>. + +test_db_b_name() -> + <<"couch_test_rep_att_comp_b">>. + +main(_) -> + test_util:init_code_path(), + etap:plan(30), + case (catch test()) of + ok -> + etap:end_tests(); + Other -> + etap:diag(io_lib:format("Test died abnormally: ~p", [Other])), + etap:bail(Other) + end, + ok. + +test() -> + couch_server_sup:start_link([default_config()]), + put(addr, couch_config:get("httpd", "bind_address", "127.0.0.1")), + put(port, couch_config:get("httpd", "port", "5984")), + application:start(inets), + ibrowse:start(), + timer:sleep(1000), + + % + % test pull replication + % + + delete_db(test_db_a_name()), + delete_db(test_db_b_name()), + create_db(test_db_a_name()), + create_db(test_db_b_name()), + + % enable compression + couch_config:set("attachments", "compression_level", "8"), + couch_config:set("attachments", "compressible_types", "text/*"), + + % store doc with text attachment in DB A + put_text_att(test_db_a_name()), + + % disable attachment compression + couch_config:set("attachments", "compression_level", "0"), + + % do pull replication + do_pull_replication(test_db_a_name(), test_db_b_name()), + + % verify that DB B has the attachment stored in compressed form + check_att_is_compressed(test_db_b_name()), + check_server_can_decompress_att(test_db_b_name()), + check_att_stubs(test_db_a_name(), test_db_b_name()), + + % + % test push replication + % + + delete_db(test_db_a_name()), + delete_db(test_db_b_name()), + create_db(test_db_a_name()), + create_db(test_db_b_name()), + + % enable compression + couch_config:set("attachments", "compression_level", "8"), + couch_config:set("attachments", "compressible_types", "text/*"), + + % store doc with text attachment in DB A + put_text_att(test_db_a_name()), + + % disable attachment compression + couch_config:set("attachments", "compression_level", "0"), + + % do push replication + do_push_replication(test_db_a_name(), test_db_b_name()), + + % verify that DB B has the attachment stored in compressed form + check_att_is_compressed(test_db_b_name()), + check_server_can_decompress_att(test_db_b_name()), + check_att_stubs(test_db_a_name(), test_db_b_name()), + + timer:sleep(3000), % to avoid mochiweb socket closed exceptions + delete_db(test_db_a_name()), + delete_db(test_db_b_name()), + couch_server_sup:stop(), + ok. + +put_text_att(DbName) -> + {ok, {{_, Code, _}, _Headers, _Body}} = http:request( + put, + {db_url(DbName) ++ "/testdoc1/readme.txt", [], + "text/plain", test_text_data()}, + [], + [{sync, true}]), + etap:is(Code, 201, "Created text attachment"), + ok. + +do_pull_replication(SourceDbName, TargetDbName) -> + RepObj = {[ + {<<"source">>, list_to_binary(db_url(SourceDbName))}, + {<<"target">>, TargetDbName} + ]}, + {ok, {{_, Code, _}, _Headers, Body}} = http:request( + post, + {rep_url(), [], + "application/json", list_to_binary(couch_util:json_encode(RepObj))}, + [], + [{sync, true}]), + etap:is(Code, 200, "Pull replication successfully triggered"), + Json = couch_util:json_decode(Body), + RepOk = couch_util:get_nested_json_value(Json, [<<"ok">>]), + etap:is(RepOk, true, "Pull replication completed with success"), + ok. 
+ +do_push_replication(SourceDbName, TargetDbName) -> + RepObj = {[ + {<<"source">>, SourceDbName}, + {<<"target">>, list_to_binary(db_url(TargetDbName))} + ]}, + {ok, {{_, Code, _}, _Headers, Body}} = http:request( + post, + {rep_url(), [], + "application/json", list_to_binary(couch_util:json_encode(RepObj))}, + [], + [{sync, true}]), + etap:is(Code, 200, "Push replication successfully triggered"), + Json = couch_util:json_decode(Body), + RepOk = couch_util:get_nested_json_value(Json, [<<"ok">>]), + etap:is(RepOk, true, "Push replication completed with success"), + ok. + +check_att_is_compressed(DbName) -> + {ok, {{_, Code, _}, Headers, Body}} = http:request( + get, + {db_url(DbName) ++ "/testdoc1/readme.txt", + [{"Accept-Encoding", "gzip"}]}, + [], + [{sync, true}]), + etap:is(Code, 200, "HTTP response code for the attachment request is 200"), + Gziped = lists:member({"content-encoding", "gzip"}, Headers), + etap:is(Gziped, true, "The attachment was received in compressed form"), + Uncompressed = binary_to_list(zlib:gunzip(list_to_binary(Body))), + etap:is( + Uncompressed, + test_text_data(), + "The attachment content is valid after decompression at the client side" + ), + ok. + +check_server_can_decompress_att(DbName) -> + {ok, {{_, Code, _}, Headers, Body}} = http:request( + get, + {db_url(DbName) ++ "/testdoc1/readme.txt", []}, + [], + [{sync, true}]), + etap:is(Code, 200, "HTTP response code for the attachment request is 200"), + Gziped = lists:member({"content-encoding", "gzip"}, Headers), + etap:is( + Gziped, false, "The attachment was not received in compressed form" + ), + etap:is( + Body, + test_text_data(), + "The attachment content is valid after server decompression" + ), + ok. + +check_att_stubs(SourceDbName, TargetDbName) -> + {ok, {{_, Code1, _}, _Headers1, Body1}} = http:request( + get, + {db_url(SourceDbName) ++ "/testdoc1?att_encoding_info=true", []}, + [], + [{sync, true}]), + etap:is( + Code1, + 200, + "HTTP response code is 200 for the source DB doc request" + ), + Json1 = couch_util:json_decode(Body1), + SourceAttStub = couch_util:get_nested_json_value( + Json1, + [<<"_attachments">>, <<"readme.txt">>] + ), + {ok, {{_, Code2, _}, _Headers2, Body2}} = http:request( + get, + {db_url(TargetDbName) ++ "/testdoc1?att_encoding_info=true", []}, + [], + [{sync, true}]), + etap:is( + Code2, + 200, + "HTTP response code is 200 for the target DB doc request" + ), + Json2 = couch_util:json_decode(Body2), + TargetAttStub = couch_util:get_nested_json_value( + Json2, + [<<"_attachments">>, <<"readme.txt">>] + ), + IdenticalStubs = (SourceAttStub =:= TargetAttStub), + etap:is(IdenticalStubs, true, "Attachment stubs are identical"), + TargetAttStubLength = couch_util:get_nested_json_value( + TargetAttStub, + [<<"length">>] + ), + TargetAttStubEnc = couch_util:get_nested_json_value( + TargetAttStub, + [<<"encoding">>] + ), + etap:is( + TargetAttStubEnc, + <<"gzip">>, + "Attachment stub has encoding property set to gzip" + ), + TargetAttStubEncLength = couch_util:get_nested_json_value( + TargetAttStub, + [<<"encoded_length">>] + ), + EncLengthDefined = is_integer(TargetAttStubEncLength), + etap:is( + EncLengthDefined, + true, + "Stubs have the encoded_length field properly defined" + ), + EncLengthSmaller = (TargetAttStubEncLength < TargetAttStubLength), + etap:is( + EncLengthSmaller, + true, + "Stubs have the encoded_length field smaller than their length field" + ), + ok. + +admin_user_ctx() -> + {user_ctx, #user_ctx{roles=[<<"_admin">>]}}. 
+ +create_db(DbName) -> + {ok, _} = couch_db:create(DbName, [admin_user_ctx()]). + +delete_db(DbName) -> + couch_server:delete(DbName, [admin_user_ctx()]). + +db_url(DbName) -> + "http://" ++ get(addr) ++ ":" ++ get(port) ++ "/" ++ + binary_to_list(DbName). + +rep_url() -> + "http://" ++ get(addr) ++ ":" ++ get(port) ++ "/_replicate". + +test_text_data() -> + {ok, Data} = file:read_file(test_util:source_file("README")), + binary_to_list(Data). diff --git a/apps/couch/test/etap/120-stats-collect.t b/apps/couch/test/etap/120-stats-collect.t new file mode 100755 index 00000000..dee88765 --- /dev/null +++ b/apps/couch/test/etap/120-stats-collect.t @@ -0,0 +1,150 @@ +#!/usr/bin/env escript +%% -*- erlang -*- + +% Licensed under the Apache License, Version 2.0 (the "License"); you may not +% use this file except in compliance with the License. You may obtain a copy of +% the License at +% +% http://www.apache.org/licenses/LICENSE-2.0 +% +% Unless required by applicable law or agreed to in writing, software +% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +% License for the specific language governing permissions and limitations under +% the License. + +main(_) -> + test_util:init_code_path(), + etap:plan(11), + case (catch test()) of + ok -> + etap:end_tests(); + Other -> + etap:diag(io_lib:format("Test died abnormally: ~p", [Other])), + etap:bail() + end, + ok. + +test() -> + couch_stats_collector:start(), + ok = test_counters(), + ok = test_abs_values(), + ok = test_proc_counting(), + ok = test_all(), + ok. + +test_counters() -> + AddCount = fun() -> couch_stats_collector:increment(foo) end, + RemCount = fun() -> couch_stats_collector:decrement(foo) end, + repeat(AddCount, 100), + repeat(RemCount, 25), + repeat(AddCount, 10), + repeat(RemCount, 5), + etap:is( + couch_stats_collector:get(foo), + 80, + "Incrememnt tracks correctly." + ), + + repeat(RemCount, 80), + etap:is( + couch_stats_collector:get(foo), + 0, + "Decremented to zaro." + ), + ok. + +test_abs_values() -> + lists:map(fun(Val) -> + couch_stats_collector:record(bar, Val) + end, lists:seq(1, 15)), + etap:is( + couch_stats_collector:get(bar), + lists:seq(1, 15), + "Absolute values are recorded correctly." + ), + + couch_stats_collector:clear(bar), + etap:is( + couch_stats_collector:get(bar), + nil, + "Absolute values are cleared correctly." + ), + ok. + +test_proc_counting() -> + Self = self(), + OnePid = spawn(fun() -> + couch_stats_collector:track_process_count(hoopla), + Self ! reporting, + receive sepuku -> ok end + end), + R1 = erlang:monitor(process, OnePid), + receive reporting -> ok end, + etap:is( + couch_stats_collector:get(hoopla), + 1, + "track_process_count incrememnts the counter." + ), + + TwicePid = spawn(fun() -> + couch_stats_collector:track_process_count(hoopla), + couch_stats_collector:track_process_count(hoopla), + Self ! reporting, + receive sepuku -> ok end + end), + R2 = erlang:monitor(process, TwicePid), + receive reporting -> ok end, + etap:is( + couch_stats_collector:get(hoopla), + 3, + "track_process_count allows more than one incrememnt per Pid" + ), + + OnePid ! sepuku, + receive {'DOWN', R1, _, _, _} -> ok end, + timer:sleep(250), + etap:is( + couch_stats_collector:get(hoopla), + 2, + "Process count is decremented when process exits." + ), + + TwicePid ! 
sepuku, + receive {'DOWN', R2, _, _, _} -> ok end, + timer:sleep(250), + etap:is( + couch_stats_collector:get(hoopla), + 0, + "Process count is decremented for each call to track_process_count." + ), + ok. + +test_all() -> + couch_stats_collector:record(bar, 0.0), + couch_stats_collector:record(bar, 1.0), + etap:is( + couch_stats_collector:all(), + [{foo, 0}, {hoopla, 0}, {bar, [1.0, 0.0]}], + "all/0 returns all counters and absolute values." + ), + + etap:is( + couch_stats_collector:all(incremental), + [{foo, 0}, {hoopla, 0}], + "all/1 returns only the specified type." + ), + + couch_stats_collector:record(zing, 90), + etap:is( + couch_stats_collector:all(absolute), + [{zing, [90]}, {bar, [1.0, 0.0]}], + "all/1 returns only the specified type." + ), + ok. + +repeat(_, 0) -> + ok; +repeat(Fun, Count) -> + Fun(), + repeat(Fun, Count-1). diff --git a/apps/couch/test/etap/121-stats-aggregates.cfg b/apps/couch/test/etap/121-stats-aggregates.cfg new file mode 100644 index 00000000..30e475da --- /dev/null +++ b/apps/couch/test/etap/121-stats-aggregates.cfg @@ -0,0 +1,19 @@ +% Licensed to the Apache Software Foundation (ASF) under one +% or more contributor license agreements. See the NOTICE file +% distributed with this work for additional information +% regarding copyright ownership. The ASF licenses this file +% to you under the Apache License, Version 2.0 (the +% "License"); you may not use this file except in compliance +% with the License. You may obtain a copy of the License at +% +% http://www.apache.org/licenses/LICENSE-2.0 +% +% Unless required by applicable law or agreed to in writing, +% software distributed under the License is distributed on an +% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +% KIND, either express or implied. See the License for the +% specific language governing permissions and limitations +% under the License. + +{testing, stuff, "yay description"}. +{number, '11', "randomosity"}. diff --git a/apps/couch/test/etap/121-stats-aggregates.ini b/apps/couch/test/etap/121-stats-aggregates.ini new file mode 100644 index 00000000..cc5cd218 --- /dev/null +++ b/apps/couch/test/etap/121-stats-aggregates.ini @@ -0,0 +1,20 @@ +; Licensed to the Apache Software Foundation (ASF) under one +; or more contributor license agreements. See the NOTICE file +; distributed with this work for additional information +; regarding copyright ownership. The ASF licenses this file +; to you under the Apache License, Version 2.0 (the +; "License"); you may not use this file except in compliance +; with the License. You may obtain a copy of the License at +; +; http://www.apache.org/licenses/LICENSE-2.0 +; +; Unless required by applicable law or agreed to in writing, +; software distributed under the License is distributed on an +; "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +; KIND, either express or implied. See the License for the +; specific language governing permissions and limitations +; under the License. + +[stats] +rate = 10000000 ; We call collect_sample in testing +samples = [0, 1] diff --git a/apps/couch/test/etap/121-stats-aggregates.t b/apps/couch/test/etap/121-stats-aggregates.t new file mode 100755 index 00000000..d678aa9d --- /dev/null +++ b/apps/couch/test/etap/121-stats-aggregates.t @@ -0,0 +1,171 @@ +#!/usr/bin/env escript +%% -*- erlang -*- + +% Licensed under the Apache License, Version 2.0 (the "License"); you may not +% use this file except in compliance with the License. 
You may obtain a copy of +% the License at +% +% http://www.apache.org/licenses/LICENSE-2.0 +% +% Unless required by applicable law or agreed to in writing, software +% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +% License for the specific language governing permissions and limitations under +% the License. + +ini_file() -> + test_util:source_file("test/etap/121-stats-aggregates.ini"). + +cfg_file() -> + test_util:source_file("test/etap/121-stats-aggregates.cfg"). + +main(_) -> + test_util:init_code_path(), + etap:plan(17), + case (catch test()) of + ok -> + etap:end_tests(); + Other -> + etap:diag(io_lib:format("Test died abnormally: ~p", [Other])), + etap:bail() + end, + ok. + +test() -> + couch_config:start_link([ini_file()]), + couch_stats_collector:start(), + couch_stats_aggregator:start(cfg_file()), + ok = test_all_empty(), + ok = test_get_empty(), + ok = test_count_stats(), + ok = test_abs_stats(), + ok. + +test_all_empty() -> + {Aggs} = couch_stats_aggregator:all(), + + etap:is(length(Aggs), 2, "There are only two aggregate types in testing."), + etap:is( + couch_util:get_value(testing, Aggs), + {[{stuff, make_agg(<<"yay description">>, + null, null, null, null, null)}]}, + "{testing, stuff} is empty at start." + ), + etap:is( + couch_util:get_value(number, Aggs), + {[{'11', make_agg(<<"randomosity">>, + null, null, null, null, null)}]}, + "{number, '11'} is empty at start." + ), + ok. + +test_get_empty() -> + etap:is( + couch_stats_aggregator:get_json({testing, stuff}), + make_agg(<<"yay description">>, null, null, null, null, null), + "Getting {testing, stuff} returns an empty aggregate." + ), + etap:is( + couch_stats_aggregator:get_json({number, '11'}), + make_agg(<<"randomosity">>, null, null, null, null, null), + "Getting {number, '11'} returns an empty aggregate." + ), + ok. + +test_count_stats() -> + lists:foreach(fun(_) -> + couch_stats_collector:increment({testing, stuff}) + end, lists:seq(1, 100)), + couch_stats_aggregator:collect_sample(), + etap:is( + couch_stats_aggregator:get_json({testing, stuff}), + make_agg(<<"yay description">>, 100, 100, null, 100, 100), + "COUNT: Adding values changes the stats." + ), + etap:is( + couch_stats_aggregator:get_json({testing, stuff}, 1), + make_agg(<<"yay description">>, 100, 100, null, 100, 100), + "COUNT: Adding values changes stats for all times." + ), + + timer:sleep(500), + couch_stats_aggregator:collect_sample(), + etap:is( + couch_stats_aggregator:get_json({testing, stuff}), + make_agg(<<"yay description">>, 100, 50, 70.711, 0, 100), + "COUNT: Removing values changes stats." + ), + etap:is( + couch_stats_aggregator:get_json({testing, stuff}, 1), + make_agg(<<"yay description">>, 100, 50, 70.711, 0, 100), + "COUNT: Removing values changes stats for all times." + ), + + timer:sleep(600), + couch_stats_aggregator:collect_sample(), + etap:is( + couch_stats_aggregator:get_json({testing, stuff}), + make_agg(<<"yay description">>, 100, 33.333, 57.735, 0, 100), + "COUNT: Letting time passes doesn't remove data from time 0 aggregates" + ), + etap:is( + couch_stats_aggregator:get_json({testing, stuff}, 1), + make_agg(<<"yay description">>, 0, 0, 0, 0, 0), + "COUNT: Letting time pass removes data from other time aggregates." + ), + ok. 
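The flow being exercised is: callers bump named counters in couch_stats_collector, and couch_stats_aggregator folds the collected samples into time-windowed summaries whenever collect_sample/0 runs (the huge 'rate' in the .ini keeps the timer out of the way so the test can drive sampling itself). Condensed to its essentials, using the same stat key as the test:

    couch_stats_collector:increment({testing, stuff}),
    couch_stats_collector:increment({testing, stuff}),
    couch_stats_aggregator:collect_sample(),
    AllTime    = couch_stats_aggregator:get_json({testing, stuff}),
    LastSecond = couch_stats_aggregator:get_json({testing, stuff}, 1).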
+ +test_abs_stats() -> + lists:foreach(fun(X) -> + couch_stats_collector:record({number, 11}, X) + end, lists:seq(0, 10)), + couch_stats_aggregator:collect_sample(), + etap:is( + couch_stats_aggregator:get_json({number, 11}), + make_agg(<<"randomosity">>, 5, 5, null, 5, 5), + "ABS: Adding values changes the stats." + ), + etap:is( + couch_stats_aggregator:get_json({number, 11}, 1), + make_agg(<<"randomosity">>, 5, 5, null, 5, 5), + "ABS: Adding values changes stats for all times." + ), + + timer:sleep(500), + couch_stats_collector:record({number, 11}, 15), + couch_stats_aggregator:collect_sample(), + etap:is( + couch_stats_aggregator:get_json({number, 11}), + make_agg(<<"randomosity">>, 20, 10, 7.071, 5, 15), + "ABS: New values changes stats" + ), + etap:is( + couch_stats_aggregator:get_json({number, 11}, 1), + make_agg(<<"randomosity">>, 20, 10, 7.071, 5, 15), + "ABS: Removing values changes stats for all times." + ), + + timer:sleep(600), + couch_stats_aggregator:collect_sample(), + etap:is( + couch_stats_aggregator:get_json({number, 11}), + make_agg(<<"randomosity">>, 20, 10, 7.071, 5, 15), + "ABS: Letting time passes doesn't remove data from time 0 aggregates" + ), + etap:is( + couch_stats_aggregator:get_json({number, 11}, 1), + make_agg(<<"randomosity">>, 15, 15, null, 15, 15), + "ABS: Letting time pass removes data from other time aggregates." + ), + ok. + +make_agg(Desc, Sum, Mean, StdDev, Min, Max) -> + {[ + {description, Desc}, + {current, Sum}, + {sum, Sum}, + {mean, Mean}, + {stddev, StdDev}, + {min, Min}, + {max, Max} + ]}. diff --git a/apps/couch/test/etap/130-attachments-md5.t b/apps/couch/test/etap/130-attachments-md5.t new file mode 100755 index 00000000..4c40f83a --- /dev/null +++ b/apps/couch/test/etap/130-attachments-md5.t @@ -0,0 +1,252 @@ +#!/usr/bin/env escript +% Licensed under the Apache License, Version 2.0 (the "License"); you may not +% use this file except in compliance with the License. You may obtain a copy of +% the License at +% +% http://www.apache.org/licenses/LICENSE-2.0 +% +% Unless required by applicable law or agreed to in writing, software +% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +% License for the specific language governing permissions and limitations under +% the License. + +default_config() -> + test_util:build_file("etc/couchdb/default_dev.ini"). + +test_db_name() -> + <<"etap-test-db">>. + +docid() -> + case get(docid) of + undefined -> + put(docid, 1), + "1"; + Count -> + put(docid, Count+1), + integer_to_list(Count+1) + end. + +main(_) -> + test_util:init_code_path(), + + etap:plan(16), + case (catch test()) of + ok -> + etap:end_tests(); + Other -> + etap:diag(io_lib:format("Test died abnormally: ~p", [Other])), + etap:bail(Other) + end, + ok. 
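The requests in this test are written as raw HTTP so that the Content-MD5 header and trailer can be controlled byte for byte. The value itself is nothing more than the base64-encoded MD5 digest of the attachment body:

    AttData = "We all live in a yellow submarine!",
    ContentMD5 = base64:encode(couch_util:md5(AttData)),
    %% With stock OTP the same value is base64:encode(erlang:md5(AttData)).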
+
+test() ->
+    couch_server_sup:start_link([default_config()]),
+    Addr = couch_config:get("httpd", "bind_address", any),
+    Port = list_to_integer(couch_config:get("httpd", "port", "5984")),
+    put(addr, Addr),
+    put(port, Port),
+    timer:sleep(1000),
+
+    couch_server:delete(test_db_name(), []),
+    couch_db:create(test_db_name(), []),
+
+    test_identity_without_md5(),
+    test_chunked_without_md5(),
+
+    test_identity_with_valid_md5(),
+    test_chunked_with_valid_md5_header(),
+    test_chunked_with_valid_md5_trailer(),
+
+    test_identity_with_invalid_md5(),
+    test_chunked_with_invalid_md5_header(),
+    test_chunked_with_invalid_md5_trailer(),
+
+    couch_server:delete(test_db_name(), []),
+    couch_server_sup:stop(),
+    ok.
+
+test_identity_without_md5() ->
+    Data = [
+        "PUT /", test_db_name(), "/", docid(), "/readme.txt HTTP/1.1\r\n",
+        "Content-Type: text/plain\r\n",
+        "Content-Length: 34\r\n",
+        "\r\n",
+        "We all live in a yellow submarine!"],
+
+    {Code, Json} = do_request(Data),
+    etap:is(Code, 201, "Stored with identity encoding and no MD5"),
+    etap:is(get_json(Json, [<<"ok">>]), true, "Body indicates success.").
+
+test_chunked_without_md5() ->
+    AttData = <<"We all live in a yellow submarine!">>,
+    <<Part1:21/binary, Part2:13/binary>> = AttData,
+    Data = [
+        "PUT /", test_db_name(), "/", docid(), "/readme.txt HTTP/1.1\r\n",
+        "Content-Type: text/plain\r\n",
+        "Transfer-Encoding: chunked\r\n",
+        "\r\n",
+        to_hex(size(Part1)), "\r\n",
+        Part1, "\r\n",
+        to_hex(size(Part2)), "\r\n",
+        Part2, "\r\n"
+        "0\r\n"
+        "\r\n"],
+
+    {Code, Json} = do_request(Data),
+    etap:is(Code, 201, "Stored with chunked encoding and no MD5"),
+    etap:is(get_json(Json, [<<"ok">>]), true, "Body indicates success.").
+
+test_identity_with_valid_md5() ->
+    AttData = "We all live in a yellow submarine!",
+    Data = [
+        "PUT /", test_db_name(), "/", docid(), "/readme.txt HTTP/1.1\r\n",
+        "Content-Type: text/plain\r\n",
+        "Content-Length: 34\r\n",
+        "Content-MD5: ", base64:encode(couch_util:md5(AttData)), "\r\n",
+        "\r\n",
+        AttData],
+
+    {Code, Json} = do_request(Data),
+    etap:is(Code, 201, "Stored with identity encoding and valid MD5"),
+    etap:is(get_json(Json, [<<"ok">>]), true, "Body indicates success.").
+
+test_chunked_with_valid_md5_header() ->
+    AttData = <<"We all live in a yellow submarine!">>,
+    <<Part1:21/binary, Part2:13/binary>> = AttData,
+    Data = [
+        "PUT /", test_db_name(), "/", docid(), "/readme.txt HTTP/1.1\r\n",
+        "Content-Type: text/plain\r\n",
+        "Transfer-Encoding: chunked\r\n",
+        "Content-MD5: ", base64:encode(couch_util:md5(AttData)), "\r\n",
+        "\r\n",
+        to_hex(size(Part1)), "\r\n",
+        Part1, "\r\n",
+        to_hex(size(Part2)), "\r\n",
+        Part2, "\r\n",
+        "0\r\n",
+        "\r\n"],
+
+    {Code, Json} = do_request(Data),
+    etap:is(Code, 201, "Stored with chunked encoding and valid MD5 header."),
+    etap:is(get_json(Json, [<<"ok">>]), true, "Body indicates success.").
+
+test_chunked_with_valid_md5_trailer() ->
+    AttData = <<"We all live in a yellow submarine!">>,
+    <<Part1:21/binary, Part2:13/binary>> = AttData,
+    Data = [
+        "PUT /", test_db_name(), "/", docid(), "/readme.txt HTTP/1.1\r\n",
+        "Content-Type: text/plain\r\n",
+        "Transfer-Encoding: chunked\r\n",
+        "Trailer: Content-MD5\r\n",
+        "\r\n",
+        to_hex(size(Part1)), "\r\n",
+        Part1, "\r\n",
+        to_hex(size(Part2)), "\r\n",
+        Part2, "\r\n",
+        "0\r\n",
+        "Content-MD5: ", base64:encode(couch_util:md5(AttData)), "\r\n",
+        "\r\n"],
+
+    {Code, Json} = do_request(Data),
+    etap:is(Code, 201, "Stored with chunked encoding and valid MD5 trailer."),
+    etap:is(get_json(Json, [<<"ok">>]), true, "Body indicates success.").
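The chunked variants above frame the 34-byte attachment as two chunks: each chunk is preceded by its size in hexadecimal, the body is terminated by a zero-length chunk, and a trailer such as Content-MD5 may follow if it was announced in the Trailer header. The framing of a single chunk, using the test's own to_hex/1 (the split point here is illustrative):

    Part = <<"We all live in a yel">>,                 % 20 bytes
    Chunk = [to_hex(size(Part)), "\r\n", Part, "\r\n"],
    %% ...more chunks..., then:  "0\r\n"  [optional trailer headers]  "\r\n"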
+
+test_identity_with_invalid_md5() ->
+    Data = [
+        "PUT /", test_db_name(), "/", docid(), "/readme.txt HTTP/1.1\r\n",
+        "Content-Type: text/plain\r\n",
+        "Content-Length: 34\r\n",
+        "Content-MD5: ", base64:encode(<<"foobar!">>), "\r\n",
+        "\r\n",
+        "We all live in a yellow submarine!"],
+
+    {Code, Json} = do_request(Data),
+    etap:is(Code, 400, "Invalid MD5 header causes an error: identity"),
+    etap:is(
+        get_json(Json, [<<"error">>]),
+        <<"content_md5_mismatch">>,
+        "Body indicates reason for failure."
+    ).
+
+test_chunked_with_invalid_md5_header() ->
+    AttData = <<"We all live in a yellow submarine!">>,
+    <<Part1:21/binary, Part2:13/binary>> = AttData,
+    Data = [
+        "PUT /", test_db_name(), "/", docid(), "/readme.txt HTTP/1.1\r\n",
+        "Content-Type: text/plain\r\n",
+        "Transfer-Encoding: chunked\r\n",
+        "Content-MD5: ", base64:encode(<<"so sneaky...">>), "\r\n",
+        "\r\n",
+        to_hex(size(Part1)), "\r\n",
+        Part1, "\r\n",
+        to_hex(size(Part2)), "\r\n",
+        Part2, "\r\n",
+        "0\r\n",
+        "\r\n"],
+
+    {Code, Json} = do_request(Data),
+    etap:is(Code, 400, "Invalid MD5 header causes an error: chunked"),
+    etap:is(
+        get_json(Json, [<<"error">>]),
+        <<"content_md5_mismatch">>,
+        "Body indicates reason for failure."
+    ).
+
+test_chunked_with_invalid_md5_trailer() ->
+    AttData = <<"We all live in a yellow submarine!">>,
+    <<Part1:21/binary, Part2:13/binary>> = AttData,
+    Data = [
+        "PUT /", test_db_name(), "/", docid(), "/readme.txt HTTP/1.1\r\n",
+        "Content-Type: text/plain\r\n",
+        "Transfer-Encoding: chunked\r\n",
+        "Trailer: Content-MD5\r\n",
+        "\r\n",
+        to_hex(size(Part1)), "\r\n",
+        Part1, "\r\n",
+        to_hex(size(Part2)), "\r\n",
+        Part2, "\r\n",
+        "0\r\n",
+        "Content-MD5: ", base64:encode(<<"Kool-Aid Fountain!">>), "\r\n",
+        "\r\n"],
+
+    {Code, Json} = do_request(Data),
+    etap:is(Code, 400, "Invalid MD5 Trailer causes an error"),
+    etap:is(
+        get_json(Json, [<<"error">>]),
+        <<"content_md5_mismatch">>,
+        "Body indicates reason for failure."
+    ).
+
+
+get_socket() ->
+    Options = [binary, {packet, 0}, {active, false}],
+    {ok, Sock} = gen_tcp:connect(get(addr), get(port), Options),
+    Sock.
+
+do_request(Request) ->
+    Sock = get_socket(),
+    gen_tcp:send(Sock, list_to_binary(lists:flatten(Request))),
+    timer:sleep(1000),
+    {ok, R} = gen_tcp:recv(Sock, 0),
+    gen_tcp:close(Sock),
+    [Header, Body] = re:split(R, "\r\n\r\n", [{return, binary}]),
+    {ok, {http_response, _, Code, _}, _} =
+        erlang:decode_packet(http, Header, []),
+    Json = couch_util:json_decode(Body),
+    {Code, Json}.
+
+get_json(Json, Path) ->
+    couch_util:get_nested_json_value(Json, Path).
+
+to_hex(Val) ->
+    to_hex(Val, []).
+
+to_hex(0, Acc) ->
+    Acc;
+to_hex(Val, Acc) ->
+    to_hex(Val div 16, [hex_char(Val rem 16) | Acc]).
+
+hex_char(V) when V < 10 -> $0 + V;
+hex_char(V) -> $A + V - 10.
+
diff --git a/apps/couch/test/etap/140-attachment-comp.t b/apps/couch/test/etap/140-attachment-comp.t
new file mode 100755
index 00000000..98d37abc
--- /dev/null
+++ b/apps/couch/test/etap/140-attachment-comp.t
@@ -0,0 +1,711 @@
+#!/usr/bin/env escript
+%% -*- erlang -*-
+
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+% http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+ +default_config() -> + test_util:build_file("etc/couchdb/default_dev.ini"). + +test_db_name() -> + <<"couch_test_atts_compression">>. + +main(_) -> + test_util:init_code_path(), + + etap:plan(78), + case (catch test()) of + ok -> + etap:end_tests(); + Other -> + etap:diag(io_lib:format("Test died abnormally: ~p", [Other])), + etap:bail(Other) + end, + ok. + +test() -> + couch_server_sup:start_link([default_config()]), + put(addr, couch_config:get("httpd", "bind_address", "127.0.0.1")), + put(port, couch_config:get("httpd", "port", "5984")), + application:start(inets), + timer:sleep(1000), + couch_server:delete(test_db_name(), []), + couch_db:create(test_db_name(), []), + + couch_config:set("attachments", "compression_level", "8"), + couch_config:set("attachments", "compressible_types", "text/*"), + + create_1st_text_att(), + create_1st_png_att(), + create_2nd_text_att(), + create_2nd_png_att(), + + tests_for_1st_text_att(), + tests_for_1st_png_att(), + tests_for_2nd_text_att(), + tests_for_2nd_png_att(), + + create_already_compressed_att(db_url() ++ "/doc_comp_att", "readme.txt"), + test_already_compressed_att(db_url() ++ "/doc_comp_att", "readme.txt"), + + test_create_already_compressed_att_with_invalid_content_encoding( + db_url() ++ "/doc_att_deflate", + "readme.txt", + zlib:compress(test_text_data()), + "deflate" + ), + + test_create_already_compressed_att_with_invalid_content_encoding( + db_url() ++ "/doc_att_compress", + "readme.txt", + % Note: As of OTP R13B04, it seems there's no LZW compression + % (i.e. UNIX compress utility implementation) lib in OTP. + % However there's a simple working Erlang implementation at: + % http://scienceblogs.com/goodmath/2008/01/simple_lempelziv_compression_i.php + test_text_data(), + "compress" + ), + + timer:sleep(3000), % to avoid mochiweb socket closed exceptions + couch_server:delete(test_db_name(), []), + couch_server_sup:stop(), + ok. + +db_url() -> + "http://" ++ get(addr) ++ ":" ++ get(port) ++ "/" ++ + binary_to_list(test_db_name()). + +create_1st_text_att() -> + {ok, {{_, Code, _}, _Headers, _Body}} = http:request( + put, + {db_url() ++ "/testdoc1/readme.txt", [], + "text/plain", test_text_data()}, + [], + [{sync, true}]), + etap:is(Code, 201, "Created text attachment using the standalone api"), + ok. + +create_1st_png_att() -> + {ok, {{_, Code, _}, _Headers, _Body}} = http:request( + put, + {db_url() ++ "/testdoc2/icon.png", [], + "image/png", test_png_data()}, + [], + [{sync, true}]), + etap:is(Code, 201, "Created png attachment using the standalone api"), + ok. + +% create a text attachment using the non-standalone attachment api +create_2nd_text_att() -> + DocJson = {[ + {<<"_attachments">>, {[ + {<<"readme.txt">>, {[ + {<<"content_type">>, <<"text/plain">>}, + {<<"data">>, base64:encode(test_text_data())} + ]} + }]}} + ]}, + {ok, {{_, Code, _}, _Headers, _Body}} = http:request( + put, + {db_url() ++ "/testdoc3", [], + "application/json", list_to_binary(couch_util:json_encode(DocJson))}, + [], + [{sync, true}]), + etap:is(Code, 201, "Created text attachment using the non-standalone api"), + ok. 
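The two couch_config:set calls in test/0 above drive everything that follows:
with compression_level at 8 and compressible_types set to "text/*", only
attachments whose Content-Type matches text/* are gzipped on the server, which
is why the text attachments below come back gzip-encoded while the image/png
ones are stored and served untouched. A quick, illustrative way to read the
effective values back (couch_config:get/2 returns the stored strings):

    %% Illustrative only; not part of 140-attachment-comp.t.
    "8" = couch_config:get("attachments", "compression_level"),
    "text/*" = couch_config:get("attachments", "compressible_types").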
+ +% create a png attachment using the non-standalone attachment api +create_2nd_png_att() -> + DocJson = {[ + {<<"_attachments">>, {[ + {<<"icon.png">>, {[ + {<<"content_type">>, <<"image/png">>}, + {<<"data">>, base64:encode(test_png_data())} + ]} + }]}} + ]}, + {ok, {{_, Code, _}, _Headers, _Body}} = http:request( + put, + {db_url() ++ "/testdoc4", [], + "application/json", list_to_binary(couch_util:json_encode(DocJson))}, + [], + [{sync, true}]), + etap:is(Code, 201, "Created png attachment using the non-standalone api"), + ok. + +create_already_compressed_att(DocUri, AttName) -> + {ok, {{_, Code, _}, _Headers, _Body}} = http:request( + put, + {DocUri ++ "/" ++ AttName, [{"Content-Encoding", "gzip"}], + "text/plain", zlib:gzip(test_text_data())}, + [], + [{sync, true}]), + etap:is( + Code, + 201, + "Created already compressed attachment using the standalone api" + ), + ok. + +tests_for_1st_text_att() -> + test_get_1st_text_att_with_accept_encoding_gzip(), + test_get_1st_text_att_without_accept_encoding_header(), + test_get_1st_text_att_with_accept_encoding_deflate(), + test_get_1st_text_att_with_accept_encoding_deflate_only(), + test_get_doc_with_1st_text_att(), + test_1st_text_att_stub(). + +tests_for_1st_png_att() -> + test_get_1st_png_att_without_accept_encoding_header(), + test_get_1st_png_att_with_accept_encoding_gzip(), + test_get_1st_png_att_with_accept_encoding_deflate(), + test_get_doc_with_1st_png_att(), + test_1st_png_att_stub(). + +tests_for_2nd_text_att() -> + test_get_2nd_text_att_with_accept_encoding_gzip(), + test_get_2nd_text_att_without_accept_encoding_header(), + test_get_doc_with_2nd_text_att(), + test_2nd_text_att_stub(). + +tests_for_2nd_png_att() -> + test_get_2nd_png_att_without_accept_encoding_header(), + test_get_2nd_png_att_with_accept_encoding_gzip(), + test_get_doc_with_2nd_png_att(), + test_2nd_png_att_stub(). + +test_get_1st_text_att_with_accept_encoding_gzip() -> + {ok, {{_, Code, _}, Headers, Body}} = http:request( + get, + {db_url() ++ "/testdoc1/readme.txt", [{"Accept-Encoding", "gzip"}]}, + [], + [{sync, true}]), + etap:is(Code, 200, "HTTP response code is 200"), + Gziped = lists:member({"content-encoding", "gzip"}, Headers), + etap:is(Gziped, true, "received body is gziped"), + Uncompressed = binary_to_list(zlib:gunzip(list_to_binary(Body))), + etap:is( + Uncompressed, + test_text_data(), + "received data for the 1st text attachment is ok" + ), + ok. + +test_get_1st_text_att_without_accept_encoding_header() -> + {ok, {{_, Code, _}, Headers, Body}} = http:request( + get, + {db_url() ++ "/testdoc1/readme.txt", []}, + [], + [{sync, true}]), + etap:is(Code, 200, "HTTP response code is 200"), + Gziped = lists:member({"content-encoding", "gzip"}, Headers), + etap:is(Gziped, false, "received body is not gziped"), + etap:is( + Body, + test_text_data(), + "received data for the 1st text attachment is ok" + ), + ok. + +test_get_1st_text_att_with_accept_encoding_deflate() -> + {ok, {{_, Code, _}, Headers, Body}} = http:request( + get, + {db_url() ++ "/testdoc1/readme.txt", [{"Accept-Encoding", "deflate"}]}, + [], + [{sync, true}]), + etap:is(Code, 200, "HTTP response code is 200"), + Gziped = lists:member({"content-encoding", "gzip"}, Headers), + etap:is(Gziped, false, "received body is not gziped"), + Deflated = lists:member({"content-encoding", "deflate"}, Headers), + etap:is(Deflated, false, "received body is not deflated"), + etap:is( + Body, + test_text_data(), + "received data for the 1st text attachment is ok" + ), + ok. 
+ +test_get_1st_text_att_with_accept_encoding_deflate_only() -> + {ok, {{_, Code, _}, _Headers, _Body}} = http:request( + get, + {db_url() ++ "/testdoc1/readme.txt", + [{"Accept-Encoding", "deflate, *;q=0"}]}, + [], + [{sync, true}]), + etap:is( + Code, + 406, + "HTTP response code is 406 for an unsupported content encoding request" + ), + ok. + +test_get_1st_png_att_without_accept_encoding_header() -> + {ok, {{_, Code, _}, Headers, Body}} = http:request( + get, + {db_url() ++ "/testdoc2/icon.png", []}, + [], + [{sync, true}]), + etap:is(Code, 200, "HTTP response code is 200"), + Gziped = lists:member({"content-encoding", "gzip"}, Headers), + etap:is(Gziped, false, "received body is not gziped"), + etap:is( + Body, + test_png_data(), + "received data for the 1st png attachment is ok" + ), + ok. + +test_get_1st_png_att_with_accept_encoding_gzip() -> + {ok, {{_, Code, _}, Headers, Body}} = http:request( + get, + {db_url() ++ "/testdoc2/icon.png", [{"Accept-Encoding", "gzip"}]}, + [], + [{sync, true}]), + etap:is(Code, 200, "HTTP response code is 200"), + Gziped = lists:member({"content-encoding", "gzip"}, Headers), + etap:is(Gziped, false, "received body is not gziped"), + etap:is( + Body, + test_png_data(), + "received data for the 1st png attachment is ok" + ), + ok. + +test_get_1st_png_att_with_accept_encoding_deflate() -> + {ok, {{_, Code, _}, Headers, Body}} = http:request( + get, + {db_url() ++ "/testdoc2/icon.png", [{"Accept-Encoding", "deflate"}]}, + [], + [{sync, true}]), + etap:is(Code, 200, "HTTP response code is 200"), + Deflated = lists:member({"content-encoding", "deflate"}, Headers), + etap:is(Deflated, false, "received body is not deflated"), + Gziped = lists:member({"content-encoding", "gzip"}, Headers), + etap:is(Gziped, false, "received body is not gziped"), + etap:is( + Body, + test_png_data(), + "received data for the 1st png attachment is ok" + ), + ok. + +test_get_doc_with_1st_text_att() -> + {ok, {{_, Code, _}, _Headers, Body}} = http:request( + get, + {db_url() ++ "/testdoc1?attachments=true", []}, + [], + [{sync, true}]), + etap:is(Code, 200, "HTTP response code is 200"), + Json = couch_util:json_decode(Body), + TextAttJson = couch_util:get_nested_json_value( + Json, + [<<"_attachments">>, <<"readme.txt">>] + ), + TextAttType = couch_util:get_nested_json_value( + TextAttJson, + [<<"content_type">>] + ), + TextAttData = couch_util:get_nested_json_value( + TextAttJson, + [<<"data">>] + ), + etap:is( + TextAttType, + <<"text/plain">>, + "1st text attachment has type text/plain" + ), + %% check the attachment's data is the base64 encoding of the plain text + %% and not the base64 encoding of the gziped plain text + etap:is( + TextAttData, + base64:encode(test_text_data()), + "1st text attachment data is properly base64 encoded" + ), + ok. 
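Taken together, the GET tests above pin down how a server-compressed text
attachment is negotiated: a request advertising Accept-Encoding: gzip receives
the stored gzip body with a Content-Encoding: gzip header; a request with no
Accept-Encoding header, or one asking only for deflate, gets the attachment
decompressed and served as identity; and a request that additionally rules out
every other encoding ("deflate, *;q=0") is refused with 406.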
+ +test_1st_text_att_stub() -> + {ok, {{_, Code, _}, _Headers, Body}} = http:request( + get, + {db_url() ++ "/testdoc1?att_encoding_info=true", []}, + [], + [{sync, true}]), + etap:is(Code, 200, "HTTP response code is 200"), + Json = couch_util:json_decode(Body), + {TextAttJson} = couch_util:get_nested_json_value( + Json, + [<<"_attachments">>, <<"readme.txt">>] + ), + TextAttLength = couch_util:get_value(<<"length">>, TextAttJson), + etap:is( + TextAttLength, + length(test_text_data()), + "1st text attachment stub length matches the uncompressed length" + ), + TextAttEncoding = couch_util:get_value(<<"encoding">>, TextAttJson), + etap:is( + TextAttEncoding, + <<"gzip">>, + "1st text attachment stub has the encoding field set to gzip" + ), + TextAttEncLength = couch_util:get_value(<<"encoded_length">>, TextAttJson), + etap:is( + TextAttEncLength, + iolist_size(zlib:gzip(test_text_data())), + "1st text attachment stub encoded_length matches the compressed length" + ), + ok. + +test_get_doc_with_1st_png_att() -> + {ok, {{_, Code, _}, _Headers, Body}} = http:request( + get, + {db_url() ++ "/testdoc2?attachments=true", []}, + [], + [{sync, true}]), + etap:is(Code, 200, "HTTP response code is 200"), + Json = couch_util:json_decode(Body), + PngAttJson = couch_util:get_nested_json_value( + Json, + [<<"_attachments">>, <<"icon.png">>] + ), + PngAttType = couch_util:get_nested_json_value( + PngAttJson, + [<<"content_type">>] + ), + PngAttData = couch_util:get_nested_json_value( + PngAttJson, + [<<"data">>] + ), + etap:is(PngAttType, <<"image/png">>, "attachment has type image/png"), + etap:is( + PngAttData, + base64:encode(test_png_data()), + "1st png attachment data is properly base64 encoded" + ), + ok. + +test_1st_png_att_stub() -> + {ok, {{_, Code, _}, _Headers, Body}} = http:request( + get, + {db_url() ++ "/testdoc2?att_encoding_info=true", []}, + [], + [{sync, true}]), + etap:is(Code, 200, "HTTP response code is 200"), + Json = couch_util:json_decode(Body), + {PngAttJson} = couch_util:get_nested_json_value( + Json, + [<<"_attachments">>, <<"icon.png">>] + ), + PngAttLength = couch_util:get_value(<<"length">>, PngAttJson), + etap:is( + PngAttLength, + length(test_png_data()), + "1st png attachment stub length matches the uncompressed length" + ), + PngEncoding = couch_util:get_value(<<"encoding">>, PngAttJson), + etap:is( + PngEncoding, + undefined, + "1st png attachment stub doesn't have an encoding field" + ), + PngEncLength = couch_util:get_value(<<"encoded_length">>, PngAttJson), + etap:is( + PngEncLength, + undefined, + "1st png attachment stub doesn't have an encoded_length field" + ), + ok. + +test_get_2nd_text_att_with_accept_encoding_gzip() -> + {ok, {{_, Code, _}, Headers, Body}} = http:request( + get, + {db_url() ++ "/testdoc3/readme.txt", [{"Accept-Encoding", "gzip"}]}, + [], + [{sync, true}]), + etap:is(Code, 200, "HTTP response code is 200"), + Gziped = lists:member({"content-encoding", "gzip"}, Headers), + etap:is(Gziped, true, "received body is gziped"), + Uncompressed = binary_to_list(zlib:gunzip(list_to_binary(Body))), + etap:is( + Uncompressed, + test_text_data(), + "received data for the 2nd text attachment is ok" + ), + ok. 
+ +test_get_2nd_text_att_without_accept_encoding_header() -> + {ok, {{_, Code, _}, Headers, Body}} = http:request( + get, + {db_url() ++ "/testdoc3/readme.txt", []}, + [], + [{sync, true}]), + etap:is(Code, 200, "HTTP response code is 200"), + Gziped = lists:member({"content-encoding", "gzip"}, Headers), + etap:is(Gziped, false, "received body is not gziped"), + etap:is( + Body, + test_text_data(), + "received data for the 2nd text attachment is ok" + ), + ok. + +test_get_2nd_png_att_without_accept_encoding_header() -> + {ok, {{_, Code, _}, Headers, Body}} = http:request( + get, + {db_url() ++ "/testdoc4/icon.png", []}, + [], + [{sync, true}]), + etap:is(Code, 200, "HTTP response code is 200"), + Gziped = lists:member({"content-encoding", "gzip"}, Headers), + etap:is(Gziped, false, "received body is not gziped"), + etap:is( + Body, + test_png_data(), + "received data for the 2nd png attachment is ok" + ), + ok. + +test_get_2nd_png_att_with_accept_encoding_gzip() -> + {ok, {{_, Code, _}, Headers, Body}} = http:request( + get, + {db_url() ++ "/testdoc4/icon.png", [{"Accept-Encoding", "gzip"}]}, + [], + [{sync, true}]), + etap:is(Code, 200, "HTTP response code is 200"), + Gziped = lists:member({"content-encoding", "gzip"}, Headers), + etap:is(Gziped, false, "received body is not gziped"), + etap:is( + Body, + test_png_data(), + "received data for the 2nd png attachment is ok" + ), + ok. + +test_get_doc_with_2nd_text_att() -> + {ok, {{_, Code, _}, _Headers, Body}} = http:request( + get, + {db_url() ++ "/testdoc3?attachments=true", []}, + [], + [{sync, true}]), + etap:is(Code, 200, "HTTP response code is 200"), + Json = couch_util:json_decode(Body), + TextAttJson = couch_util:get_nested_json_value( + Json, + [<<"_attachments">>, <<"readme.txt">>] + ), + TextAttType = couch_util:get_nested_json_value( + TextAttJson, + [<<"content_type">>] + ), + TextAttData = couch_util:get_nested_json_value( + TextAttJson, + [<<"data">>] + ), + etap:is(TextAttType, <<"text/plain">>, "attachment has type text/plain"), + %% check the attachment's data is the base64 encoding of the plain text + %% and not the base64 encoding of the gziped plain text + etap:is( + TextAttData, + base64:encode(test_text_data()), + "2nd text attachment data is properly base64 encoded" + ), + ok. + +test_2nd_text_att_stub() -> + {ok, {{_, Code, _}, _Headers, Body}} = http:request( + get, + {db_url() ++ "/testdoc3?att_encoding_info=true", []}, + [], + [{sync, true}]), + etap:is(Code, 200, "HTTP response code is 200"), + Json = couch_util:json_decode(Body), + {TextAttJson} = couch_util:get_nested_json_value( + Json, + [<<"_attachments">>, <<"readme.txt">>] + ), + TextAttLength = couch_util:get_value(<<"length">>, TextAttJson), + etap:is( + TextAttLength, + length(test_text_data()), + "2nd text attachment stub length matches the uncompressed length" + ), + TextAttEncoding = couch_util:get_value(<<"encoding">>, TextAttJson), + etap:is( + TextAttEncoding, + <<"gzip">>, + "2nd text attachment stub has the encoding field set to gzip" + ), + TextAttEncLength = couch_util:get_value(<<"encoded_length">>, TextAttJson), + etap:is( + TextAttEncLength, + iolist_size(zlib:gzip(test_text_data())), + "2nd text attachment stub encoded_length matches the compressed length" + ), + ok. 
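For reference, the stubs that the ?att_encoding_info=true tests above pick
apart are returned inside the document's _attachments member. Decoded with
couch_util:json_decode/1, the readme.txt stub is an EJSON term of roughly this
shape (the lengths shown are illustrative, since the test data is the README
file):

    %% Illustrative only; field values depend on the README contents.
    {[{<<"content_type">>, <<"text/plain">>},
      {<<"stub">>, true},
      {<<"length">>, 3787},
      {<<"encoding">>, <<"gzip">>},
      {<<"encoded_length">>, 1529}]}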
+ +test_get_doc_with_2nd_png_att() -> + {ok, {{_, Code, _}, _Headers, Body}} = http:request( + get, + {db_url() ++ "/testdoc4?attachments=true", []}, + [], + [{sync, true}]), + etap:is(Code, 200, "HTTP response code is 200"), + Json = couch_util:json_decode(Body), + PngAttJson = couch_util:get_nested_json_value( + Json, + [<<"_attachments">>, <<"icon.png">>] + ), + PngAttType = couch_util:get_nested_json_value( + PngAttJson, + [<<"content_type">>] + ), + PngAttData = couch_util:get_nested_json_value( + PngAttJson, + [<<"data">>] + ), + etap:is(PngAttType, <<"image/png">>, "attachment has type image/png"), + etap:is( + PngAttData, + base64:encode(test_png_data()), + "2nd png attachment data is properly base64 encoded" + ), + ok. + +test_2nd_png_att_stub() -> + {ok, {{_, Code, _}, _Headers, Body}} = http:request( + get, + {db_url() ++ "/testdoc4?att_encoding_info=true", []}, + [], + [{sync, true}]), + etap:is(Code, 200, "HTTP response code is 200"), + Json = couch_util:json_decode(Body), + {PngAttJson} = couch_util:get_nested_json_value( + Json, + [<<"_attachments">>, <<"icon.png">>] + ), + PngAttLength = couch_util:get_value(<<"length">>, PngAttJson), + etap:is( + PngAttLength, + length(test_png_data()), + "2nd png attachment stub length matches the uncompressed length" + ), + PngEncoding = couch_util:get_value(<<"encoding">>, PngAttJson), + etap:is( + PngEncoding, + undefined, + "2nd png attachment stub doesn't have an encoding field" + ), + PngEncLength = couch_util:get_value(<<"encoded_length">>, PngAttJson), + etap:is( + PngEncLength, + undefined, + "2nd png attachment stub doesn't have an encoded_length field" + ), + ok. + +test_already_compressed_att(DocUri, AttName) -> + test_get_already_compressed_att_with_accept_gzip(DocUri, AttName), + test_get_already_compressed_att_without_accept(DocUri, AttName), + test_get_already_compressed_att_stub(DocUri, AttName). + +test_get_already_compressed_att_with_accept_gzip(DocUri, AttName) -> + {ok, {{_, Code, _}, Headers, Body}} = http:request( + get, + {DocUri ++ "/" ++ AttName, [{"Accept-Encoding", "gzip"}]}, + [], + [{sync, true}]), + etap:is(Code, 200, "HTTP response code is 200"), + Gziped = lists:member({"content-encoding", "gzip"}, Headers), + etap:is(Gziped, true, "received body is gziped"), + etap:is( + iolist_to_binary(Body), + iolist_to_binary(zlib:gzip(test_text_data())), + "received data for the already compressed attachment is ok" + ), + ok. + +test_get_already_compressed_att_without_accept(DocUri, AttName) -> + {ok, {{_, Code, _}, Headers, Body}} = http:request( + get, + {DocUri ++ "/" ++ AttName, []}, + [], + [{sync, true}]), + etap:is(Code, 200, "HTTP response code is 200"), + Gziped = lists:member({"content-encoding", "gzip"}, Headers), + etap:is(Gziped, false, "received body is not gziped"), + etap:is( + iolist_to_binary(Body), + iolist_to_binary(test_text_data()), + "received data for the already compressed attachment is ok" + ), + ok. 
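The pair of tests above captures how an attachment uploaded with
Content-Encoding: gzip is handled: the bytes are kept exactly as uploaded, so a
client that advertises Accept-Encoding: gzip gets the original gzip stream back
unchanged, while a client that does not advertise it gets the body decompressed
to plain text. The stub test that follows checks the bookkeeping side of the
same behaviour.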
+ +test_get_already_compressed_att_stub(DocUri, AttName) -> + {ok, {{_, Code, _}, _Headers, Body}} = http:request( + get, + {DocUri ++ "?att_encoding_info=true", []}, + [], + [{sync, true}]), + etap:is(Code, 200, "HTTP response code is 200"), + Json = couch_util:json_decode(Body), + {AttJson} = couch_util:get_nested_json_value( + Json, + [<<"_attachments">>, iolist_to_binary(AttName)] + ), + AttLength = couch_util:get_value(<<"length">>, AttJson), + etap:is( + AttLength, + iolist_size((zlib:gzip(test_text_data()))), + "Already compressed attachment stub length matches the " + "compressed length" + ), + Encoding = couch_util:get_value(<<"encoding">>, AttJson), + etap:is( + Encoding, + <<"gzip">>, + "Already compressed attachment stub has the encoding field set to gzip" + ), + EncLength = couch_util:get_value(<<"encoded_length">>, AttJson), + etap:is( + EncLength, + AttLength, + "Already compressed attachment stub encoded_length matches the " + "length field value" + ), + ok. + +test_create_already_compressed_att_with_invalid_content_encoding( + DocUri, AttName, AttData, Encoding) -> + {ok, {{_, Code, _}, _Headers, _Body}} = http:request( + put, + {DocUri ++ "/" ++ AttName, [{"Content-Encoding", Encoding}], + "text/plain", AttData}, + [], + [{sync, true}]), + etap:is( + Code, + 415, + "Couldn't create an already compressed attachment using the " + "unsupported encoding '" ++ Encoding ++ "'" + ), + ok. + +test_png_data() -> + {ok, Data} = file:read_file( + test_util:source_file("share/www/image/logo.png") + ), + binary_to_list(Data). + +test_text_data() -> + {ok, Data} = file:read_file( + test_util:source_file("README") + ), + binary_to_list(Data). diff --git a/apps/couch/test/etap/150-invalid-view-seq.t b/apps/couch/test/etap/150-invalid-view-seq.t new file mode 100755 index 00000000..0664c116 --- /dev/null +++ b/apps/couch/test/etap/150-invalid-view-seq.t @@ -0,0 +1,192 @@ +#!/usr/bin/env escript +%% -*- erlang -*- + +% Licensed under the Apache License, Version 2.0 (the "License"); you may not +% use this file except in compliance with the License. You may obtain a copy of +% the License at +% +% http://www.apache.org/licenses/LICENSE-2.0 +% +% Unless required by applicable law or agreed to in writing, software +% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +% License for the specific language governing permissions and limitations under +% the License. + +-record(user_ctx, { + name = null, + roles = [], + handler +}). + +default_config() -> + test_util:build_file("etc/couchdb/default_dev.ini"). + +test_db_name() -> + <<"couch_test_invalid_view_seq">>. + +main(_) -> + test_util:init_code_path(), + + etap:plan(10), + case (catch test()) of + ok -> + etap:end_tests(); + Other -> + etap:diag(io_lib:format("Test died abnormally: ~p", [Other])), + etap:bail(Other) + end, + ok. 
+ +%% NOTE: since during the test we stop the server, +%% a huge and ugly but harmless stack trace is sent to stderr +%% +test() -> + couch_server_sup:start_link([default_config()]), + timer:sleep(1000), + delete_db(), + create_db(), + + create_docs(), + create_design_doc(), + + % make DB file backup + backup_db_file(), + + put(addr, couch_config:get("httpd", "bind_address", "127.0.0.1")), + put(port, couch_config:get("httpd", "port", "5984")), + application:start(inets), + + create_new_doc(), + query_view_before_restore_backup(), + + % restore DB file backup after querying view + restore_backup_db_file(), + + query_view_after_restore_backup(), + + delete_db(), + couch_server_sup:stop(), + ok. + +admin_user_ctx() -> + {user_ctx, #user_ctx{roles=[<<"_admin">>]}}. + +create_db() -> + {ok, _} = couch_db:create(test_db_name(), [admin_user_ctx()]). + +delete_db() -> + couch_server:delete(test_db_name(), [admin_user_ctx()]). + +create_docs() -> + {ok, Db} = couch_db:open(test_db_name(), [admin_user_ctx()]), + Doc1 = couch_doc:from_json_obj({[ + {<<"_id">>, <<"doc1">>}, + {<<"value">>, 1} + + ]}), + Doc2 = couch_doc:from_json_obj({[ + {<<"_id">>, <<"doc2">>}, + {<<"value">>, 2} + + ]}), + Doc3 = couch_doc:from_json_obj({[ + {<<"_id">>, <<"doc3">>}, + {<<"value">>, 3} + + ]}), + {ok, _} = couch_db:update_docs(Db, [Doc1, Doc2, Doc3]), + couch_db:ensure_full_commit(Db), + couch_db:close(Db). + +create_design_doc() -> + {ok, Db} = couch_db:open(test_db_name(), [admin_user_ctx()]), + DDoc = couch_doc:from_json_obj({[ + {<<"_id">>, <<"_design/foo">>}, + {<<"language">>, <<"javascript">>}, + {<<"views">>, {[ + {<<"bar">>, {[ + {<<"map">>, <<"function(doc) { emit(doc.value, 1); }">>} + ]}} + ]}} + ]}), + {ok, _} = couch_db:update_docs(Db, [DDoc]), + couch_db:ensure_full_commit(Db), + couch_db:close(Db). + +backup_db_file() -> + DbFile = test_util:build_file("tmp/lib/" ++ + binary_to_list(test_db_name()) ++ ".couch"), + {ok, _} = file:copy(DbFile, DbFile ++ ".backup"), + ok. + +create_new_doc() -> + {ok, Db} = couch_db:open(test_db_name(), [admin_user_ctx()]), + Doc666 = couch_doc:from_json_obj({[ + {<<"_id">>, <<"doc666">>}, + {<<"value">>, 999} + + ]}), + {ok, _} = couch_db:update_docs(Db, [Doc666]), + couch_db:ensure_full_commit(Db), + couch_db:close(Db). + +db_url() -> + "http://" ++ get(addr) ++ ":" ++ get(port) ++ "/" ++ + binary_to_list(test_db_name()). + +query_view_before_restore_backup() -> + {ok, {{_, Code, _}, _Headers, Body}} = http:request( + get, + {db_url() ++ "/_design/foo/_view/bar", []}, + [], + [{sync, true}]), + etap:is(Code, 200, "Got view response before restoring backup."), + ViewJson = couch_util:json_decode(Body), + Rows = couch_util:get_nested_json_value(ViewJson, [<<"rows">>]), + HasDoc1 = has_doc("doc1", Rows), + HasDoc2 = has_doc("doc2", Rows), + HasDoc3 = has_doc("doc3", Rows), + HasDoc666 = has_doc("doc666", Rows), + etap:is(HasDoc1, true, "Before backup restore, view has doc1"), + etap:is(HasDoc2, true, "Before backup restore, view has doc2"), + etap:is(HasDoc3, true, "Before backup restore, view has doc3"), + etap:is(HasDoc666, true, "Before backup restore, view has doc666"), + ok. + +has_doc(DocId1, Rows) -> + DocId = iolist_to_binary(DocId1), + lists:any( + fun({R}) -> lists:member({<<"id">>, DocId}, R) end, + Rows + ). 
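The sequence in test/0 above is the point of this file: the database file is
copied aside before doc666 exists, the view is then queried so its index
advances to a sequence that includes doc666, and the older file is put back in
place. After the restore, the view's recorded update sequence is ahead of
anything the database can supply; the final assertions (in
query_view_after_restore_backup/0 below) encode the expectation that CouchDB
detects the invalid sequence and rebuilds the index instead of serving stale
rows, which is why doc666 must no longer appear.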
+ +restore_backup_db_file() -> + couch_server_sup:stop(), + timer:sleep(3000), + DbFile = test_util:build_file("tmp/lib/" ++ + binary_to_list(test_db_name()) ++ ".couch"), + ok = file:delete(DbFile), + ok = file:rename(DbFile ++ ".backup", DbFile), + couch_server_sup:start_link([default_config()]), + timer:sleep(1000), + ok. + +query_view_after_restore_backup() -> + {ok, {{_, Code, _}, _Headers, Body}} = http:request( + get, + {db_url() ++ "/_design/foo/_view/bar", []}, + [], + [{sync, true}]), + etap:is(Code, 200, "Got view response after restoring backup."), + ViewJson = couch_util:json_decode(Body), + Rows = couch_util:get_nested_json_value(ViewJson, [<<"rows">>]), + HasDoc1 = has_doc("doc1", Rows), + HasDoc2 = has_doc("doc2", Rows), + HasDoc3 = has_doc("doc3", Rows), + HasDoc666 = has_doc("doc666", Rows), + etap:is(HasDoc1, true, "After backup restore, view has doc1"), + etap:is(HasDoc2, true, "After backup restore, view has doc2"), + etap:is(HasDoc3, true, "After backup restore, view has doc3"), + etap:is(HasDoc666, false, "After backup restore, view does not have doc666"), + ok. diff --git a/apps/couch/test/etap/160-vhosts.t b/apps/couch/test/etap/160-vhosts.t new file mode 100755 index 00000000..7694010a --- /dev/null +++ b/apps/couch/test/etap/160-vhosts.t @@ -0,0 +1,131 @@ +#!/usr/bin/env escript +%% -*- erlang -*- + +% Licensed under the Apache License, Version 2.0 (the "License"); you may not +% use this file except in compliance with the License. You may obtain a copy of +% the License at +% +% http://www.apache.org/licenses/LICENSE-2.0 +% +% Unless required by applicable law or agreed to in writing, software +% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +% License for the specific language governing permissions and limitations under +% the License. + +%% XXX: Figure out how to -include("couch_rep.hrl") +-record(http_db, { + url, + auth = [], + resource = "", + headers = [ + {"User-Agent", "CouchDB/"++couch:version()}, + {"Accept", "application/json"}, + {"Accept-Encoding", "gzip"} + ], + qs = [], + method = get, + body = nil, + options = [ + {response_format,binary}, + {inactivity_timeout, 30000} + ], + retries = 10, + pause = 1, + conn = nil +}). + +-record(user_ctx, { + name = null, + roles = [], + handler +}). + +server() -> "http://127.0.0.1:5984/". +dbname() -> "etap-test-db". +admin_user_ctx() -> {user_ctx, #user_ctx{roles=[<<"_admin">>]}}. + +config_files() -> + lists:map(fun test_util:build_file/1, [ + "etc/couchdb/default_dev.ini", + "etc/couchdb/local_dev.ini" + ]). + +main(_) -> + test_util:init_code_path(), + + etap:plan(4), + case (catch test()) of + ok -> + etap:end_tests(); + Other -> + etap:diag(io_lib:format("Test died abnormally: ~p", [Other])), + etap:bail(Other) + end, + ok. 
+
+test() ->
+    couch_server_sup:start_link(config_files()),
+    ibrowse:start(),
+    crypto:start(),
+
+    couch_server:delete(list_to_binary(dbname()), [admin_user_ctx()]),
+    {ok, Db} = couch_db:create(list_to_binary(dbname()), [admin_user_ctx()]),
+
+    Doc = couch_doc:from_json_obj({[
+        {<<"_id">>, <<"doc1">>},
+        {<<"value">>, 666}
+    ]}),
+    {ok, _} = couch_db:update_docs(Db, [Doc]),
+    couch_db:ensure_full_commit(Db),
+
+    %% end boilerplate, start test
+
+    couch_config:set("vhosts", "example.com", "/etap-test-db", false),
+    test_regular_request(),
+    test_vhost_request(),
+    test_vhost_request_with_qs(),
+    test_vhost_request_with_global(),
+
+    %% restart boilerplate
+    couch_db:close(Db),
+    couch_server:delete(list_to_binary(dbname()), []),
+    ok.
+
+test_regular_request() ->
+    case ibrowse:send_req(server(), [], get, []) of
+        {ok, _, _, Body} ->
+            {[{<<"couchdb">>, <<"Welcome">>},
+              {<<"version">>,_}
+            ]} = couch_util:json_decode(Body),
+            etap:is(true, true, "should return server info");
+        _Else -> false
+    end.
+
+test_vhost_request() ->
+    case ibrowse:send_req(server(), [], get, [], [{host_header, "example.com"}]) of
+        {ok, _, _, Body} ->
+            {[{<<"db_name">>, <<"etap-test-db">>},_,_,_,_,_,_,_,_,_]}
+                = couch_util:json_decode(Body),
+            etap:is(true, true, "should return database info");
+        _Else -> false
+    end.
+
+test_vhost_request_with_qs() ->
+    Url = server() ++ "doc1?revs_info=true",
+    case ibrowse:send_req(Url, [], get, [], [{host_header, "example.com"}]) of
+        {ok, _, _, Body} ->
+            {JsonProps} = couch_util:json_decode(Body),
+            HasRevsInfo = proplists:is_defined(<<"_revs_info">>, JsonProps),
+            etap:is(HasRevsInfo, true, "should return _revs_info");
+        _Else -> false
+    end.
+
+test_vhost_request_with_global() ->
+    Url2 = server() ++ "_utils/index.html",
+    case ibrowse:send_req(Url2, [], get, [], [{host_header, "example.com"}]) of
+        {ok, _, _, Body2} ->
+            "<!DOCTYPE" = lists:sublist(Body2, 9),
+            etap:is(true, true, "should serve /_utils even inside vhosts");
+        _Else -> false
+    end.
diff --git a/apps/couch/test/etap/run.tpl b/apps/couch/test/etap/run.tpl
new file mode 100644
index 00000000..faf0f456
--- /dev/null
+++ b/apps/couch/test/etap/run.tpl
@@ -0,0 +1,27 @@
+#!/bin/sh -e
+
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
+
+SRCDIR="%abs_top_srcdir%"
+BUILDIR="%abs_top_builddir%"
+
+export ERL_FLAGS="$ERL_FLAGS -pa $BUILDIR/test/etap/"
+
+if test $# -gt 0; then
+    while [ $# -gt 0 ]; do
+        $1
+        shift
+    done
+else
+    prove $SRCDIR/test/etap/*.t
+fi
diff --git a/apps/couch/test/etap/test_util.erl.in b/apps/couch/test/etap/test_util.erl.in
new file mode 100644
index 00000000..4c42edb1
--- /dev/null
+++ b/apps/couch/test/etap/test_util.erl.in
@@ -0,0 +1,35 @@
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+% http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the +% License for the specific language governing permissions and limitations under +% the License. + +-module(test_util). + +-export([init_code_path/0]). +-export([source_file/1, build_file/1]). + +srcdir() -> + "@abs_top_srcdir@". + +builddir() -> + "@abs_top_builddir@". + +init_code_path() -> + Paths = ["etap", "couchdb", "erlang-oauth", "ibrowse", "mochiweb"], + lists:foreach(fun(Name) -> + code:add_pathz(filename:join([builddir(), "src", Name])) + end, Paths). + +source_file(Name) -> + filename:join([srcdir(), Name]). + +build_file(Name) -> + filename:join([builddir(), Name]). + diff --git a/apps/couch/test/javascript/cli_runner.js b/apps/couch/test/javascript/cli_runner.js new file mode 100644 index 00000000..cdbe2e73 --- /dev/null +++ b/apps/couch/test/javascript/cli_runner.js @@ -0,0 +1,52 @@ +// Licensed under the Apache License, Version 2.0 (the "License"); you may not +// use this file except in compliance with the License. You may obtain a copy of +// the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations under +// the License. + +var console = { + log: function(arg) { + var msg = (arg.toString()).replace(/\n/g, "\n "); + print("# " + msg); + } +}; + +function T(arg1, arg2) { + if(!arg1) { + throw((arg2 ? arg2 : arg1).toString()); + } +} + +function runTestConsole(num, name, func) { + try { + func(); + print("ok " + num + " " + name); + } catch(e) { + msg = e.toString(); + msg = msg.replace(/\n/g, "\n "); + print("not ok " + num + " " + name + " " + msg); + } +} + +function runAllTestsConsole() { + var numTests = 0; + for(var t in couchTests) { numTests += 1; } + print("1.." + numTests); + var testId = 0; + for(var t in couchTests) { + testId += 1; + runTestConsole(testId, t, couchTests[t]); + } +}; + +try { + runAllTestsConsole(); +} catch (e) { + p("# " + e.toString()); +} diff --git a/apps/couch/test/javascript/couch_http.js b/apps/couch/test/javascript/couch_http.js new file mode 100644 index 00000000..5f4716d2 --- /dev/null +++ b/apps/couch/test/javascript/couch_http.js @@ -0,0 +1,62 @@ +// Licensed under the Apache License, Version 2.0 (the "License"); you may not +// use this file except in compliance with the License. You may obtain a copy of +// the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations under +// the License. 
+ +(function() { + CouchHTTP.prototype.base_url = "http://127.0.0.1:5984" + + if(typeof(CouchHTTP) != "undefined") { + CouchHTTP.prototype.open = function(method, url, async) { + if(!/^\s*http:\/\//.test(url)) { + if(/^[^\/]/.test(url)) { + url = this.base_url + "/" + url; + } else { + url = this.base_url + url; + } + } + + return this._open(method, url, async); + }; + + CouchHTTP.prototype.setRequestHeader = function(name, value) { + // Drop content-length headers because cURL will set it for us + // based on body length + if(name.toLowerCase().replace(/^\s+|\s+$/g, '') != "content-length") { + this._setRequestHeader(name, value); + } + } + + CouchHTTP.prototype.send = function(body) { + this._send(body || ""); + var headers = {}; + this._headers.forEach(function(hdr) { + var pair = hdr.split(":"); + var name = pair.shift(); + headers[name] = pair.join(":").replace(/^\s+|\s+$/g, ""); + }); + this.headers = headers; + }; + + CouchHTTP.prototype.getResponseHeader = function(name) { + for(var hdr in this.headers) { + if(hdr.toLowerCase() == name.toLowerCase()) { + return this.headers[hdr]; + } + } + return null; + }; + } +})(); + +CouchDB.urlPrefix = ""; +CouchDB.newXhr = function() { + return new CouchHTTP(); +}; diff --git a/apps/couch/test/javascript/run.tpl b/apps/couch/test/javascript/run.tpl new file mode 100644 index 00000000..c5abe6e7 --- /dev/null +++ b/apps/couch/test/javascript/run.tpl @@ -0,0 +1,30 @@ +#!/bin/sh -e + +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. You may obtain a copy of +# the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations under +# the License. + +SRC_DIR=%abs_top_srcdir% +SCRIPT_DIR=$SRC_DIR/share/www/script +JS_TEST_DIR=$SRC_DIR/test/javascript + +COUCHJS=%abs_top_builddir%/src/couchdb/priv/couchjs + +cat $SCRIPT_DIR/json2.js \ + $SCRIPT_DIR/sha1.js \ + $SCRIPT_DIR/oauth.js \ + $SCRIPT_DIR/couch.js \ + $SCRIPT_DIR/couch_test_runner.js \ + $SCRIPT_DIR/couch_tests.js \ + $SCRIPT_DIR/test/*.js \ + $JS_TEST_DIR/couch_http.js \ + $JS_TEST_DIR/cli_runner.js \ + | $COUCHJS - diff --git a/apps/couch/test/view_server/query_server_spec.rb b/apps/couch/test/view_server/query_server_spec.rb new file mode 100644 index 00000000..de1df5c1 --- /dev/null +++ b/apps/couch/test/view_server/query_server_spec.rb @@ -0,0 +1,824 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. You may obtain a copy of +# the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations under +# the License. 
+ +# to run (requires ruby and rspec): +# spec test/view_server/query_server_spec.rb -f specdoc --color +# +# environment options: +# QS_TRACE=true +# shows full output from the query server +# QS_LANG=lang +# run tests on the query server (for now, one of: js, erlang) +# + +COUCH_ROOT = "#{File.dirname(__FILE__)}/../.." unless defined?(COUCH_ROOT) +LANGUAGE = ENV["QS_LANG"] || "js" + +puts "Running query server specs for #{LANGUAGE} query server" + +require 'spec' +require 'json' + +class OSProcessRunner + def self.run + trace = ENV["QS_TRACE"] || false + puts "launching #{run_command}" if trace + if block_given? + IO.popen(run_command, "r+") do |io| + qs = QueryServerRunner.new(io, trace) + yield qs + end + else + io = IO.popen(run_command, "r+") + QueryServerRunner.new(io, trace) + end + end + def initialize io, trace = false + @qsio = io + @trace = trace + end + def close + @qsio.close + end + def reset! + run(["reset"]) + end + def add_fun(fun) + run(["add_fun", fun]) + end + def teach_ddoc(ddoc) + run(["ddoc", "new", ddoc_id(ddoc), ddoc]) + end + def ddoc_run(ddoc, fun_path, args) + run(["ddoc", ddoc_id(ddoc), fun_path, args]) + end + def ddoc_id(ddoc) + d_id = ddoc["_id"] + raise 'ddoc must have _id' unless d_id + d_id + end + def get_chunks + resp = jsgets + raise "not a chunk" unless resp.first == "chunks" + return resp[1] + end + def run json + rrun json + jsgets + end + def rrun json + line = json.to_json + puts "run: #{line}" if @trace + @qsio.puts line + end + def rgets + resp = @qsio.gets + puts "got: #{resp}" if @trace + resp + end + def jsgets + resp = rgets + # err = @qserr.gets + # puts "err: #{err}" if err + if resp + begin + rj = JSON.parse("[#{resp.chomp}]")[0] + rescue JSON::ParserError + puts "JSON ERROR (dump under trace mode)" + # puts resp.chomp + while resp = rgets + # puts resp.chomp + end + end + if rj.respond_to?(:[]) && rj.is_a?(Array) + if rj[0] == "log" + log = rj[1] + puts "log: #{log}" if @trace + rj = jsgets + end + end + rj + else + raise "no response" + end + end +end + +class QueryServerRunner < OSProcessRunner + + COMMANDS = { + "js" => "#{COUCH_ROOT}/bin/couchjs_dev #{COUCH_ROOT}/share/server/main.js", + "erlang" => "#{COUCH_ROOT}/test/view_server/run_native_process.es" + } + + def self.run_command + COMMANDS[LANGUAGE] + end +end + +class ExternalRunner < OSProcessRunner + def self.run_command + "#{COUCH_ROOT}/src/couchdb/couchjs #{COUCH_ROOT}/share/server/echo.js" + end +end + +# we could organize this into a design document per language. +# that would make testing future languages really easy. + +functions = { + "emit-twice" => { + "js" => %{function(doc){emit("foo",doc.a); emit("bar",doc.a)}}, + "erlang" => <<-ERLANG + fun({Doc}) -> + A = couch_util:get_value(<<"a">>, Doc, null), + Emit(<<"foo">>, A), + Emit(<<"bar">>, A) + end. + ERLANG + }, + "emit-once" => { + "js" => <<-JS, + function(doc){ + emit("baz",doc.a) + } + JS + "erlang" => <<-ERLANG + fun({Doc}) -> + A = couch_util:get_value(<<"a">>, Doc, null), + Emit(<<"baz">>, A) + end. 
+    ERLANG
+  },
+  "reduce-values-length" => {
+    "js" => %{function(keys, values, rereduce) { return values.length; }},
+    "erlang" => %{fun(Keys, Values, ReReduce) -> length(Values) end.}
+  },
+  "reduce-values-sum" => {
+    "js" => %{function(keys, values, rereduce) { return sum(values); }},
+    "erlang" => %{fun(Keys, Values, ReReduce) -> lists:sum(Values) end.}
+  },
+  "validate-forbidden" => {
+    "js" => <<-JS,
+      function(newDoc, oldDoc, userCtx) {
+        if(newDoc.bad)
+          throw({forbidden:"bad doc"}); "foo bar";
+      }
+    JS
+    "erlang" => <<-ERLANG
+      fun({NewDoc}, _OldDoc, _UserCtx) ->
+        case couch_util:get_value(<<"bad">>, NewDoc) of
+            undefined -> 1;
+            _ -> {[{forbidden, <<"bad doc">>}]}
+        end
+      end.
+    ERLANG
+  },
+  "show-simple" => {
+    "js" => <<-JS,
+      function(doc, req) {
+        log("ok");
+        return [doc.title, doc.body].join(' - ');
+      }
+    JS
+    "erlang" => <<-ERLANG
+      fun({Doc}, Req) ->
+        Title = couch_util:get_value(<<"title">>, Doc),
+        Body = couch_util:get_value(<<"body">>, Doc),
+        Resp = <<Title/binary, " - ", Body/binary>>,
+        {[{<<"body">>, Resp}]}
+      end.
+    ERLANG
+  },
+  "show-headers" => {
+    "js" => <<-JS,
+      function(doc, req) {
+        var resp = {"code":200, "headers":{"X-Plankton":"Rusty"}};
+        resp.body = [doc.title, doc.body].join(' - ');
+        return resp;
+      }
+    JS
+    "erlang" => <<-ERLANG
+      fun({Doc}, Req) ->
+        Title = couch_util:get_value(<<"title">>, Doc),
+        Body = couch_util:get_value(<<"body">>, Doc),
+        Resp = <<Title/binary, " - ", Body/binary>>,
+        {[
+          {<<"code">>, 200},
+          {<<"headers">>, {[{<<"X-Plankton">>, <<"Rusty">>}]}},
+          {<<"body">>, Resp}
+        ]}
+      end.
+    ERLANG
+  },
+  "show-sends" => {
+    "js" => <<-JS,
+      function(head, req) {
+        start({headers:{"Content-Type" : "text/plain"}});
+        send("first chunk");
+        send('second "chunk"');
+        return "tail";
+      };
+    JS
+    "erlang" => <<-ERLANG
+      fun(Head, Req) ->
+        Resp = {[
+          {<<"headers">>, {[{<<"Content-Type">>, <<"text/plain">>}]}}
+        ]},
+        Start(Resp),
+        Send(<<"first chunk">>),
+        Send(<<"second \\\"chunk\\\"">>),
+        <<"tail">>
+      end.
+    ERLANG
+  },
+  "show-while-get-rows" => {
+    "js" => <<-JS,
+      function(head, req) {
+        send("first chunk");
+        send(req.q);
+        var row;
+        log("about to getRow " + typeof(getRow));
+        while(row = getRow()) {
+          send(row.key);
+        };
+        return "tail";
+      };
+    JS
+    "erlang" => <<-ERLANG,
+      fun(Head, {Req}) ->
+        Send(<<"first chunk">>),
+        Send(couch_util:get_value(<<"q">>, Req)),
+        Fun = fun({Row}, _) ->
+          Send(couch_util:get_value(<<"key">>, Row)),
+          {ok, nil}
+        end,
+        {ok, _} = FoldRows(Fun, nil),
+        <<"tail">>
+      end.
+    ERLANG
+  },
+  "show-while-get-rows-multi-send" => {
+    "js" => <<-JS,
+      function(head, req) {
+        send("bacon");
+        var row;
+        log("about to getRow " + typeof(getRow));
+        while(row = getRow()) {
+          send(row.key);
+          send("eggs");
+        };
+        return "tail";
+      };
+    JS
+    "erlang" => <<-ERLANG,
+      fun(Head, Req) ->
+        Send(<<"bacon">>),
+        Fun = fun({Row}, _) ->
+          Send(couch_util:get_value(<<"key">>, Row)),
+          Send(<<"eggs">>),
+          {ok, nil}
+        end,
+        FoldRows(Fun, nil),
+        <<"tail">>
+      end.
+    ERLANG
+  },
+  "list-simple" => {
+    "js" => <<-JS,
+      function(head, req) {
+        send("first chunk");
+        send(req.q);
+        var row;
+        while(row = getRow()) {
+          send(row.key);
+        };
+        return "early";
+      };
+    JS
+    "erlang" => <<-ERLANG,
+      fun(Head, {Req}) ->
+        Send(<<"first chunk">>),
+        Send(couch_util:get_value(<<"q">>, Req)),
+        Fun = fun({Row}, _) ->
+          Send(couch_util:get_value(<<"key">>, Row)),
+          {ok, nil}
+        end,
+        FoldRows(Fun, nil),
+        <<"early">>
+      end.
+ ERLANG + }, + "list-chunky" => { + "js" => <<-JS, + function(head, req) { + send("first chunk"); + send(req.q); + var row, i=0; + while(row = getRow()) { + send(row.key); + i += 1; + if (i > 2) { + return('early tail'); + } + }; + }; + JS + "erlang" => <<-ERLANG, + fun(Head, {Req}) -> + Send(<<"first chunk">>), + Send(couch_util:get_value(<<"q">>, Req)), + Fun = fun + ({Row}, Count) when Count < 2 -> + Send(couch_util:get_value(<<"key">>, Row)), + {ok, Count+1}; + ({Row}, Count) when Count == 2 -> + Send(couch_util:get_value(<<"key">>, Row)), + {stop, <<"early tail">>} + end, + {ok, Tail} = FoldRows(Fun, 0), + Tail + end. + ERLANG + }, + "list-old-style" => { + "js" => <<-JS, + function(head, req, foo, bar) { + return "stuff"; + } + JS + "erlang" => <<-ERLANG, + fun(Head, Req, Foo, Bar) -> + <<"stuff">> + end. + ERLANG + }, + "list-capped" => { + "js" => <<-JS, + function(head, req) { + send("bacon") + var row, i = 0; + while(row = getRow()) { + send(row.key); + i += 1; + if (i > 2) { + return('early'); + } + }; + } + JS + "erlang" => <<-ERLANG, + fun(Head, Req) -> + Send(<<"bacon">>), + Fun = fun + ({Row}, Count) when Count < 2 -> + Send(couch_util:get_value(<<"key">>, Row)), + {ok, Count+1}; + ({Row}, Count) when Count == 2 -> + Send(couch_util:get_value(<<"key">>, Row)), + {stop, <<"early">>} + end, + {ok, Tail} = FoldRows(Fun, 0), + Tail + end. + ERLANG + }, + "list-raw" => { + "js" => <<-JS, + function(head, req) { + // log(this.toSource()); + // log(typeof send); + send("first chunk"); + send(req.q); + var row; + while(row = getRow()) { + send(row.key); + }; + return "tail"; + }; + JS + "erlang" => <<-ERLANG, + fun(Head, {Req}) -> + Send(<<"first chunk">>), + Send(couch_util:get_value(<<"q">>, Req)), + Fun = fun({Row}, _) -> + Send(couch_util:get_value(<<"key">>, Row)), + {ok, nil} + end, + FoldRows(Fun, nil), + <<"tail">> + end. + ERLANG + }, + "filter-basic" => { + "js" => <<-JS, + function(doc, req) { + if (doc.good) { + return true; + } + } + JS + "erlang" => <<-ERLANG, + fun({Doc}, Req) -> + couch_util:get_value(<<"good">>, Doc) + end. + ERLANG + }, + "update-basic" => { + "js" => <<-JS, + function(doc, req) { + doc.world = "hello"; + var resp = [doc, "hello doc"]; + return resp; + } + JS + "erlang" => <<-ERLANG, + fun({Doc}, Req) -> + Doc2 = [{<<"world">>, <<"hello">>}|Doc], + [{Doc2}, {[{<<"body">>, <<"hello doc">>}]}] + end. + ERLANG + }, + "error" => { + "js" => <<-JS, + function() { + throw(["error","error_key","testing"]); + } + JS + "erlang" => <<-ERLANG + fun(A, B) -> + throw([<<"error">>,<<"error_key">>,<<"testing">>]) + end. + ERLANG + }, + "fatal" => { + "js" => <<-JS, + function() { + throw(["fatal","error_key","testing"]); + } + JS + "erlang" => <<-ERLANG + fun(A, B) -> + throw([<<"fatal">>,<<"error_key">>,<<"testing">>]) + end. + ERLANG + } +} + +def make_ddoc(fun_path, fun_str) + doc = {"_id"=>"foo"} + d = doc + while p = fun_path.shift + l = p + if !fun_path.empty? 
+ d[p] = {} + d = d[p] + end + end + d[l] = fun_str + doc +end + +describe "query server normal case" do + before(:all) do + `cd #{COUCH_ROOT} && make` + @qs = QueryServerRunner.run + end + after(:all) do + @qs.close + end + it "should reset" do + @qs.run(["reset"]).should == true + end + it "should not erase ddocs on reset" do + @fun = functions["show-simple"][LANGUAGE] + @ddoc = make_ddoc(["shows","simple"], @fun) + @qs.teach_ddoc(@ddoc) + @qs.run(["reset"]).should == true + @qs.ddoc_run(@ddoc, + ["shows","simple"], + [{:title => "Best ever", :body => "Doc body"}, {}]).should == + ["resp", {"body" => "Best ever - Doc body"}] + end + + it "should run map funs" do + @qs.reset! + @qs.run(["add_fun", functions["emit-twice"][LANGUAGE]]).should == true + @qs.run(["add_fun", functions["emit-once"][LANGUAGE]]).should == true + rows = @qs.run(["map_doc", {:a => "b"}]) + rows[0][0].should == ["foo", "b"] + rows[0][1].should == ["bar", "b"] + rows[1][0].should == ["baz", "b"] + end + describe "reduce" do + before(:all) do + @fun = functions["reduce-values-length"][LANGUAGE] + @qs.reset! + end + it "should reduce" do + kvs = (0...10).collect{|i|[i,i*2]} + @qs.run(["reduce", [@fun], kvs]).should == [true, [10]] + end + end + describe "rereduce" do + before(:all) do + @fun = functions["reduce-values-sum"][LANGUAGE] + @qs.reset! + end + it "should rereduce" do + vs = (0...10).collect{|i|i} + @qs.run(["rereduce", [@fun], vs]).should == [true, [45]] + end + end + + describe "design docs" do + before(:all) do + @ddoc = { + "_id" => "foo" + } + @qs.reset! + end + it "should learn design docs" do + @qs.teach_ddoc(@ddoc).should == true + end + end + + # it "should validate" + describe "validation" do + before(:all) do + @fun = functions["validate-forbidden"][LANGUAGE] + @ddoc = make_ddoc(["validate_doc_update"], @fun) + @qs.teach_ddoc(@ddoc) + end + it "should allow good updates" do + @qs.ddoc_run(@ddoc, + ["validate_doc_update"], + [{"good" => true}, {}, {}]).should == 1 + end + it "should reject invalid updates" do + @qs.ddoc_run(@ddoc, + ["validate_doc_update"], + [{"bad" => true}, {}, {}]).should == {"forbidden"=>"bad doc"} + end + end + + describe "show" do + before(:all) do + @fun = functions["show-simple"][LANGUAGE] + @ddoc = make_ddoc(["shows","simple"], @fun) + @qs.teach_ddoc(@ddoc) + end + it "should show" do + @qs.ddoc_run(@ddoc, + ["shows","simple"], + [{:title => "Best ever", :body => "Doc body"}, {}]).should == + ["resp", {"body" => "Best ever - Doc body"}] + end + end + + describe "show with headers" do + before(:all) do + # TODO we can make real ddocs up there. + @fun = functions["show-headers"][LANGUAGE] + @ddoc = make_ddoc(["shows","headers"], @fun) + @qs.teach_ddoc(@ddoc) + end + it "should show headers" do + @qs.ddoc_run( + @ddoc, + ["shows","headers"], + [{:title => "Best ever", :body => "Doc body"}, {}] + ). + should == ["resp", {"code"=>200,"headers" => {"X-Plankton"=>"Rusty"}, "body" => "Best ever - Doc body"}] + end + end + + describe "recoverable error" do + before(:all) do + @fun = functions["error"][LANGUAGE] + @ddoc = make_ddoc(["shows","error"], @fun) + @qs.teach_ddoc(@ddoc) + end + it "should not exit" do + @qs.ddoc_run(@ddoc, ["shows","error"], + [{"foo"=>"bar"}, {"q" => "ok"}]). 
+ should == ["error", "error_key", "testing"] + # still running + @qs.run(["reset"]).should == true + end + end + + describe "changes filter" do + before(:all) do + @fun = functions["filter-basic"][LANGUAGE] + @ddoc = make_ddoc(["filters","basic"], @fun) + @qs.teach_ddoc(@ddoc) + end + it "should only return true for good docs" do + @qs.ddoc_run(@ddoc, + ["filters","basic"], + [[{"key"=>"bam", "good" => true}, {"foo" => "bar"}, {"good" => true}], {"req" => "foo"}] + ). + should == [true, [true, false, true]] + end + end + + describe "update" do + before(:all) do + # in another patch we can remove this duplication + # by setting up the design doc for each language ahead of time. + @fun = functions["update-basic"][LANGUAGE] + @ddoc = make_ddoc(["updates","basic"], @fun) + @qs.teach_ddoc(@ddoc) + end + it "should return a doc and a resp body" do + up, doc, resp = @qs.ddoc_run(@ddoc, + ["updates","basic"], + [{"foo" => "gnarly"}, {"method" => "POST"}] + ) + up.should == "up" + doc.should == {"foo" => "gnarly", "world" => "hello"} + resp["body"].should == "hello doc" + end + end + +# end +# LIST TESTS +# __END__ + + describe "ddoc list" do + before(:all) do + @ddoc = { + "_id" => "foo", + "lists" => { + "simple" => functions["list-simple"][LANGUAGE], + "headers" => functions["show-sends"][LANGUAGE], + "rows" => functions["show-while-get-rows"][LANGUAGE], + "buffer-chunks" => functions["show-while-get-rows-multi-send"][LANGUAGE], + "chunky" => functions["list-chunky"][LANGUAGE] + } + } + @qs.teach_ddoc(@ddoc) + end + + describe "example list" do + it "should run normal" do + @qs.ddoc_run(@ddoc, + ["lists","simple"], + [{"foo"=>"bar"}, {"q" => "ok"}] + ).should == ["start", ["first chunk", "ok"], {"headers"=>{}}] + @qs.run(["list_row", {"key"=>"baz"}]).should == ["chunks", ["baz"]] + @qs.run(["list_row", {"key"=>"bam"}]).should == ["chunks", ["bam"]] + @qs.run(["list_row", {"key"=>"foom"}]).should == ["chunks", ["foom"]] + @qs.run(["list_row", {"key"=>"fooz"}]).should == ["chunks", ["fooz"]] + @qs.run(["list_row", {"key"=>"foox"}]).should == ["chunks", ["foox"]] + @qs.run(["list_end"]).should == ["end" , ["early"]] + end + end + + describe "headers" do + it "should do headers proper" do + @qs.ddoc_run(@ddoc, ["lists","headers"], + [{"total_rows"=>1000}, {"q" => "ok"}] + ).should == ["start", ["first chunk", 'second "chunk"'], + {"headers"=>{"Content-Type"=>"text/plain"}}] + @qs.rrun(["list_end"]) + @qs.jsgets.should == ["end", ["tail"]] + end + end + + describe "with rows" do + it "should list em" do + @qs.ddoc_run(@ddoc, ["lists","rows"], + [{"foo"=>"bar"}, {"q" => "ok"}]). + should == ["start", ["first chunk", "ok"], {"headers"=>{}}] + @qs.rrun(["list_row", {"key"=>"baz"}]) + @qs.get_chunks.should == ["baz"] + @qs.rrun(["list_row", {"key"=>"bam"}]) + @qs.get_chunks.should == ["bam"] + @qs.rrun(["list_end"]) + @qs.jsgets.should == ["end", ["tail"]] + end + it "should work with zero rows" do + @qs.ddoc_run(@ddoc, ["lists","rows"], + [{"foo"=>"bar"}, {"q" => "ok"}]). + should == ["start", ["first chunk", "ok"], {"headers"=>{}}] + @qs.rrun(["list_end"]) + @qs.jsgets.should == ["end", ["tail"]] + end + end + + describe "should buffer multiple chunks sent for a single row." do + it "should should buffer em" do + @qs.ddoc_run(@ddoc, ["lists","buffer-chunks"], + [{"foo"=>"bar"}, {"q" => "ok"}]). 
+ should == ["start", ["bacon"], {"headers"=>{}}] + @qs.rrun(["list_row", {"key"=>"baz"}]) + @qs.get_chunks.should == ["baz", "eggs"] + @qs.rrun(["list_row", {"key"=>"bam"}]) + @qs.get_chunks.should == ["bam", "eggs"] + @qs.rrun(["list_end"]) + @qs.jsgets.should == ["end", ["tail"]] + end + end + it "should end after 2" do + @qs.ddoc_run(@ddoc, ["lists","chunky"], + [{"foo"=>"bar"}, {"q" => "ok"}]). + should == ["start", ["first chunk", "ok"], {"headers"=>{}}] + + @qs.run(["list_row", {"key"=>"baz"}]). + should == ["chunks", ["baz"]] + + @qs.run(["list_row", {"key"=>"bam"}]). + should == ["chunks", ["bam"]] + + @qs.run(["list_row", {"key"=>"foom"}]). + should == ["end", ["foom", "early tail"]] + # here's where js has to discard quit properly + @qs.run(["reset"]). + should == true + end + end + end + + + +def should_have_exited qs + begin + qs.run(["reset"]) + "raise before this (except Erlang)".should == true + rescue RuntimeError => e + e.message.should == "no response" + rescue Errno::EPIPE + true.should == true + end +end + +describe "query server that exits" do + before(:each) do + @qs = QueryServerRunner.run + @ddoc = { + "_id" => "foo", + "lists" => { + "capped" => functions["list-capped"][LANGUAGE], + "raw" => functions["list-raw"][LANGUAGE] + }, + "shows" => { + "fatal" => functions["fatal"][LANGUAGE] + } + } + @qs.teach_ddoc(@ddoc) + end + after(:each) do + @qs.close + end + + describe "only goes to 2 list" do + it "should exit if erlang sends too many rows" do + @qs.ddoc_run(@ddoc, ["lists","capped"], + [{"foo"=>"bar"}, {"q" => "ok"}]). + should == ["start", ["bacon"], {"headers"=>{}}] + @qs.run(["list_row", {"key"=>"baz"}]).should == ["chunks", ["baz"]] + @qs.run(["list_row", {"key"=>"foom"}]).should == ["chunks", ["foom"]] + @qs.run(["list_row", {"key"=>"fooz"}]).should == ["end", ["fooz", "early"]] + e = @qs.run(["list_row", {"key"=>"foox"}]) + e[0].should == "error" + e[1].should == "unknown_command" + should_have_exited @qs + end + end + + describe "raw list" do + it "should exit if it gets a non-row in the middle" do + @qs.ddoc_run(@ddoc, ["lists","raw"], + [{"foo"=>"bar"}, {"q" => "ok"}]). + should == ["start", ["first chunk", "ok"], {"headers"=>{}}] + e = @qs.run(["reset"]) + e[0].should == "error" + e[1].should == "list_error" + should_have_exited @qs + end + end + + describe "fatal error" do + it "should exit" do + @qs.ddoc_run(@ddoc, ["shows","fatal"], + [{"foo"=>"bar"}, {"q" => "ok"}]). + should == ["error", "error_key", "testing"] + should_have_exited @qs + end + end +end + +describe "thank you for using the tests" do + it "for more info run with QS_TRACE=true or see query_server_spec.rb file header" do + end +end \ No newline at end of file diff --git a/apps/couch/test/view_server/run_native_process.es b/apps/couch/test/view_server/run_native_process.es new file mode 100755 index 00000000..fcf16d75 --- /dev/null +++ b/apps/couch/test/view_server/run_native_process.es @@ -0,0 +1,59 @@ +#! /usr/bin/env escript + +% Licensed under the Apache License, Version 2.0 (the "License"); you may not +% use this file except in compliance with the License. You may obtain a copy of +% the License at +% +% http://www.apache.org/licenses/LICENSE-2.0 +% +% Unless required by applicable law or agreed to in writing, software +% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +% License for the specific language governing permissions and limitations under +% the License. 
+ +read() -> + case io:get_line('') of + eof -> stop; + Data -> couch_util:json_decode(Data) + end. + +send(Data) when is_binary(Data) -> + send(binary_to_list(Data)); +send(Data) when is_list(Data) -> + io:format(Data ++ "\n", []). + +write(Data) -> + % log("~p", [Data]), + case (catch couch_util:json_encode(Data)) of + % when testing, this is what prints your errors + {json_encode, Error} -> write({[{<<"error">>, Error}]}); + Json -> send(Json) + end. + +% log(Mesg) -> +% log(Mesg, []). +% log(Mesg, Params) -> +% io:format(standard_error, Mesg, Params). +% jlog(Mesg) -> +% write([<<"log">>, list_to_binary(io_lib:format("~p",[Mesg]))]). + +loop(Pid) -> + case read() of + stop -> ok; + Json -> + case (catch couch_native_process:prompt(Pid, Json)) of + {error, Reason} -> + ok = write([error, Reason, Reason]); + Resp -> + ok = write(Resp), + loop(Pid) + end + end. + +main([]) -> + code:add_pathz("src/couchdb"), + code:add_pathz("src/mochiweb"), + {ok, Pid} = couch_native_process:start_link(), + loop(Pid). + diff --git a/license.skip b/license.skip deleted file mode 100644 index a7aa6ec3..00000000 --- a/license.skip +++ /dev/null @@ -1,107 +0,0 @@ -\.svn -^AUTHORS -^BUGS -^CHANGES -^DEVELOPERS -^DEVELOPERS.gz -^INSTALL -^INSTALL.Unix -^INSTALL.Unix.gz -^INSTALL.Windows -^INSTALL.Windows.gz -^INSTALL.gz -^LICENSE.gz -^Makefile -^Makefile.in -^NEWS -^NOTICE -^README -^THANKS -^aclocal.m4 -^apache-couchdb-* -^autom4te.cache/* -^bin/Makefile -^bin/Makefile.in -^bin/couchdb.1 -^bin/couchjs.1 -^build-aux/* -^config.* -^configure -^couchdb.stderr -^couchdb.stdout -^cover/.*\.coverdata -^cover/.*\.html -^erl_crash.dump -^etc/Makefile -^etc/Makefile.in -^etc/couchdb/Makefile -^etc/couchdb/Makefile.in -^etc/couchdb/default* -^etc/couchdb/local* -^etc/default/Makefile -^etc/default/Makefile.in -^etc/default/couchdb -^etc/init/Makefile -^etc/init/Makefile.in -^etc/launchd/Makefile -^etc/launchd/Makefile.in -^etc/launchd/org.apache.couchdb.plist.* -^etc/logrotate.d/Makefile -^etc/logrotate.d/Makefile.in -^etc/logrotate.d/couchdb* -^etc/windows/Makefile -^etc/windows/README.txt.tpl -^libtool -^license.skip -^m4/* -^share/Makefile -^share/Makefile.in -^share/server/json2.js -^share/server/mimeparse.js -^share/www/favicon.ico -^share/www/image/* -^share/www/script/jquery.* -^share/www/script/json2.js -^share/www/script/jspec/* -^share/www/script/sha1.js -^share/www/script/base64.js -^share/www/script/test/lorem* -^src/Makefile -^src/Makefile.in -^src/couchdb/.*beam -^src/couchdb/.deps/* -^src/couchdb/Makefile -^src/couchdb/Makefile.in -^src/couchdb/couch.app* -^src/couchdb/couch.app.tpl.in -^src/couchdb/priv/.*o -^src/couchdb/priv/.deps/* -^src/couchdb/priv/Makefile -^src/couchdb/priv/Makefile.in -^src/couchdb/priv/couch_icu_driver.la -^src/couchdb/priv/couchjs -^src/couchdb/priv/couchspawnkillable -^src/couchdb/priv/stat_descriptions.cfg -^src/erlang-oauth/* -^src/etap/* -^src/ibrowse/* -^src/mochiweb/* -^stamp-h1 -^test/Makefile -^test/Makefile.in -^test/bench/Makefile -^test/bench/Makefile.in -^test/etap/.*beam -^test/etap/Makefile -^test/etap/Makefile.in -^test/etap/temp.* -^test/javascript/Makefile -^test/javascript/Makefile.in -^test/local.ini -^test/view_server/Makefile -^test/view_server/Makefile.in -^tmp/* -^utils/Makefile -^utils/Makefile.in -^var/Makefile -^var/Makefile.in diff --git a/rebar.config b/rebar.config deleted file mode 100644 index ea77f778..00000000 --- a/rebar.config +++ /dev/null @@ -1,14 +0,0 @@ -{sub_dirs, [ - "apps/ibrowse", - "apps/couch", - "apps/chttpd", - "apps/etap", - 
"apps/fabric", - "apps/mem3", - "apps/mochiweb", - "apps/oauth", - "apps/rexi", - "rel" -]}. -{erl_opts, [debug_info]}. -{lib_dirs, ["apps"]}. diff --git a/rel/overlay/var/share/server/filter.js b/rel/overlay/var/share/server/filter.js new file mode 100644 index 00000000..1e8556a4 --- /dev/null +++ b/rel/overlay/var/share/server/filter.js @@ -0,0 +1,23 @@ +// Licensed under the Apache License, Version 2.0 (the "License"); you may not +// use this file except in compliance with the License. You may obtain a copy of +// the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations under +// the License. + +var Filter = { + filter : function(fun, ddoc, args) { + var results = []; + var docs = args[0]; + var req = args[1]; + for (var i=0; i < docs.length; i++) { + results.push((fun.apply(ddoc, [docs[i], req]) && true) || false); + }; + respond([true, results]); + } +}; diff --git a/rel/overlay/var/share/server/json2.js b/rel/overlay/var/share/server/json2.js new file mode 100644 index 00000000..39d8f370 --- /dev/null +++ b/rel/overlay/var/share/server/json2.js @@ -0,0 +1,481 @@ +/* + http://www.JSON.org/json2.js + 2009-09-29 + + Public Domain. + + NO WARRANTY EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK. + + See http://www.JSON.org/js.html + + + This code should be minified before deployment. + See http://javascript.crockford.com/jsmin.html + + USE YOUR OWN COPY. IT IS EXTREMELY UNWISE TO LOAD CODE FROM SERVERS YOU DO + NOT CONTROL. + + + This file creates a global JSON object containing two methods: stringify + and parse. + + JSON.stringify(value, replacer, space) + value any JavaScript value, usually an object or array. + + replacer an optional parameter that determines how object + values are stringified for objects. It can be a + function or an array of strings. + + space an optional parameter that specifies the indentation + of nested structures. If it is omitted, the text will + be packed without extra whitespace. If it is a number, + it will specify the number of spaces to indent at each + level. If it is a string (such as '\t' or ' '), + it contains the characters used to indent at each level. + + This method produces a JSON text from a JavaScript value. + + When an object value is found, if the object contains a toJSON + method, its toJSON method will be called and the result will be + stringified. A toJSON method does not serialize: it returns the + value represented by the name/value pair that should be serialized, + or undefined if nothing should be serialized. The toJSON method + will be passed the key associated with the value, and this will be + bound to the value + + For example, this would serialize Dates as ISO strings. + + Date.prototype.toJSON = function (key) { + function f(n) { + // Format integers to have at least two digits. + return n < 10 ? '0' + n : n; + } + + return this.getUTCFullYear() + '-' + + f(this.getUTCMonth() + 1) + '-' + + f(this.getUTCDate()) + 'T' + + f(this.getUTCHours()) + ':' + + f(this.getUTCMinutes()) + ':' + + f(this.getUTCSeconds()) + 'Z'; + }; + + You can provide an optional replacer method. It will be passed the + key and value of each member, with this bound to the containing + object. 
The value that is returned from your method will be + serialized. If your method returns undefined, then the member will + be excluded from the serialization. + + If the replacer parameter is an array of strings, then it will be + used to select the members to be serialized. It filters the results + such that only members with keys listed in the replacer array are + stringified. + + Values that do not have JSON representations, such as undefined or + functions, will not be serialized. Such values in objects will be + dropped; in arrays they will be replaced with null. You can use + a replacer function to replace those with JSON values. + JSON.stringify(undefined) returns undefined. + + The optional space parameter produces a stringification of the + value that is filled with line breaks and indentation to make it + easier to read. + + If the space parameter is a non-empty string, then that string will + be used for indentation. If the space parameter is a number, then + the indentation will be that many spaces. + + Example: + + text = JSON.stringify(['e', {pluribus: 'unum'}]); + // text is '["e",{"pluribus":"unum"}]' + + + text = JSON.stringify(['e', {pluribus: 'unum'}], null, '\t'); + // text is '[\n\t"e",\n\t{\n\t\t"pluribus": "unum"\n\t}\n]' + + text = JSON.stringify([new Date()], function (key, value) { + return this[key] instanceof Date ? + 'Date(' + this[key] + ')' : value; + }); + // text is '["Date(---current time---)"]' + + + JSON.parse(text, reviver) + This method parses a JSON text to produce an object or array. + It can throw a SyntaxError exception. + + The optional reviver parameter is a function that can filter and + transform the results. It receives each of the keys and values, + and its return value is used instead of the original value. + If it returns what it received, then the structure is not modified. + If it returns undefined then the member is deleted. + + Example: + + // Parse the text. Values that look like ISO date strings will + // be converted to Date objects. + + myData = JSON.parse(text, function (key, value) { + var a; + if (typeof value === 'string') { + a = +/^(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2}(?:\.\d*)?)Z$/.exec(value); + if (a) { + return new Date(Date.UTC(+a[1], +a[2] - 1, +a[3], +a[4], + +a[5], +a[6])); + } + } + return value; + }); + + myData = JSON.parse('["Date(09/09/2001)"]', function (key, value) { + var d; + if (typeof value === 'string' && + value.slice(0, 5) === 'Date(' && + value.slice(-1) === ')') { + d = new Date(value.slice(5, -1)); + if (d) { + return d; + } + } + return value; + }); + + + This is a reference implementation. You are free to copy, modify, or + redistribute. +*/ + +/*jslint evil: true, strict: false */ + +/*members "", "\b", "\t", "\n", "\f", "\r", "\"", JSON, "\\", apply, + call, charCodeAt, getUTCDate, getUTCFullYear, getUTCHours, + getUTCMinutes, getUTCMonth, getUTCSeconds, hasOwnProperty, join, + lastIndex, length, parse, prototype, push, replace, slice, stringify, + test, toJSON, toString, valueOf +*/ + + +// Create a JSON object only if one does not already exist. We create the +// methods in a closure to avoid creating global variables. + +if (!this.JSON) { + this.JSON = {}; +} + +(function () { + + function f(n) { + // Format integers to have at least two digits. + return n < 10 ? '0' + n : n; + } + + if (typeof Date.prototype.toJSON !== 'function') { + + Date.prototype.toJSON = function (key) { + + return isFinite(this.valueOf()) ? 
+ this.getUTCFullYear() + '-' + + f(this.getUTCMonth() + 1) + '-' + + f(this.getUTCDate()) + 'T' + + f(this.getUTCHours()) + ':' + + f(this.getUTCMinutes()) + ':' + + f(this.getUTCSeconds()) + 'Z' : null; + }; + + String.prototype.toJSON = + Number.prototype.toJSON = + Boolean.prototype.toJSON = function (key) { + return this.valueOf(); + }; + } + + var cx = /[\u0000\u00ad\u0600-\u0604\u070f\u17b4\u17b5\u200c-\u200f\u2028-\u202f\u2060-\u206f\ufeff\ufff0-\uffff]/g, + escapable = /[\\\"\x00-\x1f\x7f-\x9f\u00ad\u0600-\u0604\u070f\u17b4\u17b5\u200c-\u200f\u2028-\u202f\u2060-\u206f\ufeff\ufff0-\uffff]/g, + gap, + indent, + meta = { // table of character substitutions + '\b': '\\b', + '\t': '\\t', + '\n': '\\n', + '\f': '\\f', + '\r': '\\r', + '"' : '\\"', + '\\': '\\\\' + }, + rep; + + + function quote(string) { + +// If the string contains no control characters, no quote characters, and no +// backslash characters, then we can safely slap some quotes around it. +// Otherwise we must also replace the offending characters with safe escape +// sequences. + + escapable.lastIndex = 0; + return escapable.test(string) ? + '"' + string.replace(escapable, function (a) { + var c = meta[a]; + return typeof c === 'string' ? c : + '\\u' + ('0000' + a.charCodeAt(0).toString(16)).slice(-4); + }) + '"' : + '"' + string + '"'; + } + + + function str(key, holder) { + +// Produce a string from holder[key]. + + var i, // The loop counter. + k, // The member key. + v, // The member value. + length, + mind = gap, + partial, + value = holder[key]; + +// If the value has a toJSON method, call it to obtain a replacement value. + + if (value && typeof value === 'object' && + typeof value.toJSON === 'function') { + value = value.toJSON(key); + } + +// If we were called with a replacer function, then call the replacer to +// obtain a replacement value. + + if (typeof rep === 'function') { + value = rep.call(holder, key, value); + } + +// What happens next depends on the value's type. + + switch (typeof value) { + case 'string': + return quote(value); + + case 'number': + +// JSON numbers must be finite. Encode non-finite numbers as null. + + return isFinite(value) ? String(value) : 'null'; + + case 'boolean': + case 'null': + +// If the value is a boolean or null, convert it to a string. Note: +// typeof null does not produce 'null'. The case is included here in +// the remote chance that this gets fixed someday. + + return String(value); + +// If the type is 'object', we might be dealing with an object or an array or +// null. + + case 'object': + +// Due to a specification blunder in ECMAScript, typeof null is 'object', +// so watch out for that case. + + if (!value) { + return 'null'; + } + +// Make an array to hold the partial results of stringifying this object value. + + gap += indent; + partial = []; + +// Is the value an array? + + if (Object.prototype.toString.apply(value) === '[object Array]') { + +// The value is an array. Stringify every element. Use null as a placeholder +// for non-JSON values. + + length = value.length; + for (i = 0; i < length; i += 1) { + partial[i] = str(i, value) || 'null'; + } + +// Join all of the elements together, separated with commas, and wrap them in +// brackets. + + v = partial.length === 0 ? '[]' : + gap ? '[\n' + gap + + partial.join(',\n' + gap) + '\n' + + mind + ']' : + '[' + partial.join(',') + ']'; + gap = mind; + return v; + } + +// If the replacer is an array, use it to select the members to be stringified. 
+ + if (rep && typeof rep === 'object') { + length = rep.length; + for (i = 0; i < length; i += 1) { + k = rep[i]; + if (typeof k === 'string') { + v = str(k, value); + if (v) { + partial.push(quote(k) + (gap ? ': ' : ':') + v); + } + } + } + } else { + +// Otherwise, iterate through all of the keys in the object. + + for (k in value) { + if (Object.hasOwnProperty.call(value, k)) { + v = str(k, value); + if (v) { + partial.push(quote(k) + (gap ? ': ' : ':') + v); + } + } + } + } + +// Join all of the member texts together, separated with commas, +// and wrap them in braces. + + v = partial.length === 0 ? '{}' : + gap ? '{\n' + gap + partial.join(',\n' + gap) + '\n' + + mind + '}' : '{' + partial.join(',') + '}'; + gap = mind; + return v; + } + } + +// If the JSON object does not yet have a stringify method, give it one. + + if (typeof JSON.stringify !== 'function') { + JSON.stringify = function (value, replacer, space) { + +// The stringify method takes a value and an optional replacer, and an optional +// space parameter, and returns a JSON text. The replacer can be a function +// that can replace values, or an array of strings that will select the keys. +// A default replacer method can be provided. Use of the space parameter can +// produce text that is more easily readable. + + var i; + gap = ''; + indent = ''; + +// If the space parameter is a number, make an indent string containing that +// many spaces. + + if (typeof space === 'number') { + for (i = 0; i < space; i += 1) { + indent += ' '; + } + +// If the space parameter is a string, it will be used as the indent string. + + } else if (typeof space === 'string') { + indent = space; + } + +// If there is a replacer, it must be a function or an array. +// Otherwise, throw an error. + + rep = replacer; + if (replacer && typeof replacer !== 'function' && + (typeof replacer !== 'object' || + typeof replacer.length !== 'number')) { + throw new Error('JSON.stringify'); + } + +// Make a fake root object containing our value under the key of ''. +// Return the result of stringifying the value. + + return str('', {'': value}); + }; + } + + +// If the JSON object does not yet have a parse method, give it one. + + if (typeof JSON.parse !== 'function') { + JSON.parse = function (text, reviver) { + +// The parse method takes a text and an optional reviver function, and returns +// a JavaScript value if the text is a valid JSON text. + + var j; + + function walk(holder, key) { + +// The walk method is used to recursively walk the resulting structure so +// that modifications can be made. + + var k, v, value = holder[key]; + if (value && typeof value === 'object') { + for (k in value) { + if (Object.hasOwnProperty.call(value, k)) { + v = walk(value, k); + if (v !== undefined) { + value[k] = v; + } else { + delete value[k]; + } + } + } + } + return reviver.call(holder, key, value); + } + + +// Parsing happens in four stages. In the first stage, we replace certain +// Unicode characters with escape sequences. JavaScript handles many characters +// incorrectly, either silently deleting them, or treating them as line endings. + + cx.lastIndex = 0; + if (cx.test(text)) { + text = text.replace(cx, function (a) { + return '\\u' + + ('0000' + a.charCodeAt(0).toString(16)).slice(-4); + }); + } + +// In the second stage, we run the text against regular expressions that look +// for non-JSON patterns. We are especially concerned with '()' and 'new' +// because they can cause invocation, and '=' because it can cause mutation. 
+// But just to be safe, we want to reject all unexpected forms. + +// We split the second stage into 4 regexp operations in order to work around +// crippling inefficiencies in IE's and Safari's regexp engines. First we +// replace the JSON backslash pairs with '@' (a non-JSON character). Second, we +// replace all simple value tokens with ']' characters. Third, we delete all +// open brackets that follow a colon or comma or that begin the text. Finally, +// we look to see that the remaining characters are only whitespace or ']' or +// ',' or ':' or '{' or '}'. If that is so, then the text is safe for eval. + + if (/^[\],:{}\s]*$/. +test(text.replace(/\\(?:["\\\/bfnrt]|u[0-9a-fA-F]{4})/g, '@'). +replace(/"[^"\\\n\r]*"|true|false|null|-?\d+(?:\.\d*)?(?:[eE][+\-]?\d+)?/g, ']'). +replace(/(?:^|:|,)(?:\s*\[)+/g, ''))) { + +// In the third stage we use the eval function to compile the text into a +// JavaScript structure. The '{' operator is subject to a syntactic ambiguity +// in JavaScript: it can begin a block or an object literal. We wrap the text +// in parens to eliminate the ambiguity. + + j = eval('(' + text + ')'); + +// In the optional fourth stage, we recursively walk the new structure, passing +// each name/value pair to a reviver function for possible transformation. + + return typeof reviver === 'function' ? + walk({'': j}, '') : j; + } + +// If the text is not JSON parseable, then a SyntaxError is thrown. + + throw new SyntaxError('JSON.parse'); + }; + } +}()); diff --git a/rel/overlay/var/share/server/loop.js b/rel/overlay/var/share/server/loop.js new file mode 100644 index 00000000..300151e9 --- /dev/null +++ b/rel/overlay/var/share/server/loop.js @@ -0,0 +1,140 @@ +// Licensed under the Apache License, Version 2.0 (the "License"); you may not +// use this file except in compliance with the License. You may obtain a copy of +// the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations under +// the License. + +var sandbox = null; + +function init_sandbox() { + try { + // if possible, use evalcx (not always available) + sandbox = evalcx(''); + sandbox.emit = Views.emit; + sandbox.sum = Views.sum; + sandbox.log = log; + sandbox.toJSON = Couch.toJSON; + sandbox.JSON = JSON; + sandbox.provides = Mime.provides; + sandbox.registerType = Mime.registerType; + sandbox.start = Render.start; + sandbox.send = Render.send; + sandbox.getRow = Render.getRow; + } catch (e) { + log(e.toSource()); + } +}; +init_sandbox(); + +// Commands are in the form of json arrays: +// ["commandname",..optional args...]\n +// +// Responses are json values followed by a new line ("\n") + +var DDoc = (function() { + var ddoc_dispatch = { + "lists" : Render.list, + "shows" : Render.show, + "filters" : Filter.filter, + "updates" : Render.update, + "validate_doc_update" : Validate.validate + }; + var ddocs = {}; + return { + ddoc : function() { + var args = []; + for (var i=0; i < arguments.length; i++) { + args.push(arguments[i]); + }; + var ddocId = args.shift(); + if (ddocId == "new") { + // get the real ddocId. + ddocId = args.shift(); + // store the ddoc, functions are lazily compiled. + ddocs[ddocId] = args.shift(); + print("true"); + } else { + // Couch makes sure we know this ddoc already. 
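+        // For illustration (shapes inferred from the spec tests above, values
+        // made up): a prior ["ddoc", "new", "foo", {...design doc body...}]
+        // cached the ddoc, and a later ["ddoc", "foo", ["lists","simple"],
+        // [head, req]] arrives here to look it up and dispatch.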
+ var ddoc = ddocs[ddocId]; + if (!ddoc) throw(["fatal", "query_protocol_error", "uncached design doc: "+ddocId]); + var funPath = args.shift(); + var cmd = funPath[0]; + // the first member of the fun path determines the type of operation + var funArgs = args.shift(); + if (ddoc_dispatch[cmd]) { + // get the function, call the command with it + var point = ddoc; + for (var i=0; i < funPath.length; i++) { + if (i+1 == funPath.length) { + fun = point[funPath[i]] + if (typeof fun != "function") { + fun = Couch.compileFunction(fun, ddoc); + // cache the compiled fun on the ddoc + point[funPath[i]] = fun + }; + } else { + point = point[funPath[i]] + } + }; + + // run the correct responder with the cmd body + ddoc_dispatch[cmd].apply(null, [fun, ddoc, funArgs]); + } else { + // unknown command, quit and hope the restarted version is better + throw(["fatal", "unknown_command", "unknown ddoc command '" + cmd + "'"]); + } + } + } + }; +})(); + +var Loop = function() { + var line, cmd, cmdkey, dispatch = { + "ddoc" : DDoc.ddoc, + // "view" : Views.handler, + "reset" : State.reset, + "add_fun" : State.addFun, + "map_doc" : Views.mapDoc, + "reduce" : Views.reduce, + "rereduce" : Views.rereduce + }; + function handleError(e) { + var type = e[0]; + if (type == "fatal") { + e[0] = "error"; // we tell the client it was a fatal error by dying + respond(e); + quit(-1); + } else if (type == "error") { + respond(e); + } else if (e.error && e.reason) { + // compatibility with old error format + respond(["error", e.error, e.reason]); + } else { + respond(["error","unnamed_error",e.toSource()]); + } + }; + while (line = readline()) { + cmd = eval('('+line+')'); + State.line_length = line.length; + try { + cmdkey = cmd.shift(); + if (dispatch[cmdkey]) { + // run the correct responder with the cmd body + dispatch[cmdkey].apply(null, cmd); + } else { + // unknown command, quit and hope the restarted version is better + throw(["fatal", "unknown_command", "unknown command '" + cmdkey + "'"]); + } + } catch(e) { + handleError(e); + } + }; +}; + +Loop(); diff --git a/rel/overlay/var/share/server/mimeparse.js b/rel/overlay/var/share/server/mimeparse.js new file mode 100644 index 00000000..3642a194 --- /dev/null +++ b/rel/overlay/var/share/server/mimeparse.js @@ -0,0 +1,158 @@ +// mimeparse.js +// +// This module provides basic functions for handling mime-types. It can +// handle matching mime-types against a list of media-ranges. See section +// 14.1 of the HTTP specification [RFC 2616] for a complete explanation. +// +// http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.1 +// +// A port to JavaScript of Joe Gregorio's MIME-Type Parser: +// +// http://code.google.com/p/mimeparse/ +// +// Ported by J. Chris Anderson <jchris@apache.org>, targeting the Spidermonkey runtime. +// +// To run the tests, open mimeparse-js-test.html in a browser. +// Ported from version 0.1.2 +// Comments are mostly excerpted from the original. + +var Mimeparse = (function() { + // private helpers + function strip(string) { + return string.replace(/^\s+/, '').replace(/\s+$/, '') + }; + + function parseRanges(ranges) { + var parsedRanges = [], rangeParts = ranges.split(","); + for (var i=0; i < rangeParts.length; i++) { + parsedRanges.push(publicMethods.parseMediaRange(rangeParts[i])) + }; + return parsedRanges; + }; + + var publicMethods = { + // Carves up a mime-type and returns an Array of the + // [type, subtype, params] where "params" is a Hash of all + // the parameters for the media range. 
+ // + // For example, the media range "application/xhtml;q=0.5" would + // get parsed into: + // + // ["application", "xhtml", { "q" : "0.5" }] + parseMimeType : function(mimeType) { + var fullType, typeParts, params = {}, parts = mimeType.split(';'); + for (var i=0; i < parts.length; i++) { + var p = parts[i].split('='); + if (p.length == 2) { + params[strip(p[0])] = strip(p[1]); + } + }; + fullType = parts[0].replace(/^\s+/, '').replace(/\s+$/, ''); + if (fullType == '*') fullType = '*/*'; + typeParts = fullType.split('/'); + return [typeParts[0], typeParts[1], params]; + }, + + // Carves up a media range and returns an Array of the + // [type, subtype, params] where "params" is a Object with + // all the parameters for the media range. + // + // For example, the media range "application/*;q=0.5" would + // get parsed into: + // + // ["application", "*", { "q" : "0.5" }] + // + // In addition this function also guarantees that there + // is a value for "q" in the params dictionary, filling it + // in with a proper default if necessary. + parseMediaRange : function(range) { + var q, parsedType = this.parseMimeType(range); + if (!parsedType[2]['q']) { + parsedType[2]['q'] = '1'; + } else { + q = parseFloat(parsedType[2]['q']); + if (isNaN(q)) { + parsedType[2]['q'] = '1'; + } else if (q > 1 || q < 0) { + parsedType[2]['q'] = '1'; + } + } + return parsedType; + }, + + // Find the best match for a given mime-type against + // a list of media_ranges that have already been + // parsed by parseMediaRange(). Returns an array of + // the fitness value and the value of the 'q' quality + // parameter of the best match, or (-1, 0) if no match + // was found. Just as for qualityParsed(), 'parsed_ranges' + // must be a list of parsed media ranges. + fitnessAndQualityParsed : function(mimeType, parsedRanges) { + var bestFitness = -1, bestFitQ = 0, target = this.parseMediaRange(mimeType); + var targetType = target[0], targetSubtype = target[1], targetParams = target[2]; + + for (var i=0; i < parsedRanges.length; i++) { + var parsed = parsedRanges[i]; + var type = parsed[0], subtype = parsed[1], params = parsed[2]; + if ((type == targetType || type == "*" || targetType == "*") && + (subtype == targetSubtype || subtype == "*" || targetSubtype == "*")) { + var matchCount = 0; + for (param in targetParams) { + if (param != 'q' && params[param] && params[param] == targetParams[param]) { + matchCount += 1; + } + } + + var fitness = (type == targetType) ? 100 : 0; + fitness += (subtype == targetSubtype) ? 10 : 0; + fitness += matchCount; + + if (fitness > bestFitness) { + bestFitness = fitness; + bestFitQ = params["q"]; + } + } + }; + return [bestFitness, parseFloat(bestFitQ)]; + }, + + // Find the best match for a given mime-type against + // a list of media_ranges that have already been + // parsed by parseMediaRange(). Returns the + // 'q' quality parameter of the best match, 0 if no + // match was found. This function bahaves the same as quality() + // except that 'parsedRanges' must be a list of + // parsed media ranges. + qualityParsed : function(mimeType, parsedRanges) { + return this.fitnessAndQualityParsed(mimeType, parsedRanges)[1]; + }, + + // Returns the quality 'q' of a mime-type when compared + // against the media-ranges in ranges. 
For example: + // + // >>> Mimeparse.quality('text/html','text/*;q=0.3, text/html;q=0.7, text/html;level=1, text/html;level=2;q=0.4, */*;q=0.5') + // 0.7 + quality : function(mimeType, ranges) { + return this.qualityParsed(mimeType, parseRanges(ranges)); + }, + + // Takes a list of supported mime-types and finds the best + // match for all the media-ranges listed in header. The value of + // header must be a string that conforms to the format of the + // HTTP Accept: header. The value of 'supported' is a list of + // mime-types. + // + // >>> bestMatch(['application/xbel+xml', 'text/xml'], 'text/*;q=0.5,*/*; q=0.1') + // 'text/xml' + bestMatch : function(supported, header) { + var parsedHeader = parseRanges(header); + var weighted = []; + for (var i=0; i < supported.length; i++) { + weighted.push([publicMethods.fitnessAndQualityParsed(supported[i], parsedHeader), i, supported[i]]) + }; + weighted.sort(); + return weighted[weighted.length-1][0][1] ? weighted[weighted.length-1][2] : ''; + } + } + return publicMethods; +})(); diff --git a/rel/overlay/var/share/server/render.js b/rel/overlay/var/share/server/render.js new file mode 100644 index 00000000..9dcfbcd6 --- /dev/null +++ b/rel/overlay/var/share/server/render.js @@ -0,0 +1,352 @@ +// Licensed under the Apache License, Version 2.0 (the "License"); you may not +// use this file except in compliance with the License. You may obtain a copy of +// the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations under +// the License. + + +var Mime = (function() { + // registerType(name, mime-type, mime-type, ...) + // + // Available in query server sandbox. TODO: The list is cleared on reset. + // This registers a particular name with the set of mimetypes it can handle. + // Whoever registers last wins. 
+ // + // Example: + // registerType("html", "text/html; charset=utf-8"); + + var mimesByKey = {}; + var keysByMime = {}; + function registerType() { + var mimes = [], key = arguments[0]; + for (var i=1; i < arguments.length; i++) { + mimes.push(arguments[i]); + }; + mimesByKey[key] = mimes; + for (var i=0; i < mimes.length; i++) { + keysByMime[mimes[i]] = key; + }; + } + + // Some default types + // Ported from Ruby on Rails + // Build list of Mime types for HTTP responses + // http://www.iana.org/assignments/media-types/ + // http://dev.rubyonrails.org/svn/rails/trunk/actionpack/lib/action_controller/mime_types.rb + + registerType("all", "*/*"); + registerType("text", "text/plain; charset=utf-8", "txt"); + registerType("html", "text/html; charset=utf-8"); + registerType("xhtml", "application/xhtml+xml", "xhtml"); + registerType("xml", "application/xml", "text/xml", "application/x-xml"); + registerType("js", "text/javascript", "application/javascript", "application/x-javascript"); + registerType("css", "text/css"); + registerType("ics", "text/calendar"); + registerType("csv", "text/csv"); + registerType("rss", "application/rss+xml"); + registerType("atom", "application/atom+xml"); + registerType("yaml", "application/x-yaml", "text/yaml"); + // just like Rails + registerType("multipart_form", "multipart/form-data"); + registerType("url_encoded_form", "application/x-www-form-urlencoded"); + // http://www.ietf.org/rfc/rfc4627.txt + registerType("json", "application/json", "text/x-json"); + + + var mimeFuns = []; + function provides(type, fun) { + Mime.providesUsed = true; + mimeFuns.push([type, fun]); + }; + + function resetProvides() { + // set globals + Mime.providesUsed = false; + mimeFuns = []; + Mime.responseContentType = null; + }; + + function runProvides(req) { + var supportedMimes = [], bestFun, bestKey = null, accept = req.headers["Accept"]; + if (req.query && req.query.format) { + bestKey = req.query.format; + Mime.responseContentType = mimesByKey[bestKey][0]; + } else if (accept) { + // log("using accept header: "+accept); + mimeFuns.reverse().forEach(function(mimeFun) { + var mimeKey = mimeFun[0]; + if (mimesByKey[mimeKey]) { + supportedMimes = supportedMimes.concat(mimesByKey[mimeKey]); + } + }); + Mime.responseContentType = Mimeparse.bestMatch(supportedMimes, accept); + bestKey = keysByMime[Mime.responseContentType]; + } else { + // just do the first one + bestKey = mimeFuns[0][0]; + Mime.responseContentType = mimesByKey[bestKey][0]; + } + + if (bestKey) { + for (var i=0; i < mimeFuns.length; i++) { + if (mimeFuns[i][0] == bestKey) { + bestFun = mimeFuns[i][1]; + break; + } + }; + }; + + if (bestFun) { + return bestFun(); + } else { + var supportedTypes = mimeFuns.map(function(mf) {return mimesByKey[mf[0]].join(', ') || mf[0]}); + throw(["error","not_acceptable", + "Content-Type "+(accept||bestKey)+" not supported, try one of: "+supportedTypes.join(', ')]); + } + }; + + + return { + registerType : registerType, + provides : provides, + resetProvides : resetProvides, + runProvides : runProvides + } +})(); + + + + +//// +//// Render dispatcher +//// +//// +//// +//// + +var Render = (function() { + var chunks = []; + + + // Start chunks + var startResp = {}; + function start(resp) { + startResp = resp || {}; + }; + + function sendStart() { + startResp = applyContentType((startResp || {}), Mime.responseContentType); + respond(["start", chunks, startResp]); + chunks = []; + startResp = {}; + } + + function applyContentType(resp, responseContentType) { + resp["headers"] = 
resp["headers"] || {}; + if (responseContentType) { + resp["headers"]["Content-Type"] = resp["headers"]["Content-Type"] || responseContentType; + } + return resp; + } + + function send(chunk) { + chunks.push(chunk.toString()); + }; + + function blowChunks(label) { + respond([label||"chunks", chunks]); + chunks = []; + }; + + var gotRow = false, lastRow = false; + function getRow() { + if (lastRow) return null; + if (!gotRow) { + gotRow = true; + sendStart(); + } else { + blowChunks(); + } + var line = readline(); + var json = eval('('+line+')'); + if (json[0] == "list_end") { + lastRow = true; + return null; + } + if (json[0] != "list_row") { + throw(["fatal", "list_error", "not a row '" + json[0] + "'"]); + } + return json[1]; + }; + + + function maybeWrapResponse(resp) { + var type = typeof resp; + if ((type == "string") || (type == "xml")) { + return {body:resp}; + } else { + return resp; + } + }; + + // from http://javascript.crockford.com/remedial.html + function typeOf(value) { + var s = typeof value; + if (s === 'object') { + if (value) { + if (value instanceof Array) { + s = 'array'; + } + } else { + s = 'null'; + } + } + return s; + }; + + function isDocRequestPath(info) { + var path = info.path; + return path.length > 5; + }; + + function runShow(fun, ddoc, args) { + try { + resetList(); + Mime.resetProvides(); + var resp = fun.apply(ddoc, args) || {}; + + // handle list() style API + if (chunks.length && chunks.length > 0) { + resp = maybeWrapResponse(resp); + resp.headers = resp.headers || {}; + for(var header in startResp) { + resp.headers[header] = startResp[header] + } + resp.body = chunks.join("") + (resp.body || ""); + resetList(); + } + + if (Mime.providesUsed) { + resp = Mime.runProvides(args[1]); + resp = applyContentType(maybeWrapResponse(resp), Mime.responseContentType); + } + + var type = typeOf(resp); + if (type == 'object' || type == 'string') { + respond(["resp", maybeWrapResponse(resp)]); + } else { + throw(["error", "render_error", "undefined response from show function"]); + } + } catch(e) { + if (args[0] === null && isDocRequestPath(args[1])) { + throw(["error", "not_found", "document not found"]); + } else { + renderError(e, fun.toSource()); + } + } + }; + + function runUpdate(fun, ddoc, args) { + try { + var method = args[1].method; + // for analytics logging applications you might want to remove the next line + if (method == "GET") throw(["error","method_not_allowed","Update functions do not allow GET"]); + var result = fun.apply(ddoc, args); + var doc = result[0]; + var resp = result[1]; + var type = typeOf(resp); + if (type == 'object' || type == 'string') { + respond(["up", doc, maybeWrapResponse(resp)]); + } else { + throw(["error", "render_error", "undefined response from update function"]); + } + } catch(e) { + renderError(e, fun.toSource()); + } + }; + + function resetList() { + gotRow = false; + lastRow = false; + chunks = []; + startResp = {}; + }; + + function runList(listFun, ddoc, args) { + try { + Mime.resetProvides(); + resetList(); + head = args[0] + req = args[1] + var tail = listFun.apply(ddoc, args); + + if (Mime.providesUsed) { + tail = Mime.runProvides(req); + } + if (!gotRow) getRow(); + if (typeof tail != "undefined") { + chunks.push(tail); + } + blowChunks("end"); + } catch(e) { + renderError(e, listFun.toSource()); + } + }; + + function renderError(e, funSrc) { + if (e.error && e.reason || e[0] == "error" || e[0] == "fatal") { + throw(e); + } else { + var logMessage = "function raised error: "+e.toSource()+" \nstacktrace: "+e.stack; 
+      log(logMessage);
+      throw(["error", "render_error", logMessage]);
+    }
+  };
+
+  function escapeHTML(string) {
+    return string && string.replace(/&/g, "&amp;")
+      .replace(/</g, "&lt;")
+      .replace(/>/g, "&gt;");
+  };
+
+
+  return {
+    start : start,
+    send : send,
+    getRow : getRow,
+    show : function(fun, ddoc, args) {
+      // var showFun = Couch.compileFunction(funSrc);
+      runShow(fun, ddoc, args);
+    },
+    update : function(fun, ddoc, args) {
+      // var upFun = Couch.compileFunction(funSrc);
+      runUpdate(fun, ddoc, args);
+    },
+    list : function(fun, ddoc, args) {
+      runList(fun, ddoc, args);
+    }
+  };
+})();
+
+// send = Render.send;
+// getRow = Render.getRow;
+// start = Render.start;
+
+// unused. this will be handled in the Erlang side of things.
+// function htmlRenderError(e, funSrc) {
+//   var msg = ["<html><body><h1>Render Error</h1>",
+//     "<p>JavaScript function raised error: ",
+//     e.toString(),
+//     "</p><h2>Stacktrace:</h2><code><pre>",
+//     escapeHTML(e.stack),
+//     "</pre></code><h2>Function source:</h2><code><pre>",
+//     escapeHTML(funSrc),
+//     "</pre></code></body></html>"].join('');
+//   return {body:msg};
+// };
diff --git a/rel/overlay/var/share/server/state.js b/rel/overlay/var/share/server/state.js
new file mode 100644
index 00000000..9af9e475
--- /dev/null
+++ b/rel/overlay/var/share/server/state.js
@@ -0,0 +1,27 @@
+// Licensed under the Apache License, Version 2.0 (the "License"); you may not
+// use this file except in compliance with the License. You may obtain a copy of
+// the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations under
+// the License.
+
+var State = {
+  reset : function(config) {
+    // clear the globals and run gc
+    State.funs = [];
+    State.query_config = config || {};
+    init_sandbox();
+    gc();
+    print("true"); // indicates success
+  },
+  addFun : function(newFun) {
+    // Compile to a function and add it to funs array
+    State.funs.push(Couch.compileFunction(newFun));
+    print("true");
+  }
+}
diff --git a/rel/overlay/var/share/server/util.js b/rel/overlay/var/share/server/util.js
new file mode 100644
index 00000000..9cc464c3
--- /dev/null
+++ b/rel/overlay/var/share/server/util.js
@@ -0,0 +1,112 @@
+// Licensed under the Apache License, Version 2.0 (the "License"); you may not
+// use this file except in compliance with the License. You may obtain a copy of
+// the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations under
+// the License.
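+
+// Illustration of the require() machinery implemented below (the design doc
+// layout in this comment is an assumed example, not taken from this patch):
+// given
+//
+//   { "views": { "lib": { "math": "exports.sum = function(a,b){return a+b};" } } }
+//
+// a compiled design-doc function calling require("views/lib/math") walks that
+// path, evaluates the string as a CommonJS-style module body, and receives its
+// exports, so require("views/lib/math").sum(1,2) === 3.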
+ +var resolveModule = function(names, parent, current, path) { + if (names.length == 0) { + if (typeof current != "string") { + throw ["error","invalid_require_path", + 'Must require a JavaScript string, not: '+(typeof current)]; + } + return [current, parent, path]; + } + // we need to traverse the path + var n = names.shift(); + if (n == '..') { + if (!(parent && parent.parent)) { + throw ["error", "invalid_require_path", 'Object has no parent '+JSON.stringify(current)]; + } + path = path.slice(0, path.lastIndexOf('/')); + return resolveModule(names, parent.parent.parent, parent.parent, path); + } else if (n == '.') { + if (!parent) { + throw ["error", "invalid_require_path", 'Object has no parent '+JSON.stringify(current)]; + } + return resolveModule(names, parent.parent, parent, path); + } + if (!current[n]) { + throw ["error", "invalid_require_path", 'Object has no property "'+n+'". '+JSON.stringify(current)]; + } + var p = current; + current = current[n]; + current.parent = p; + path = path ? path + '/' + n : n; + return resolveModule(names, p, current, path); +}; + +var Couch = { + // moving this away from global so we can move to json2.js later + toJSON : function (val) { + return JSON.stringify(val); + }, + compileFunction : function(source, ddoc) { + if (!source) throw(["error","not_found","missing function"]); + try { + if (sandbox) { + if (ddoc) { + var require = function(name, parent) { + if (!parent) {parent = {}}; + var resolved = resolveModule(name.split('/'), parent.actual, ddoc, parent.id); + var s = "function (module, exports, require) { " + resolved[0] + " }"; + var module = {id:resolved[2], actual:resolved[1]}; + module.exports = {}; + try { + var func = sandbox ? evalcx(s, sandbox) : eval(s); + func.apply(sandbox, [module, module.exports, function(name) {return require(name, module)}]); + } catch(e) { + throw ["error","compilation_error","Module require('"+name+"') raised error "+e.toSource()]; + } + return module.exports; + } + sandbox.require = require; + } + var functionObject = evalcx(source, sandbox); + } else { + var functionObject = eval(source); + } + } catch (err) { + throw(["error", "compilation_error", err.toSource() + " (" + source + ")"]); + }; + if (typeof(functionObject) == "function") { + return functionObject; + } else { + throw(["error","compilation_error", + "Expression does not eval to a function. (" + source.toSource() + ")"]); + }; + }, + recursivelySeal : function(obj) { + // seal() is broken in current Spidermonkey + seal(obj); + for (var propname in obj) { + if (typeof doc[propname] == "object") { + recursivelySeal(doc[propname]); + } + } + } +} + +// prints the object as JSON, and rescues and logs any toJSON() related errors +function respond(obj) { + try { + print(Couch.toJSON(obj)); + } catch(e) { + log("Error converting object to JSON: " + e.toString()); + log("error on obj: "+ obj.toSource()); + } +}; + +function log(message) { + // idea: query_server_config option for log level + if (typeof message != "string") { + message = Couch.toJSON(message); + } + respond(["log", message]); +}; diff --git a/rel/overlay/var/share/server/validate.js b/rel/overlay/var/share/server/validate.js new file mode 100644 index 00000000..76a14129 --- /dev/null +++ b/rel/overlay/var/share/server/validate.js @@ -0,0 +1,22 @@ +// Licensed under the Apache License, Version 2.0 (the "License"); you may not +// use this file except in compliance with the License. 
You may obtain a copy of +// the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations under +// the License. + +var Validate = { + validate : function(fun, ddoc, args) { + try { + fun.apply(ddoc, args); + print("1"); + } catch (error) { + respond(error); + } + } +}; diff --git a/rel/overlay/var/share/server/views.js b/rel/overlay/var/share/server/views.js new file mode 100644 index 00000000..ffe63377 --- /dev/null +++ b/rel/overlay/var/share/server/views.js @@ -0,0 +1,137 @@ +// Licensed under the Apache License, Version 2.0 (the "License"); you may not +// use this file except in compliance with the License. You may obtain a copy of +// the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations under +// the License. + + + +var Views = (function() { + + var map_results = []; // holds temporary emitted values during doc map + + function runReduce(reduceFuns, keys, values, rereduce) { + for (var i in reduceFuns) { + reduceFuns[i] = Couch.compileFunction(reduceFuns[i]); + }; + var reductions = new Array(reduceFuns.length); + for(var i = 0; i < reduceFuns.length; i++) { + try { + reductions[i] = reduceFuns[i](keys, values, rereduce); + } catch (err) { + handleViewError(err); + // if the error is not fatal, ignore the results and continue + reductions[i] = null; + } + }; + var reduce_line = Couch.toJSON(reductions); + var reduce_length = reduce_line.length; + // TODO make reduce_limit config into a number + if (State.query_config && State.query_config.reduce_limit && + reduce_length > 200 && ((reduce_length * 2) > State.line_length)) { + var reduce_preview = "Current output: '"+(reduce_line.substring(0,100) + "'... (first 100 of "+reduce_length+" bytes)"); + throw(["error", + "reduce_overflow_error", + "Reduce output must shrink more rapidly: "+reduce_preview]); + } else { + print("[true," + reduce_line + "]"); + } + }; + + function handleViewError(err, doc) { + if (err == "fatal_error") { + // Only if it's a "fatal_error" do we exit. What's a fatal error? + // That's for the query to decide. + // + // This will make it possible for queries to completely error out, + // by catching their own local exception and rethrowing a + // fatal_error. But by default if they don't do error handling we + // just eat the exception and carry on. + // + // In this case we abort map processing but don't destroy the + // JavaScript process. If you need to destroy the JavaScript + // process, throw the error form matched by the block below. + throw(["error", "map_runtime_error", "function raised 'fatal_error'"]); + } else if (err[0] == "fatal") { + // Throwing errors of the form ["fatal","error_key","reason"] + // will kill the OS process. This is not normally what you want. 
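+      // For example, a map function could do
+      //   throw(["fatal", "bad_doc", "cannot index this document"]);
+      // (the key and reason strings here are illustrative only).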
+ throw(err); + } + var message = "function raised exception " + err.toSource(); + if (doc) message += " with doc._id " + doc._id; + log(message); + }; + + return { + // view helper functions + emit : function(key, value) { + map_results.push([key, value]); + }, + sum : function(values) { + var rv = 0; + for (var i in values) { + rv += values[i]; + } + return rv; + }, + reduce : function(reduceFuns, kvs) { + var keys = new Array(kvs.length); + var values = new Array(kvs.length); + for(var i = 0; i < kvs.length; i++) { + keys[i] = kvs[i][0]; + values[i] = kvs[i][1]; + } + runReduce(reduceFuns, keys, values, false); + }, + rereduce : function(reduceFuns, values) { + runReduce(reduceFuns, null, values, true); + }, + mapDoc : function(doc) { + // Compute all the map functions against the document. + // + // Each function can output multiple key/value pairs for each document. + // + // Example output of map_doc after three functions set by add_fun cmds: + // [ + // [["Key","Value"]], <- fun 1 returned 1 key value + // [], <- fun 2 returned 0 key values + // [["Key1","Value1"],["Key2","Value2"]] <- fun 3 returned 2 key values + // ] + // + + /* + Immutable document support temporarily removed. + + Removed because the seal function no longer works on JS 1.8 arrays, + instead returning an error. The sealing is meant to prevent map + functions from modifying the same document that is passed to other map + functions. However, only map functions in the same design document are + run together, so we have a reasonable expectation they can trust each + other. Any map fun that can't be trusted can be placed in its own + design document, and it cannot affect other map functions. + + recursivelySeal(doc); // seal to prevent map functions from changing doc + */ + var buf = []; + for (var i = 0; i < State.funs.length; i++) { + map_results = []; + try { + State.funs[i](doc); + buf.push(Couch.toJSON(map_results)); + } catch (err) { + handleViewError(err, doc); + // If the error is not fatal, we treat the doc as if it + // did not emit anything, by buffering an empty array. + buf.push("[]"); + } + } + print("[" + buf.join(", ") + "]"); + } + } +})(); diff --git a/share/server/filter.js b/share/server/filter.js deleted file mode 100644 index 1e8556a4..00000000 --- a/share/server/filter.js +++ /dev/null @@ -1,23 +0,0 @@ -// Licensed under the Apache License, Version 2.0 (the "License"); you may not -// use this file except in compliance with the License. You may obtain a copy of -// the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations under -// the License. - -var Filter = { - filter : function(fun, ddoc, args) { - var results = []; - var docs = args[0]; - var req = args[1]; - for (var i=0; i < docs.length; i++) { - results.push((fun.apply(ddoc, [docs[i], req]) && true) || false); - }; - respond([true, results]); - } -}; diff --git a/share/server/json2.js b/share/server/json2.js deleted file mode 100644 index 39d8f370..00000000 --- a/share/server/json2.js +++ /dev/null @@ -1,481 +0,0 @@ -/* - http://www.JSON.org/json2.js - 2009-09-29 - - Public Domain. - - NO WARRANTY EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK. - - See http://www.JSON.org/js.html - - - This code should be minified before deployment. 
- See http://javascript.crockford.com/jsmin.html - - USE YOUR OWN COPY. IT IS EXTREMELY UNWISE TO LOAD CODE FROM SERVERS YOU DO - NOT CONTROL. - - - This file creates a global JSON object containing two methods: stringify - and parse. - - JSON.stringify(value, replacer, space) - value any JavaScript value, usually an object or array. - - replacer an optional parameter that determines how object - values are stringified for objects. It can be a - function or an array of strings. - - space an optional parameter that specifies the indentation - of nested structures. If it is omitted, the text will - be packed without extra whitespace. If it is a number, - it will specify the number of spaces to indent at each - level. If it is a string (such as '\t' or ' '), - it contains the characters used to indent at each level. - - This method produces a JSON text from a JavaScript value. - - When an object value is found, if the object contains a toJSON - method, its toJSON method will be called and the result will be - stringified. A toJSON method does not serialize: it returns the - value represented by the name/value pair that should be serialized, - or undefined if nothing should be serialized. The toJSON method - will be passed the key associated with the value, and this will be - bound to the value - - For example, this would serialize Dates as ISO strings. - - Date.prototype.toJSON = function (key) { - function f(n) { - // Format integers to have at least two digits. - return n < 10 ? '0' + n : n; - } - - return this.getUTCFullYear() + '-' + - f(this.getUTCMonth() + 1) + '-' + - f(this.getUTCDate()) + 'T' + - f(this.getUTCHours()) + ':' + - f(this.getUTCMinutes()) + ':' + - f(this.getUTCSeconds()) + 'Z'; - }; - - You can provide an optional replacer method. It will be passed the - key and value of each member, with this bound to the containing - object. The value that is returned from your method will be - serialized. If your method returns undefined, then the member will - be excluded from the serialization. - - If the replacer parameter is an array of strings, then it will be - used to select the members to be serialized. It filters the results - such that only members with keys listed in the replacer array are - stringified. - - Values that do not have JSON representations, such as undefined or - functions, will not be serialized. Such values in objects will be - dropped; in arrays they will be replaced with null. You can use - a replacer function to replace those with JSON values. - JSON.stringify(undefined) returns undefined. - - The optional space parameter produces a stringification of the - value that is filled with line breaks and indentation to make it - easier to read. - - If the space parameter is a non-empty string, then that string will - be used for indentation. If the space parameter is a number, then - the indentation will be that many spaces. - - Example: - - text = JSON.stringify(['e', {pluribus: 'unum'}]); - // text is '["e",{"pluribus":"unum"}]' - - - text = JSON.stringify(['e', {pluribus: 'unum'}], null, '\t'); - // text is '[\n\t"e",\n\t{\n\t\t"pluribus": "unum"\n\t}\n]' - - text = JSON.stringify([new Date()], function (key, value) { - return this[key] instanceof Date ? - 'Date(' + this[key] + ')' : value; - }); - // text is '["Date(---current time---)"]' - - - JSON.parse(text, reviver) - This method parses a JSON text to produce an object or array. - It can throw a SyntaxError exception. 
- - The optional reviver parameter is a function that can filter and - transform the results. It receives each of the keys and values, - and its return value is used instead of the original value. - If it returns what it received, then the structure is not modified. - If it returns undefined then the member is deleted. - - Example: - - // Parse the text. Values that look like ISO date strings will - // be converted to Date objects. - - myData = JSON.parse(text, function (key, value) { - var a; - if (typeof value === 'string') { - a = -/^(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2}(?:\.\d*)?)Z$/.exec(value); - if (a) { - return new Date(Date.UTC(+a[1], +a[2] - 1, +a[3], +a[4], - +a[5], +a[6])); - } - } - return value; - }); - - myData = JSON.parse('["Date(09/09/2001)"]', function (key, value) { - var d; - if (typeof value === 'string' && - value.slice(0, 5) === 'Date(' && - value.slice(-1) === ')') { - d = new Date(value.slice(5, -1)); - if (d) { - return d; - } - } - return value; - }); - - - This is a reference implementation. You are free to copy, modify, or - redistribute. -*/ - -/*jslint evil: true, strict: false */ - -/*members "", "\b", "\t", "\n", "\f", "\r", "\"", JSON, "\\", apply, - call, charCodeAt, getUTCDate, getUTCFullYear, getUTCHours, - getUTCMinutes, getUTCMonth, getUTCSeconds, hasOwnProperty, join, - lastIndex, length, parse, prototype, push, replace, slice, stringify, - test, toJSON, toString, valueOf -*/ - - -// Create a JSON object only if one does not already exist. We create the -// methods in a closure to avoid creating global variables. - -if (!this.JSON) { - this.JSON = {}; -} - -(function () { - - function f(n) { - // Format integers to have at least two digits. - return n < 10 ? '0' + n : n; - } - - if (typeof Date.prototype.toJSON !== 'function') { - - Date.prototype.toJSON = function (key) { - - return isFinite(this.valueOf()) ? - this.getUTCFullYear() + '-' + - f(this.getUTCMonth() + 1) + '-' + - f(this.getUTCDate()) + 'T' + - f(this.getUTCHours()) + ':' + - f(this.getUTCMinutes()) + ':' + - f(this.getUTCSeconds()) + 'Z' : null; - }; - - String.prototype.toJSON = - Number.prototype.toJSON = - Boolean.prototype.toJSON = function (key) { - return this.valueOf(); - }; - } - - var cx = /[\u0000\u00ad\u0600-\u0604\u070f\u17b4\u17b5\u200c-\u200f\u2028-\u202f\u2060-\u206f\ufeff\ufff0-\uffff]/g, - escapable = /[\\\"\x00-\x1f\x7f-\x9f\u00ad\u0600-\u0604\u070f\u17b4\u17b5\u200c-\u200f\u2028-\u202f\u2060-\u206f\ufeff\ufff0-\uffff]/g, - gap, - indent, - meta = { // table of character substitutions - '\b': '\\b', - '\t': '\\t', - '\n': '\\n', - '\f': '\\f', - '\r': '\\r', - '"' : '\\"', - '\\': '\\\\' - }, - rep; - - - function quote(string) { - -// If the string contains no control characters, no quote characters, and no -// backslash characters, then we can safely slap some quotes around it. -// Otherwise we must also replace the offending characters with safe escape -// sequences. - - escapable.lastIndex = 0; - return escapable.test(string) ? - '"' + string.replace(escapable, function (a) { - var c = meta[a]; - return typeof c === 'string' ? c : - '\\u' + ('0000' + a.charCodeAt(0).toString(16)).slice(-4); - }) + '"' : - '"' + string + '"'; - } - - - function str(key, holder) { - -// Produce a string from holder[key]. - - var i, // The loop counter. - k, // The member key. - v, // The member value. - length, - mind = gap, - partial, - value = holder[key]; - -// If the value has a toJSON method, call it to obtain a replacement value. 
- - if (value && typeof value === 'object' && - typeof value.toJSON === 'function') { - value = value.toJSON(key); - } - -// If we were called with a replacer function, then call the replacer to -// obtain a replacement value. - - if (typeof rep === 'function') { - value = rep.call(holder, key, value); - } - -// What happens next depends on the value's type. - - switch (typeof value) { - case 'string': - return quote(value); - - case 'number': - -// JSON numbers must be finite. Encode non-finite numbers as null. - - return isFinite(value) ? String(value) : 'null'; - - case 'boolean': - case 'null': - -// If the value is a boolean or null, convert it to a string. Note: -// typeof null does not produce 'null'. The case is included here in -// the remote chance that this gets fixed someday. - - return String(value); - -// If the type is 'object', we might be dealing with an object or an array or -// null. - - case 'object': - -// Due to a specification blunder in ECMAScript, typeof null is 'object', -// so watch out for that case. - - if (!value) { - return 'null'; - } - -// Make an array to hold the partial results of stringifying this object value. - - gap += indent; - partial = []; - -// Is the value an array? - - if (Object.prototype.toString.apply(value) === '[object Array]') { - -// The value is an array. Stringify every element. Use null as a placeholder -// for non-JSON values. - - length = value.length; - for (i = 0; i < length; i += 1) { - partial[i] = str(i, value) || 'null'; - } - -// Join all of the elements together, separated with commas, and wrap them in -// brackets. - - v = partial.length === 0 ? '[]' : - gap ? '[\n' + gap + - partial.join(',\n' + gap) + '\n' + - mind + ']' : - '[' + partial.join(',') + ']'; - gap = mind; - return v; - } - -// If the replacer is an array, use it to select the members to be stringified. - - if (rep && typeof rep === 'object') { - length = rep.length; - for (i = 0; i < length; i += 1) { - k = rep[i]; - if (typeof k === 'string') { - v = str(k, value); - if (v) { - partial.push(quote(k) + (gap ? ': ' : ':') + v); - } - } - } - } else { - -// Otherwise, iterate through all of the keys in the object. - - for (k in value) { - if (Object.hasOwnProperty.call(value, k)) { - v = str(k, value); - if (v) { - partial.push(quote(k) + (gap ? ': ' : ':') + v); - } - } - } - } - -// Join all of the member texts together, separated with commas, -// and wrap them in braces. - - v = partial.length === 0 ? '{}' : - gap ? '{\n' + gap + partial.join(',\n' + gap) + '\n' + - mind + '}' : '{' + partial.join(',') + '}'; - gap = mind; - return v; - } - } - -// If the JSON object does not yet have a stringify method, give it one. - - if (typeof JSON.stringify !== 'function') { - JSON.stringify = function (value, replacer, space) { - -// The stringify method takes a value and an optional replacer, and an optional -// space parameter, and returns a JSON text. The replacer can be a function -// that can replace values, or an array of strings that will select the keys. -// A default replacer method can be provided. Use of the space parameter can -// produce text that is more easily readable. - - var i; - gap = ''; - indent = ''; - -// If the space parameter is a number, make an indent string containing that -// many spaces. - - if (typeof space === 'number') { - for (i = 0; i < space; i += 1) { - indent += ' '; - } - -// If the space parameter is a string, it will be used as the indent string. 
- - } else if (typeof space === 'string') { - indent = space; - } - -// If there is a replacer, it must be a function or an array. -// Otherwise, throw an error. - - rep = replacer; - if (replacer && typeof replacer !== 'function' && - (typeof replacer !== 'object' || - typeof replacer.length !== 'number')) { - throw new Error('JSON.stringify'); - } - -// Make a fake root object containing our value under the key of ''. -// Return the result of stringifying the value. - - return str('', {'': value}); - }; - } - - -// If the JSON object does not yet have a parse method, give it one. - - if (typeof JSON.parse !== 'function') { - JSON.parse = function (text, reviver) { - -// The parse method takes a text and an optional reviver function, and returns -// a JavaScript value if the text is a valid JSON text. - - var j; - - function walk(holder, key) { - -// The walk method is used to recursively walk the resulting structure so -// that modifications can be made. - - var k, v, value = holder[key]; - if (value && typeof value === 'object') { - for (k in value) { - if (Object.hasOwnProperty.call(value, k)) { - v = walk(value, k); - if (v !== undefined) { - value[k] = v; - } else { - delete value[k]; - } - } - } - } - return reviver.call(holder, key, value); - } - - -// Parsing happens in four stages. In the first stage, we replace certain -// Unicode characters with escape sequences. JavaScript handles many characters -// incorrectly, either silently deleting them, or treating them as line endings. - - cx.lastIndex = 0; - if (cx.test(text)) { - text = text.replace(cx, function (a) { - return '\\u' + - ('0000' + a.charCodeAt(0).toString(16)).slice(-4); - }); - } - -// In the second stage, we run the text against regular expressions that look -// for non-JSON patterns. We are especially concerned with '()' and 'new' -// because they can cause invocation, and '=' because it can cause mutation. -// But just to be safe, we want to reject all unexpected forms. - -// We split the second stage into 4 regexp operations in order to work around -// crippling inefficiencies in IE's and Safari's regexp engines. First we -// replace the JSON backslash pairs with '@' (a non-JSON character). Second, we -// replace all simple value tokens with ']' characters. Third, we delete all -// open brackets that follow a colon or comma or that begin the text. Finally, -// we look to see that the remaining characters are only whitespace or ']' or -// ',' or ':' or '{' or '}'. If that is so, then the text is safe for eval. - - if (/^[\],:{}\s]*$/. -test(text.replace(/\\(?:["\\\/bfnrt]|u[0-9a-fA-F]{4})/g, '@'). -replace(/"[^"\\\n\r]*"|true|false|null|-?\d+(?:\.\d*)?(?:[eE][+\-]?\d+)?/g, ']'). -replace(/(?:^|:|,)(?:\s*\[)+/g, ''))) { - -// In the third stage we use the eval function to compile the text into a -// JavaScript structure. The '{' operator is subject to a syntactic ambiguity -// in JavaScript: it can begin a block or an object literal. We wrap the text -// in parens to eliminate the ambiguity. - - j = eval('(' + text + ')'); - -// In the optional fourth stage, we recursively walk the new structure, passing -// each name/value pair to a reviver function for possible transformation. - - return typeof reviver === 'function' ? - walk({'': j}, '') : j; - } - -// If the text is not JSON parseable, then a SyntaxError is thrown. 
- - throw new SyntaxError('JSON.parse'); - }; - } -}()); diff --git a/share/server/loop.js b/share/server/loop.js deleted file mode 100644 index 300151e9..00000000 --- a/share/server/loop.js +++ /dev/null @@ -1,140 +0,0 @@ -// Licensed under the Apache License, Version 2.0 (the "License"); you may not -// use this file except in compliance with the License. You may obtain a copy of -// the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations under -// the License. - -var sandbox = null; - -function init_sandbox() { - try { - // if possible, use evalcx (not always available) - sandbox = evalcx(''); - sandbox.emit = Views.emit; - sandbox.sum = Views.sum; - sandbox.log = log; - sandbox.toJSON = Couch.toJSON; - sandbox.JSON = JSON; - sandbox.provides = Mime.provides; - sandbox.registerType = Mime.registerType; - sandbox.start = Render.start; - sandbox.send = Render.send; - sandbox.getRow = Render.getRow; - } catch (e) { - log(e.toSource()); - } -}; -init_sandbox(); - -// Commands are in the form of json arrays: -// ["commandname",..optional args...]\n -// -// Responses are json values followed by a new line ("\n") - -var DDoc = (function() { - var ddoc_dispatch = { - "lists" : Render.list, - "shows" : Render.show, - "filters" : Filter.filter, - "updates" : Render.update, - "validate_doc_update" : Validate.validate - }; - var ddocs = {}; - return { - ddoc : function() { - var args = []; - for (var i=0; i < arguments.length; i++) { - args.push(arguments[i]); - }; - var ddocId = args.shift(); - if (ddocId == "new") { - // get the real ddocId. - ddocId = args.shift(); - // store the ddoc, functions are lazily compiled. - ddocs[ddocId] = args.shift(); - print("true"); - } else { - // Couch makes sure we know this ddoc already. 
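// As a hedged illustration of the command format noted above, one possible
// stdin/stdout exchange for this dispatcher; the design doc name and the
// show function are invented for the example:
//
//   -> ["ddoc", "new", "_design/example", {"shows": {"hello": "function(doc, req){ return 'Hello'; }"}}]
//   <- true
//   -> ["ddoc", "_design/example", ["shows", "hello"], [{"_id": "doc1"}, {"query": {}, "headers": {}}]]
//   <- ["resp", {"body": "Hello"}]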
- var ddoc = ddocs[ddocId]; - if (!ddoc) throw(["fatal", "query_protocol_error", "uncached design doc: "+ddocId]); - var funPath = args.shift(); - var cmd = funPath[0]; - // the first member of the fun path determines the type of operation - var funArgs = args.shift(); - if (ddoc_dispatch[cmd]) { - // get the function, call the command with it - var point = ddoc; - for (var i=0; i < funPath.length; i++) { - if (i+1 == funPath.length) { - fun = point[funPath[i]] - if (typeof fun != "function") { - fun = Couch.compileFunction(fun, ddoc); - // cache the compiled fun on the ddoc - point[funPath[i]] = fun - }; - } else { - point = point[funPath[i]] - } - }; - - // run the correct responder with the cmd body - ddoc_dispatch[cmd].apply(null, [fun, ddoc, funArgs]); - } else { - // unknown command, quit and hope the restarted version is better - throw(["fatal", "unknown_command", "unknown ddoc command '" + cmd + "'"]); - } - } - } - }; -})(); - -var Loop = function() { - var line, cmd, cmdkey, dispatch = { - "ddoc" : DDoc.ddoc, - // "view" : Views.handler, - "reset" : State.reset, - "add_fun" : State.addFun, - "map_doc" : Views.mapDoc, - "reduce" : Views.reduce, - "rereduce" : Views.rereduce - }; - function handleError(e) { - var type = e[0]; - if (type == "fatal") { - e[0] = "error"; // we tell the client it was a fatal error by dying - respond(e); - quit(-1); - } else if (type == "error") { - respond(e); - } else if (e.error && e.reason) { - // compatibility with old error format - respond(["error", e.error, e.reason]); - } else { - respond(["error","unnamed_error",e.toSource()]); - } - }; - while (line = readline()) { - cmd = eval('('+line+')'); - State.line_length = line.length; - try { - cmdkey = cmd.shift(); - if (dispatch[cmdkey]) { - // run the correct responder with the cmd body - dispatch[cmdkey].apply(null, cmd); - } else { - // unknown command, quit and hope the restarted version is better - throw(["fatal", "unknown_command", "unknown command '" + cmdkey + "'"]); - } - } catch(e) { - handleError(e); - } - }; -}; - -Loop(); diff --git a/share/server/mimeparse.js b/share/server/mimeparse.js deleted file mode 100644 index 3642a194..00000000 --- a/share/server/mimeparse.js +++ /dev/null @@ -1,158 +0,0 @@ -// mimeparse.js -// -// This module provides basic functions for handling mime-types. It can -// handle matching mime-types against a list of media-ranges. See section -// 14.1 of the HTTP specification [RFC 2616] for a complete explanation. -// -// http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.1 -// -// A port to JavaScript of Joe Gregorio's MIME-Type Parser: -// -// http://code.google.com/p/mimeparse/ -// -// Ported by J. Chris Anderson <jchris@apache.org>, targeting the Spidermonkey runtime. -// -// To run the tests, open mimeparse-js-test.html in a browser. -// Ported from version 0.1.2 -// Comments are mostly excerpted from the original. - -var Mimeparse = (function() { - // private helpers - function strip(string) { - return string.replace(/^\s+/, '').replace(/\s+$/, '') - }; - - function parseRanges(ranges) { - var parsedRanges = [], rangeParts = ranges.split(","); - for (var i=0; i < rangeParts.length; i++) { - parsedRanges.push(publicMethods.parseMediaRange(rangeParts[i])) - }; - return parsedRanges; - }; - - var publicMethods = { - // Carves up a mime-type and returns an Array of the - // [type, subtype, params] where "params" is a Hash of all - // the parameters for the media range. 
- // - // For example, the media range "application/xhtml;q=0.5" would - // get parsed into: - // - // ["application", "xhtml", { "q" : "0.5" }] - parseMimeType : function(mimeType) { - var fullType, typeParts, params = {}, parts = mimeType.split(';'); - for (var i=0; i < parts.length; i++) { - var p = parts[i].split('='); - if (p.length == 2) { - params[strip(p[0])] = strip(p[1]); - } - }; - fullType = parts[0].replace(/^\s+/, '').replace(/\s+$/, ''); - if (fullType == '*') fullType = '*/*'; - typeParts = fullType.split('/'); - return [typeParts[0], typeParts[1], params]; - }, - - // Carves up a media range and returns an Array of the - // [type, subtype, params] where "params" is a Object with - // all the parameters for the media range. - // - // For example, the media range "application/*;q=0.5" would - // get parsed into: - // - // ["application", "*", { "q" : "0.5" }] - // - // In addition this function also guarantees that there - // is a value for "q" in the params dictionary, filling it - // in with a proper default if necessary. - parseMediaRange : function(range) { - var q, parsedType = this.parseMimeType(range); - if (!parsedType[2]['q']) { - parsedType[2]['q'] = '1'; - } else { - q = parseFloat(parsedType[2]['q']); - if (isNaN(q)) { - parsedType[2]['q'] = '1'; - } else if (q > 1 || q < 0) { - parsedType[2]['q'] = '1'; - } - } - return parsedType; - }, - - // Find the best match for a given mime-type against - // a list of media_ranges that have already been - // parsed by parseMediaRange(). Returns an array of - // the fitness value and the value of the 'q' quality - // parameter of the best match, or (-1, 0) if no match - // was found. Just as for qualityParsed(), 'parsed_ranges' - // must be a list of parsed media ranges. - fitnessAndQualityParsed : function(mimeType, parsedRanges) { - var bestFitness = -1, bestFitQ = 0, target = this.parseMediaRange(mimeType); - var targetType = target[0], targetSubtype = target[1], targetParams = target[2]; - - for (var i=0; i < parsedRanges.length; i++) { - var parsed = parsedRanges[i]; - var type = parsed[0], subtype = parsed[1], params = parsed[2]; - if ((type == targetType || type == "*" || targetType == "*") && - (subtype == targetSubtype || subtype == "*" || targetSubtype == "*")) { - var matchCount = 0; - for (param in targetParams) { - if (param != 'q' && params[param] && params[param] == targetParams[param]) { - matchCount += 1; - } - } - - var fitness = (type == targetType) ? 100 : 0; - fitness += (subtype == targetSubtype) ? 10 : 0; - fitness += matchCount; - - if (fitness > bestFitness) { - bestFitness = fitness; - bestFitQ = params["q"]; - } - } - }; - return [bestFitness, parseFloat(bestFitQ)]; - }, - - // Find the best match for a given mime-type against - // a list of media_ranges that have already been - // parsed by parseMediaRange(). Returns the - // 'q' quality parameter of the best match, 0 if no - // match was found. This function bahaves the same as quality() - // except that 'parsedRanges' must be a list of - // parsed media ranges. - qualityParsed : function(mimeType, parsedRanges) { - return this.fitnessAndQualityParsed(mimeType, parsedRanges)[1]; - }, - - // Returns the quality 'q' of a mime-type when compared - // against the media-ranges in ranges. 
For example: - // - // >>> Mimeparse.quality('text/html','text/*;q=0.3, text/html;q=0.7, text/html;level=1, text/html;level=2;q=0.4, */*;q=0.5') - // 0.7 - quality : function(mimeType, ranges) { - return this.qualityParsed(mimeType, parseRanges(ranges)); - }, - - // Takes a list of supported mime-types and finds the best - // match for all the media-ranges listed in header. The value of - // header must be a string that conforms to the format of the - // HTTP Accept: header. The value of 'supported' is a list of - // mime-types. - // - // >>> bestMatch(['application/xbel+xml', 'text/xml'], 'text/*;q=0.5,*/*; q=0.1') - // 'text/xml' - bestMatch : function(supported, header) { - var parsedHeader = parseRanges(header); - var weighted = []; - for (var i=0; i < supported.length; i++) { - weighted.push([publicMethods.fitnessAndQualityParsed(supported[i], parsedHeader), i, supported[i]]) - }; - weighted.sort(); - return weighted[weighted.length-1][0][1] ? weighted[weighted.length-1][2] : ''; - } - } - return publicMethods; -})(); diff --git a/share/server/render.js b/share/server/render.js deleted file mode 100644 index 9dcfbcd6..00000000 --- a/share/server/render.js +++ /dev/null @@ -1,352 +0,0 @@ -// Licensed under the Apache License, Version 2.0 (the "License"); you may not -// use this file except in compliance with the License. You may obtain a copy of -// the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations under -// the License. - - -var Mime = (function() { - // registerType(name, mime-type, mime-type, ...) - // - // Available in query server sandbox. TODO: The list is cleared on reset. - // This registers a particular name with the set of mimetypes it can handle. - // Whoever registers last wins. 
- // - // Example: - // registerType("html", "text/html; charset=utf-8"); - - var mimesByKey = {}; - var keysByMime = {}; - function registerType() { - var mimes = [], key = arguments[0]; - for (var i=1; i < arguments.length; i++) { - mimes.push(arguments[i]); - }; - mimesByKey[key] = mimes; - for (var i=0; i < mimes.length; i++) { - keysByMime[mimes[i]] = key; - }; - } - - // Some default types - // Ported from Ruby on Rails - // Build list of Mime types for HTTP responses - // http://www.iana.org/assignments/media-types/ - // http://dev.rubyonrails.org/svn/rails/trunk/actionpack/lib/action_controller/mime_types.rb - - registerType("all", "*/*"); - registerType("text", "text/plain; charset=utf-8", "txt"); - registerType("html", "text/html; charset=utf-8"); - registerType("xhtml", "application/xhtml+xml", "xhtml"); - registerType("xml", "application/xml", "text/xml", "application/x-xml"); - registerType("js", "text/javascript", "application/javascript", "application/x-javascript"); - registerType("css", "text/css"); - registerType("ics", "text/calendar"); - registerType("csv", "text/csv"); - registerType("rss", "application/rss+xml"); - registerType("atom", "application/atom+xml"); - registerType("yaml", "application/x-yaml", "text/yaml"); - // just like Rails - registerType("multipart_form", "multipart/form-data"); - registerType("url_encoded_form", "application/x-www-form-urlencoded"); - // http://www.ietf.org/rfc/rfc4627.txt - registerType("json", "application/json", "text/x-json"); - - - var mimeFuns = []; - function provides(type, fun) { - Mime.providesUsed = true; - mimeFuns.push([type, fun]); - }; - - function resetProvides() { - // set globals - Mime.providesUsed = false; - mimeFuns = []; - Mime.responseContentType = null; - }; - - function runProvides(req) { - var supportedMimes = [], bestFun, bestKey = null, accept = req.headers["Accept"]; - if (req.query && req.query.format) { - bestKey = req.query.format; - Mime.responseContentType = mimesByKey[bestKey][0]; - } else if (accept) { - // log("using accept header: "+accept); - mimeFuns.reverse().forEach(function(mimeFun) { - var mimeKey = mimeFun[0]; - if (mimesByKey[mimeKey]) { - supportedMimes = supportedMimes.concat(mimesByKey[mimeKey]); - } - }); - Mime.responseContentType = Mimeparse.bestMatch(supportedMimes, accept); - bestKey = keysByMime[Mime.responseContentType]; - } else { - // just do the first one - bestKey = mimeFuns[0][0]; - Mime.responseContentType = mimesByKey[bestKey][0]; - } - - if (bestKey) { - for (var i=0; i < mimeFuns.length; i++) { - if (mimeFuns[i][0] == bestKey) { - bestFun = mimeFuns[i][1]; - break; - } - }; - }; - - if (bestFun) { - return bestFun(); - } else { - var supportedTypes = mimeFuns.map(function(mf) {return mimesByKey[mf[0]].join(', ') || mf[0]}); - throw(["error","not_acceptable", - "Content-Type "+(accept||bestKey)+" not supported, try one of: "+supportedTypes.join(', ')]); - } - }; - - - return { - registerType : registerType, - provides : provides, - resetProvides : resetProvides, - runProvides : runProvides - } -})(); - - - - -//// -//// Render dispatcher -//// -//// -//// -//// - -var Render = (function() { - var chunks = []; - - - // Start chunks - var startResp = {}; - function start(resp) { - startResp = resp || {}; - }; - - function sendStart() { - startResp = applyContentType((startResp || {}), Mime.responseContentType); - respond(["start", chunks, startResp]); - chunks = []; - startResp = {}; - } - - function applyContentType(resp, responseContentType) { - resp["headers"] = 
resp["headers"] || {}; - if (responseContentType) { - resp["headers"]["Content-Type"] = resp["headers"]["Content-Type"] || responseContentType; - } - return resp; - } - - function send(chunk) { - chunks.push(chunk.toString()); - }; - - function blowChunks(label) { - respond([label||"chunks", chunks]); - chunks = []; - }; - - var gotRow = false, lastRow = false; - function getRow() { - if (lastRow) return null; - if (!gotRow) { - gotRow = true; - sendStart(); - } else { - blowChunks(); - } - var line = readline(); - var json = eval('('+line+')'); - if (json[0] == "list_end") { - lastRow = true; - return null; - } - if (json[0] != "list_row") { - throw(["fatal", "list_error", "not a row '" + json[0] + "'"]); - } - return json[1]; - }; - - - function maybeWrapResponse(resp) { - var type = typeof resp; - if ((type == "string") || (type == "xml")) { - return {body:resp}; - } else { - return resp; - } - }; - - // from http://javascript.crockford.com/remedial.html - function typeOf(value) { - var s = typeof value; - if (s === 'object') { - if (value) { - if (value instanceof Array) { - s = 'array'; - } - } else { - s = 'null'; - } - } - return s; - }; - - function isDocRequestPath(info) { - var path = info.path; - return path.length > 5; - }; - - function runShow(fun, ddoc, args) { - try { - resetList(); - Mime.resetProvides(); - var resp = fun.apply(ddoc, args) || {}; - - // handle list() style API - if (chunks.length && chunks.length > 0) { - resp = maybeWrapResponse(resp); - resp.headers = resp.headers || {}; - for(var header in startResp) { - resp.headers[header] = startResp[header] - } - resp.body = chunks.join("") + (resp.body || ""); - resetList(); - } - - if (Mime.providesUsed) { - resp = Mime.runProvides(args[1]); - resp = applyContentType(maybeWrapResponse(resp), Mime.responseContentType); - } - - var type = typeOf(resp); - if (type == 'object' || type == 'string') { - respond(["resp", maybeWrapResponse(resp)]); - } else { - throw(["error", "render_error", "undefined response from show function"]); - } - } catch(e) { - if (args[0] === null && isDocRequestPath(args[1])) { - throw(["error", "not_found", "document not found"]); - } else { - renderError(e, fun.toSource()); - } - } - }; - - function runUpdate(fun, ddoc, args) { - try { - var method = args[1].method; - // for analytics logging applications you might want to remove the next line - if (method == "GET") throw(["error","method_not_allowed","Update functions do not allow GET"]); - var result = fun.apply(ddoc, args); - var doc = result[0]; - var resp = result[1]; - var type = typeOf(resp); - if (type == 'object' || type == 'string') { - respond(["up", doc, maybeWrapResponse(resp)]); - } else { - throw(["error", "render_error", "undefined response from update function"]); - } - } catch(e) { - renderError(e, fun.toSource()); - } - }; - - function resetList() { - gotRow = false; - lastRow = false; - chunks = []; - startResp = {}; - }; - - function runList(listFun, ddoc, args) { - try { - Mime.resetProvides(); - resetList(); - head = args[0] - req = args[1] - var tail = listFun.apply(ddoc, args); - - if (Mime.providesUsed) { - tail = Mime.runProvides(req); - } - if (!gotRow) getRow(); - if (typeof tail != "undefined") { - chunks.push(tail); - } - blowChunks("end"); - } catch(e) { - renderError(e, listFun.toSource()); - } - }; - - function renderError(e, funSrc) { - if (e.error && e.reason || e[0] == "error" || e[0] == "fatal") { - throw(e); - } else { - var logMessage = "function raised error: "+e.toSource()+" \nstacktrace: "+e.stack; 
- log(logMessage); - throw(["error", "render_error", logMessage]); - } - }; - - function escapeHTML(string) { - return string && string.replace(/&/g, "&") - .replace(/</g, "<") - .replace(/>/g, ">"); - }; - - - return { - start : start, - send : send, - getRow : getRow, - show : function(fun, ddoc, args) { - // var showFun = Couch.compileFunction(funSrc); - runShow(fun, ddoc, args); - }, - update : function(fun, ddoc, args) { - // var upFun = Couch.compileFunction(funSrc); - runUpdate(fun, ddoc, args); - }, - list : function(fun, ddoc, args) { - runList(fun, ddoc, args); - } - }; -})(); - -// send = Render.send; -// getRow = Render.getRow; -// start = Render.start; - -// unused. this will be handled in the Erlang side of things. -// function htmlRenderError(e, funSrc) { -// var msg = ["<html><body><h1>Render Error</h1>", -// "<p>JavaScript function raised error: ", -// e.toString(), -// "</p><h2>Stacktrace:</h2><code><pre>", -// escapeHTML(e.stack), -// "</pre></code><h2>Function source:</h2><code><pre>", -// escapeHTML(funSrc), -// "</pre></code></body></html>"].join(''); -// return {body:msg}; -// }; diff --git a/share/server/state.js b/share/server/state.js deleted file mode 100644 index 9af9e475..00000000 --- a/share/server/state.js +++ /dev/null @@ -1,27 +0,0 @@ -// Licensed under the Apache License, Version 2.0 (the "License"); you may not -// use this file except in compliance with the License. You may obtain a copy of -// the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations under -// the License. - -var State = { - reset : function(config) { - // clear the globals and run gc - State.funs = []; - State.query_config = config || {}; - init_sandbox(); - gc(); - print("true"); // indicates success - }, - addFun : function(newFun) { - // Compile to a function and add it to funs array - State.funs.push(Couch.compileFunction(newFun)); - print("true"); - } -} diff --git a/share/server/util.js b/share/server/util.js deleted file mode 100644 index 9cc464c3..00000000 --- a/share/server/util.js +++ /dev/null @@ -1,112 +0,0 @@ -// Licensed under the Apache License, Version 2.0 (the "License"); you may not -// use this file except in compliance with the License. You may obtain a copy of -// the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations under -// the License. 
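The State.reset and State.addFun helpers above answer the "reset" and "add_fun" commands dispatched in loop.js; the compiled functions are then exercised by "map_doc" (implemented in views.js further down). A hedged sketch of one such round trip, with an invented document and map function:

// -> ["reset", {"reduce_limit": true}]
// <- true
// -> ["add_fun", "function(doc) { emit(doc._id, 1); }"]
// <- true
// -> ["map_doc", {"_id": "doc1", "value": "bar"}]
// <- [[["doc1",1]]]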
- -var resolveModule = function(names, parent, current, path) { - if (names.length == 0) { - if (typeof current != "string") { - throw ["error","invalid_require_path", - 'Must require a JavaScript string, not: '+(typeof current)]; - } - return [current, parent, path]; - } - // we need to traverse the path - var n = names.shift(); - if (n == '..') { - if (!(parent && parent.parent)) { - throw ["error", "invalid_require_path", 'Object has no parent '+JSON.stringify(current)]; - } - path = path.slice(0, path.lastIndexOf('/')); - return resolveModule(names, parent.parent.parent, parent.parent, path); - } else if (n == '.') { - if (!parent) { - throw ["error", "invalid_require_path", 'Object has no parent '+JSON.stringify(current)]; - } - return resolveModule(names, parent.parent, parent, path); - } - if (!current[n]) { - throw ["error", "invalid_require_path", 'Object has no property "'+n+'". '+JSON.stringify(current)]; - } - var p = current; - current = current[n]; - current.parent = p; - path = path ? path + '/' + n : n; - return resolveModule(names, p, current, path); -}; - -var Couch = { - // moving this away from global so we can move to json2.js later - toJSON : function (val) { - return JSON.stringify(val); - }, - compileFunction : function(source, ddoc) { - if (!source) throw(["error","not_found","missing function"]); - try { - if (sandbox) { - if (ddoc) { - var require = function(name, parent) { - if (!parent) {parent = {}}; - var resolved = resolveModule(name.split('/'), parent.actual, ddoc, parent.id); - var s = "function (module, exports, require) { " + resolved[0] + " }"; - var module = {id:resolved[2], actual:resolved[1]}; - module.exports = {}; - try { - var func = sandbox ? evalcx(s, sandbox) : eval(s); - func.apply(sandbox, [module, module.exports, function(name) {return require(name, module)}]); - } catch(e) { - throw ["error","compilation_error","Module require('"+name+"') raised error "+e.toSource()]; - } - return module.exports; - } - sandbox.require = require; - } - var functionObject = evalcx(source, sandbox); - } else { - var functionObject = eval(source); - } - } catch (err) { - throw(["error", "compilation_error", err.toSource() + " (" + source + ")"]); - }; - if (typeof(functionObject) == "function") { - return functionObject; - } else { - throw(["error","compilation_error", - "Expression does not eval to a function. (" + source.toSource() + ")"]); - }; - }, - recursivelySeal : function(obj) { - // seal() is broken in current Spidermonkey - seal(obj); - for (var propname in obj) { - if (typeof doc[propname] == "object") { - recursivelySeal(doc[propname]); - } - } - } -} - -// prints the object as JSON, and rescues and logs any toJSON() related errors -function respond(obj) { - try { - print(Couch.toJSON(obj)); - } catch(e) { - log("Error converting object to JSON: " + e.toString()); - log("error on obj: "+ obj.toSource()); - } -}; - -function log(message) { - // idea: query_server_config option for log level - if (typeof message != "string") { - message = Couch.toJSON(message); - } - respond(["log", message]); -}; diff --git a/share/server/validate.js b/share/server/validate.js deleted file mode 100644 index 76a14129..00000000 --- a/share/server/validate.js +++ /dev/null @@ -1,22 +0,0 @@ -// Licensed under the Apache License, Version 2.0 (the "License"); you may not -// use this file except in compliance with the License. 
You may obtain a copy of -// the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations under -// the License. - -var Validate = { - validate : function(fun, ddoc, args) { - try { - fun.apply(ddoc, args); - print("1"); - } catch (error) { - respond(error); - } - } -}; diff --git a/share/server/views.js b/share/server/views.js deleted file mode 100644 index ffe63377..00000000 --- a/share/server/views.js +++ /dev/null @@ -1,137 +0,0 @@ -// Licensed under the Apache License, Version 2.0 (the "License"); you may not -// use this file except in compliance with the License. You may obtain a copy of -// the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations under -// the License. - - - -var Views = (function() { - - var map_results = []; // holds temporary emitted values during doc map - - function runReduce(reduceFuns, keys, values, rereduce) { - for (var i in reduceFuns) { - reduceFuns[i] = Couch.compileFunction(reduceFuns[i]); - }; - var reductions = new Array(reduceFuns.length); - for(var i = 0; i < reduceFuns.length; i++) { - try { - reductions[i] = reduceFuns[i](keys, values, rereduce); - } catch (err) { - handleViewError(err); - // if the error is not fatal, ignore the results and continue - reductions[i] = null; - } - }; - var reduce_line = Couch.toJSON(reductions); - var reduce_length = reduce_line.length; - // TODO make reduce_limit config into a number - if (State.query_config && State.query_config.reduce_limit && - reduce_length > 200 && ((reduce_length * 2) > State.line_length)) { - var reduce_preview = "Current output: '"+(reduce_line.substring(0,100) + "'... (first 100 of "+reduce_length+" bytes)"); - throw(["error", - "reduce_overflow_error", - "Reduce output must shrink more rapidly: "+reduce_preview]); - } else { - print("[true," + reduce_line + "]"); - } - }; - - function handleViewError(err, doc) { - if (err == "fatal_error") { - // Only if it's a "fatal_error" do we exit. What's a fatal error? - // That's for the query to decide. - // - // This will make it possible for queries to completely error out, - // by catching their own local exception and rethrowing a - // fatal_error. But by default if they don't do error handling we - // just eat the exception and carry on. - // - // In this case we abort map processing but don't destroy the - // JavaScript process. If you need to destroy the JavaScript - // process, throw the error form matched by the block below. - throw(["error", "map_runtime_error", "function raised 'fatal_error'"]); - } else if (err[0] == "fatal") { - // Throwing errors of the form ["fatal","error_key","reason"] - // will kill the OS process. This is not normally what you want. 
- throw(err); - } - var message = "function raised exception " + err.toSource(); - if (doc) message += " with doc._id " + doc._id; - log(message); - }; - - return { - // view helper functions - emit : function(key, value) { - map_results.push([key, value]); - }, - sum : function(values) { - var rv = 0; - for (var i in values) { - rv += values[i]; - } - return rv; - }, - reduce : function(reduceFuns, kvs) { - var keys = new Array(kvs.length); - var values = new Array(kvs.length); - for(var i = 0; i < kvs.length; i++) { - keys[i] = kvs[i][0]; - values[i] = kvs[i][1]; - } - runReduce(reduceFuns, keys, values, false); - }, - rereduce : function(reduceFuns, values) { - runReduce(reduceFuns, null, values, true); - }, - mapDoc : function(doc) { - // Compute all the map functions against the document. - // - // Each function can output multiple key/value pairs for each document. - // - // Example output of map_doc after three functions set by add_fun cmds: - // [ - // [["Key","Value"]], <- fun 1 returned 1 key value - // [], <- fun 2 returned 0 key values - // [["Key1","Value1"],["Key2","Value2"]] <- fun 3 returned 2 key values - // ] - // - - /* - Immutable document support temporarily removed. - - Removed because the seal function no longer works on JS 1.8 arrays, - instead returning an error. The sealing is meant to prevent map - functions from modifying the same document that is passed to other map - functions. However, only map functions in the same design document are - run together, so we have a reasonable expectation they can trust each - other. Any map fun that can't be trusted can be placed in its own - design document, and it cannot affect other map functions. - - recursivelySeal(doc); // seal to prevent map functions from changing doc - */ - var buf = []; - for (var i = 0; i < State.funs.length; i++) { - map_results = []; - try { - State.funs[i](doc); - buf.push(Couch.toJSON(map_results)); - } catch (err) { - handleViewError(err, doc); - // If the error is not fatal, we treat the doc as if it - // did not emit anything, by buffering an empty array. - buf.push("[]"); - } - } - print("[" + buf.join(", ") + "]"); - } - } -})(); diff --git a/test/bench/bench_marks.js b/test/bench/bench_marks.js deleted file mode 100644 index 4025adbb..00000000 --- a/test/bench/bench_marks.js +++ /dev/null @@ -1,103 +0,0 @@ -// Licensed under the Apache License, Version 2.0 (the "License"); you may not -// use this file except in compliance with the License. You may obtain a copy of -// the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations under -// the License. 
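Complementing the map_doc output format documented in views.js just above, a hedged sketch of the "reduce" and "rereduce" commands handled by Views.reduce and Views.rereduce; the reduce source and row values are invented for the example:

// -> ["reduce", ["function(keys, values) { return sum(values); }"], [[["a","id1"],1], [["b","id2"],2]]]
// <- [true,[3]]
// -> ["rereduce", ["function(keys, values, rereduce) { return sum(values); }"], [3,5]]
// <- [true,[8]]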
- -var NUM_DOCS = 2000; -var NUM_BATCHES = 20; - -var init = function() { - var db = new CouchDB("bench_mark_db", {"X-Couch-Full-Commit": "false"}); - db.deleteDb(); - db.createDb(); - return db; -}; - -var timeit = function(func) { - var startTime = (new Date()).getTime(); - func(); - return ((new Date()).getTime() - startTime) / 1000; -}; - -var report = function(name, rate) { - rate = Math.round(parseFloat(rate) * 100) / 100; - console.log("" + name + ": " + rate + " docs/second"); -}; - -var makeDocs = function(n) { - docs = []; - for (var i=0; i < n; i++) { - docs.push({"foo":"bar"}); - }; - return docs; -}; - -var couchTests = {}; - -couchTests.single_doc_insert = function() { - var db = init(); - var len = timeit(function() { - for(var i = 0; i < NUM_DOCS; i++) { - db.save({"foo": "bar"}); - } - }); - report("Single doc inserts", NUM_DOCS/len); -}; - -couchTests.batch_ok_doc_insert = function() { - var db = init(); - var len = timeit(function() { - for(var i = 0; i < NUM_DOCS; i++) { - db.save({"foo":"bar"}, {"batch":"ok"}); - } - }); - report("Single doc inserts with batch=ok", NUM_DOCS/len); -}; - -couchTests.bulk_doc_100 = function() { - var db = init(); - var len = timeit(function() { - for(var i = 0; i < NUM_BATCHES; i++) { - db.bulkSave(makeDocs(100)); - } - }); - report("Bulk docs - 100", (NUM_BATCHES*100)/len); -}; - -couchTests.bulk_doc_1000 = function() { - var db = init(); - var len = timeit(function() { - for(var i = 0; i < NUM_BATCHES; i++) { - db.bulkSave(makeDocs(1000)); - } - }); - report("Bulk docs - 1000", (NUM_BATCHES*1000)/len); -}; - - -couchTests.bulk_doc_5000 = function() { - var db = init(); - var len = timeit(function() { - for(var i = 0; i < NUM_BATCHES; i++) { - db.bulkSave(makeDocs(5000)); - } - }); - report("Bulk docs - 5000", (NUM_BATCHES*5000)/len); -}; - -couchTests.bulk_doc_10000 = function() { - var db = init(); - var len = timeit(function() { - for(var i = 0; i < NUM_BATCHES; i++) { - db.bulkSave(makeDocs(10000)); - } - }); - report("Bulk docs - 10000", (NUM_BATCHES*10000)/len); -}; diff --git a/test/bench/benchbulk.sh b/test/bench/benchbulk.sh deleted file mode 100755 index 22804c64..00000000 --- a/test/bench/benchbulk.sh +++ /dev/null @@ -1,69 +0,0 @@ -#!/bin/sh -e -# Licensed under the Apache License, Version 2.0 (the "License"); you may not -# use this file except in compliance with the License. You may obtain a copy of -# the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations under -# the License. 
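The couchTests cases in bench_marks.js above all share the same init/timeit/report shape, so adding another data point is just another property on couchTests. A hedged sketch follows; bulk_doc_500 is hypothetical, not part of the original suite, and reuses the helpers defined in that file:

couchTests.bulk_doc_500 = function() {
  var db = init();
  var len = timeit(function() {
    for (var i = 0; i < NUM_BATCHES; i++) {
      db.bulkSave(makeDocs(500));
    }
  });
  report("Bulk docs - 500", (NUM_BATCHES * 500) / len);
};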
-# - -# usage: time benchbulk.sh -# it takes about 30 seconds to run on my old MacBook with bulksize 1000 - -BULKSIZE=100 -DOCSIZE=10 -INSERTS=10 -ROUNDS=10 -DBURL="http://127.0.0.1:5984/benchbulk" -POSTURL="$DBURL/_bulk_docs" - -function make_bulk_docs() { - ROW=0 - SIZE=$(($1-1)) - START=$2 - BODYSIZE=$3 - - BODY=$(printf "%0${BODYSIZE}d") - - echo '{"docs":[' - while [ $ROW -lt $SIZE ]; do - printf '{"_id":"%020d", "body":"'$BODY'"},' $(($ROW + $START)) - let ROW=ROW+1 - done - printf '{"_id":"%020d", "body":"'$BODY'"}' $(($ROW + $START)) - echo ']}' -} - -echo "Making $INSERTS bulk inserts of $BULKSIZE docs each" - -echo "Attempt to delete db at $DBURL" -curl -X DELETE $DBURL -w\\n - -echo "Attempt to create db at $DBURL" -curl -X PUT $DBURL -w\\n - -echo "Running $ROUNDS rounds of $INSERTS concurrent inserts to $POSTURL" -RUN=0 -while [ $RUN -lt $ROUNDS ]; do - - POSTS=0 - while [ $POSTS -lt $INSERTS ]; do - STARTKEY=$[ POSTS * BULKSIZE + RUN * BULKSIZE * INSERTS ] - echo "startkey $STARTKEY bulksize $BULKSIZE" - DOCS=$(make_bulk_docs $BULKSIZE $STARTKEY $DOCSIZE) - # echo $DOCS - echo $DOCS | curl -T - -X POST $POSTURL -w%{http_code}\ %{time_total}\ sec\\n >/dev/null 2>&1 & - let POSTS=POSTS+1 - done - - echo "waiting" - wait - let RUN=RUN+1 -done - -curl $DBURL -w\\n diff --git a/test/bench/run.tpl b/test/bench/run.tpl deleted file mode 100755 index 9307863f..00000000 --- a/test/bench/run.tpl +++ /dev/null @@ -1,28 +0,0 @@ -#!/bin/sh -e - -# Licensed under the Apache License, Version 2.0 (the "License"); you may not -# use this file except in compliance with the License. You may obtain a copy of -# the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations under -# the License. - -SRC_DIR=%abs_top_srcdir% -SCRIPT_DIR=$SRC_DIR/share/www/script -JS_TEST_DIR=$SRC_DIR/test/javascript -JS_BENCH_DIR=$SRC_DIR/test/bench - -COUCHJS=%abs_top_builddir%/src/couchdb/priv/couchjs - -cat $SCRIPT_DIR/json2.js \ - $SCRIPT_DIR/couch.js \ - $JS_TEST_DIR/couch_http.js \ - $JS_BENCH_DIR/bench_marks.js \ - $JS_TEST_DIR/cli_runner.js \ - | $COUCHJS - - diff --git a/test/etap/001-load.t b/test/etap/001-load.t deleted file mode 100755 index 6f49e1ba..00000000 --- a/test/etap/001-load.t +++ /dev/null @@ -1,68 +0,0 @@ -#!/usr/bin/env escript -%% -*- erlang -*- - -% Licensed under the Apache License, Version 2.0 (the "License"); you may not -% use this file except in compliance with the License. You may obtain a copy of -% the License at -% -% http://www.apache.org/licenses/LICENSE-2.0 -% -% Unless required by applicable law or agreed to in writing, software -% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -% License for the specific language governing permissions and limitations under -% the License. - -% Test that we can load each module. 
- -main(_) -> - test_util:init_code_path(), - etap:plan(37), - Modules = [ - couch_btree, - couch_config, - couch_config_writer, - couch_db, - couch_db_update_notifier, - couch_db_update_notifier_sup, - couch_db_updater, - couch_doc, - couch_event_sup, - couch_external_manager, - couch_external_server, - couch_file, - couch_httpd, - couch_httpd_db, - couch_httpd_external, - couch_httpd_misc_handlers, - couch_httpd_show, - couch_httpd_stats_handlers, - couch_httpd_view, - couch_key_tree, - couch_log, - couch_os_process, - couch_query_servers, - couch_ref_counter, - couch_rep, - couch_rep_sup, - couch_server, - couch_server_sup, - couch_stats_aggregator, - couch_stats_collector, - couch_stream, - couch_task_status, - couch_util, - couch_view, - couch_view_compactor, - couch_view_group, - couch_view_updater - ], - - lists:foreach( - fun(Module) -> - etap_can:loaded_ok( - Module, - lists:concat(["Loaded: ", Module]) - ) - end, Modules), - etap:end_tests(). diff --git a/test/etap/002-icu-driver.t b/test/etap/002-icu-driver.t deleted file mode 100644 index d70f3303..00000000 --- a/test/etap/002-icu-driver.t +++ /dev/null @@ -1,33 +0,0 @@ -#!/usr/bin/env escript -% Licensed under the Apache License, Version 2.0 (the "License"); you may not -% use this file except in compliance with the License. You may obtain a copy of -% the License at -% -% http://www.apache.org/licenses/LICENSE-2.0 -% -% Unless required by applicable law or agreed to in writing, software -% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -% License for the specific language governing permissions and limitations under -% the License. - - -main(_) -> - test_util:init_code_path(), - etap:plan(3), - etap:is( - couch_util:start_driver("src/couchdb/priv/.libs"), - ok, - "Started couch_icu_driver." - ), - etap:is( - couch_util:collate(<<"foo">>, <<"bar">>), - 1, - "Can collate stuff" - ), - etap:is( - couch_util:collate(<<"A">>, <<"aa">>), - -1, - "Collate's non-ascii style." - ), - etap:end_tests(). diff --git a/test/etap/010-file-basics.t b/test/etap/010-file-basics.t deleted file mode 100755 index a3599f1a..00000000 --- a/test/etap/010-file-basics.t +++ /dev/null @@ -1,107 +0,0 @@ -#!/usr/bin/env escript -%% -*- erlang -*- -% Licensed under the Apache License, Version 2.0 (the "License"); you may not -% use this file except in compliance with the License. You may obtain a copy of -% the License at -% -% http://www.apache.org/licenses/LICENSE-2.0 -% -% Unless required by applicable law or agreed to in writing, software -% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -% License for the specific language governing permissions and limitations under -% the License. - -filename() -> test_util:build_file("test/etap/temp.010"). - -main(_) -> - test_util:init_code_path(), - etap:plan(19), - case (catch test()) of - ok -> - etap:end_tests(); - Other -> - etap:diag(io_lib:format("Test died abnormally: ~p", [Other])), - etap:bail() - end, - ok. - -test() -> - etap:is({error, enoent}, couch_file:open("not a real file"), - "Opening a non-existant file should return an enoent error."), - - etap:fun_is( - fun({ok, _}) -> true; (_) -> false end, - couch_file:open(filename() ++ ".1", [create, invalid_option]), - "Invalid flags to open are ignored." 
- ), - - {ok, Fd} = couch_file:open(filename() ++ ".0", [create, overwrite]), - etap:ok(is_pid(Fd), - "Returned file descriptor is a Pid"), - - etap:is({ok, 0}, couch_file:bytes(Fd), - "Newly created files have 0 bytes."), - - etap:is({ok, 0}, couch_file:append_term(Fd, foo), - "Appending a term returns the previous end of file position."), - - {ok, Size} = couch_file:bytes(Fd), - etap:is_greater(Size, 0, - "Writing a term increased the file size."), - - etap:is({ok, Size}, couch_file:append_binary(Fd, <<"fancy!">>), - "Appending a binary returns the current file size."), - - etap:is({ok, foo}, couch_file:pread_term(Fd, 0), - "Reading the first term returns what we wrote: foo"), - - etap:is({ok, <<"fancy!">>}, couch_file:pread_binary(Fd, Size), - "Reading back the binary returns what we wrote: <<\"fancy\">>."), - - etap:is({ok, <<131, 100, 0, 3, 102, 111, 111>>}, - couch_file:pread_binary(Fd, 0), - "Reading a binary at a term position returns the term as binary." - ), - - {ok, BinPos} = couch_file:append_binary(Fd, <<131,100,0,3,102,111,111>>), - etap:is({ok, foo}, couch_file:pread_term(Fd, BinPos), - "Reading a term from a written binary term representation succeeds."), - - BigBin = list_to_binary(lists:duplicate(100000, 0)), - {ok, BigBinPos} = couch_file:append_binary(Fd, BigBin), - etap:is({ok, BigBin}, couch_file:pread_binary(Fd, BigBinPos), - "Reading a large term from a written representation succeeds."), - - ok = couch_file:write_header(Fd, hello), - etap:is({ok, hello}, couch_file:read_header(Fd), - "Reading a header succeeds."), - - {ok, BigBinPos2} = couch_file:append_binary(Fd, BigBin), - etap:is({ok, BigBin}, couch_file:pread_binary(Fd, BigBinPos2), - "Reading a large term from a written representation succeeds 2."), - - % append_binary == append_iolist? - % Possible bug in pread_iolist or iolist() -> append_binary - {ok, IOLPos} = couch_file:append_binary(Fd, ["foo", $m, <<"bam">>]), - etap:is({ok, [<<"foombam">>]}, couch_file:pread_iolist(Fd, IOLPos), - "Reading an results in a binary form of the written iolist()"), - - % XXX: How does on test fsync? - etap:is(ok, couch_file:sync(Fd), - "Syncing does not cause an error."), - - etap:is(ok, couch_file:truncate(Fd, Size), - "Truncating a file succeeds."), - - %etap:is(eof, (catch couch_file:pread_binary(Fd, Size)), - % "Reading data that was truncated fails.") - etap:skip(fun() -> ok end, - "No idea how to test reading beyond EOF"), - - etap:is({ok, foo}, couch_file:pread_term(Fd, 0), - "Truncating does not affect data located before the truncation mark."), - - etap:is(ok, couch_file:close(Fd), - "Files close properly."), - ok. diff --git a/test/etap/011-file-headers.t b/test/etap/011-file-headers.t deleted file mode 100755 index 4705f629..00000000 --- a/test/etap/011-file-headers.t +++ /dev/null @@ -1,145 +0,0 @@ -#!/usr/bin/env escript -%% -*- erlang -*- -%%! -pa ./src/couchdb -sasl errlog_type error -boot start_sasl -noshell - -% Licensed under the Apache License, Version 2.0 (the "License"); you may not -% use this file except in compliance with the License. You may obtain a copy of -% the License at -% -% http://www.apache.org/licenses/LICENSE-2.0 -% -% Unless required by applicable law or agreed to in writing, software -% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -% License for the specific language governing permissions and limitations under -% the License. 
- -filename() -> test_util:build_file("test/etap/temp.011"). -sizeblock() -> 4096. % Need to keep this in sync with couch_file.erl - -main(_) -> - test_util:init_code_path(), - {S1, S2, S3} = now(), - random:seed(S1, S2, S3), - - etap:plan(17), - case (catch test()) of - ok -> - etap:end_tests(); - Other -> - etap:diag(io_lib:format("Test died abnormally: ~p", [Other])), - etap:bail() - end, - ok. - -test() -> - {ok, Fd} = couch_file:open(filename(), [create,overwrite]), - - etap:is({ok, 0}, couch_file:bytes(Fd), - "File should be initialized to contain zero bytes."), - - etap:is(ok, couch_file:write_header(Fd, {<<"some_data">>, 32}), - "Writing a header succeeds."), - - {ok, Size1} = couch_file:bytes(Fd), - etap:is_greater(Size1, 0, - "Writing a header allocates space in the file."), - - etap:is({ok, {<<"some_data">>, 32}}, couch_file:read_header(Fd), - "Reading the header returns what we wrote."), - - etap:is(ok, couch_file:write_header(Fd, [foo, <<"more">>]), - "Writing a second header succeeds."), - - {ok, Size2} = couch_file:bytes(Fd), - etap:is_greater(Size2, Size1, - "Writing a second header allocates more space."), - - etap:is({ok, [foo, <<"more">>]}, couch_file:read_header(Fd), - "Reading the second header does not return the first header."), - - % Delete the second header. - ok = couch_file:truncate(Fd, Size1), - - etap:is({ok, {<<"some_data">>, 32}}, couch_file:read_header(Fd), - "Reading the header after a truncation returns a previous header."), - - couch_file:write_header(Fd, [foo, <<"more">>]), - etap:is({ok, Size2}, couch_file:bytes(Fd), - "Rewriting the same second header returns the same second size."), - - ok = couch_file:close(Fd), - - % Now for the fun stuff. Try corrupting the second header and see - % if we recover properly. - - % Destroy the 0x1 byte that marks a header - check_header_recovery(fun(CouchFd, RawFd, Expect, HeaderPos) -> - etap:isnt(Expect, couch_file:read_header(CouchFd), - "Should return a different header before corruption."), - file:pwrite(RawFd, HeaderPos, <<0>>), - etap:is(Expect, couch_file:read_header(CouchFd), - "Corrupting the byte marker should read the previous header.") - end), - - % Corrupt the size. - check_header_recovery(fun(CouchFd, RawFd, Expect, HeaderPos) -> - etap:isnt(Expect, couch_file:read_header(CouchFd), - "Should return a different header before corruption."), - % +1 for 0x1 byte marker - file:pwrite(RawFd, HeaderPos+1, <<10/integer>>), - etap:is(Expect, couch_file:read_header(CouchFd), - "Corrupting the size should read the previous header.") - end), - - % Corrupt the MD5 signature - check_header_recovery(fun(CouchFd, RawFd, Expect, HeaderPos) -> - etap:isnt(Expect, couch_file:read_header(CouchFd), - "Should return a different header before corruption."), - % +5 = +1 for 0x1 byte and +4 for term size. - file:pwrite(RawFd, HeaderPos+5, <<"F01034F88D320B22">>), - etap:is(Expect, couch_file:read_header(CouchFd), - "Corrupting the MD5 signature should read the previous header.") - end), - - % Corrupt the data - check_header_recovery(fun(CouchFd, RawFd, Expect, HeaderPos) -> - etap:isnt(Expect, couch_file:read_header(CouchFd), - "Should return a different header before corruption."), - % +21 = +1 for 0x1 byte, +4 for term size and +16 for MD5 sig - file:pwrite(RawFd, HeaderPos+21, <<"some data goes here!">>), - etap:is(Expect, couch_file:read_header(CouchFd), - "Corrupting the header data should read the previous header.") - end), - - ok. 
- -check_header_recovery(CheckFun) -> - {ok, Fd} = couch_file:open(filename(), [create,overwrite]), - {ok, RawFd} = file:open(filename(), [read, write, raw, binary]), - - {ok, _} = write_random_data(Fd), - ExpectHeader = {some_atom, <<"a binary">>, 756}, - ok = couch_file:write_header(Fd, ExpectHeader), - - {ok, HeaderPos} = write_random_data(Fd), - ok = couch_file:write_header(Fd, {2342, <<"corruption! greed!">>}), - - CheckFun(Fd, RawFd, {ok, ExpectHeader}, HeaderPos), - - ok = file:close(RawFd), - ok = couch_file:close(Fd), - ok. - -write_random_data(Fd) -> - write_random_data(Fd, 100 + random:uniform(1000)). - -write_random_data(Fd, 0) -> - {ok, Bytes} = couch_file:bytes(Fd), - {ok, (1 + Bytes div sizeblock()) * sizeblock()}; -write_random_data(Fd, N) -> - Choices = [foo, bar, <<"bizzingle">>, "bank", ["rough", stuff]], - Term = lists:nth(random:uniform(4) + 1, Choices), - {ok, _} = couch_file:append_term(Fd, Term), - write_random_data(Fd, N-1). - diff --git a/test/etap/020-btree-basics.t b/test/etap/020-btree-basics.t deleted file mode 100755 index 18c4a836..00000000 --- a/test/etap/020-btree-basics.t +++ /dev/null @@ -1,205 +0,0 @@ -#!/usr/bin/env escript -%% -*- erlang -*- -%%! -pa ./src/couchdb -sasl errlog_type error -boot start_sasl -noshell - -% Licensed under the Apache License, Version 2.0 (the "License"); you may not -% use this file except in compliance with the License. You may obtain a copy of -% the License at -% -% http://www.apache.org/licenses/LICENSE-2.0 -% -% Unless required by applicable law or agreed to in writing, software -% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -% License for the specific language governing permissions and limitations under -% the License. - -filename() -> test_util:build_file("test/etap/temp.020"). -rows() -> 250. - --record(btree, {fd, root, extract_kv, assemble_kv, less, reduce}). - -main(_) -> - test_util:init_code_path(), - etap:plan(48), - case (catch test()) of - ok -> - etap:end_tests(); - Other -> - etap:diag(io_lib:format("Test died abnormally: ~p", [Other])), - etap:bail() - end, - ok. - -%% @todo Determine if this number should be greater to see if the btree was -%% broken into multiple nodes. AKA "How do we appropiately detect if multiple -%% nodes were created." -test()-> - Sorted = [{Seq, random:uniform()} || Seq <- lists:seq(1, rows())], - etap:ok(test_kvs(Sorted), "Testing sorted keys"), - etap:ok(test_kvs(lists:reverse(Sorted)), "Testing reversed sorted keys"), - etap:ok(test_kvs(shuffle(Sorted)), "Testing shuffled keys."), - ok. 
- -test_kvs(KeyValues) -> - ReduceFun = fun - (reduce, KVs) -> - length(KVs); - (rereduce, Reds) -> - lists:sum(Reds) - end, - - Keys = [K || {K, _} <- KeyValues], - - {ok, Fd} = couch_file:open(filename(), [create,overwrite]), - {ok, Btree} = couch_btree:open(nil, Fd), - etap:ok(is_record(Btree, btree), "Created btree is really a btree record"), - etap:is(Btree#btree.fd, Fd, "Btree#btree.fd is set correctly."), - etap:is(Btree#btree.root, nil, "Btree#btree.root is set correctly."), - - Btree1 = couch_btree:set_options(Btree, [{reduce, ReduceFun}]), - etap:is(Btree1#btree.reduce, ReduceFun, "Reduce function was set"), - {ok, _, EmptyRes} = couch_btree:foldl(Btree1, fun(_, X) -> {ok, X+1} end, 0), - etap:is(EmptyRes, 0, "Folding over an empty btree"), - - {ok, Btree2} = couch_btree:add_remove(Btree1, KeyValues, []), - etap:ok(test_btree(Btree2, KeyValues), - "Adding all keys at once returns a complete btree."), - - etap:fun_is( - fun - ({ok, {kp_node, _}}) -> true; - (_) -> false - end, - couch_file:pread_term(Fd, element(1, Btree2#btree.root)), - "Btree root pointer is a kp_node." - ), - - {ok, Btree3} = couch_btree:add_remove(Btree2, [], Keys), - etap:ok(test_btree(Btree3, []), - "Removing all keys at once returns an empty btree."), - - Btree4 = lists:foldl(fun(KV, BtAcc) -> - {ok, BtAcc2} = couch_btree:add_remove(BtAcc, [KV], []), - BtAcc2 - end, Btree3, KeyValues), - etap:ok(test_btree(Btree4, KeyValues), - "Adding all keys one at a time returns a complete btree."), - - Btree5 = lists:foldl(fun({K, _}, BtAcc) -> - {ok, BtAcc2} = couch_btree:add_remove(BtAcc, [], [K]), - BtAcc2 - end, Btree4, KeyValues), - etap:ok(test_btree(Btree5, []), - "Removing all keys one at a time returns an empty btree."), - - KeyValuesRev = lists:reverse(KeyValues), - Btree6 = lists:foldl(fun(KV, BtAcc) -> - {ok, BtAcc2} = couch_btree:add_remove(BtAcc, [KV], []), - BtAcc2 - end, Btree5, KeyValuesRev), - etap:ok(test_btree(Btree6, KeyValues), - "Adding all keys in reverse order returns a complete btree."), - - {_, Rem2Keys0, Rem2Keys1} = lists:foldl(fun(X, {Count, Left, Right}) -> - case Count rem 2 == 0 of - true-> {Count+1, [X | Left], Right}; - false -> {Count+1, Left, [X | Right]} - end - end, {0, [], []}, KeyValues), - - etap:ok(test_add_remove(Btree6, Rem2Keys0, Rem2Keys1), - "Add/Remove every other key."), - - etap:ok(test_add_remove(Btree6, Rem2Keys1, Rem2Keys0), - "Add/Remove opposite every other key."), - - {ok, Btree7} = couch_btree:add_remove(Btree6, [], [K||{K,_}<-Rem2Keys1]), - {ok, Btree8} = couch_btree:add_remove(Btree7, [], [K||{K,_}<-Rem2Keys0]), - etap:ok(test_btree(Btree8, []), - "Removing both halves of every other key returns an empty btree."), - - %% Third chunk (close out) - etap:is(couch_file:close(Fd), ok, "closing out"), - true. - -test_btree(Btree, KeyValues) -> - ok = test_key_access(Btree, KeyValues), - ok = test_lookup_access(Btree, KeyValues), - ok = test_final_reductions(Btree, KeyValues), - true. - -test_add_remove(Btree, OutKeyValues, RemainingKeyValues) -> - Btree2 = lists:foldl(fun({K, _}, BtAcc) -> - {ok, BtAcc2} = couch_btree:add_remove(BtAcc, [], [K]), - BtAcc2 - end, Btree, OutKeyValues), - true = test_btree(Btree2, RemainingKeyValues), - - Btree3 = lists:foldl(fun(KV, BtAcc) -> - {ok, BtAcc2} = couch_btree:add_remove(BtAcc, [KV], []), - BtAcc2 - end, Btree2, OutKeyValues), - true = test_btree(Btree3, OutKeyValues ++ RemainingKeyValues). 
- -test_key_access(Btree, List) -> - FoldFun = fun(Element, {[HAcc|TAcc], Count}) -> - case Element == HAcc of - true -> {ok, {TAcc, Count + 1}}; - _ -> {ok, {TAcc, Count + 1}} - end - end, - Length = length(List), - Sorted = lists:sort(List), - {ok, _, {[], Length}} = couch_btree:foldl(Btree, FoldFun, {Sorted, 0}), - {ok, _, {[], Length}} = couch_btree:fold(Btree, FoldFun, {Sorted, 0}, [{dir, rev}]), - ok. - -test_lookup_access(Btree, KeyValues) -> - FoldFun = fun({Key, Value}, {Key, Value}) -> {stop, true} end, - lists:foreach(fun({Key, Value}) -> - [{ok, {Key, Value}}] = couch_btree:lookup(Btree, [Key]), - {ok, _, true} = couch_btree:foldl(Btree, FoldFun, {Key, Value}, [{start_key, Key}]) - end, KeyValues). - -test_final_reductions(Btree, KeyValues) -> - KVLen = length(KeyValues), - FoldLFun = fun(_X, LeadingReds, Acc) -> - CountToStart = KVLen div 3 + Acc, - CountToStart = couch_btree:final_reduce(Btree, LeadingReds), - {ok, Acc+1} - end, - FoldRFun = fun(_X, LeadingReds, Acc) -> - CountToEnd = KVLen - KVLen div 3 + Acc, - CountToEnd = couch_btree:final_reduce(Btree, LeadingReds), - {ok, Acc+1} - end, - {LStartKey, _} = case KVLen of - 0 -> {nil, nil}; - _ -> lists:nth(KVLen div 3 + 1, lists:sort(KeyValues)) - end, - {RStartKey, _} = case KVLen of - 0 -> {nil, nil}; - _ -> lists:nth(KVLen div 3, lists:sort(KeyValues)) - end, - {ok, _, FoldLRed} = couch_btree:foldl(Btree, FoldLFun, 0, [{start_key, LStartKey}]), - {ok, _, FoldRRed} = couch_btree:fold(Btree, FoldRFun, 0, [{dir, rev}, {start_key, RStartKey}]), - KVLen = FoldLRed + FoldRRed, - ok. - -shuffle(List) -> - randomize(round(math:log(length(List)) + 0.5), List). - -randomize(1, List) -> - randomize(List); -randomize(T, List) -> - lists:foldl(fun(_E, Acc) -> - randomize(Acc) - end, randomize(List), lists:seq(1, (T - 1))). - -randomize(List) -> - D = lists:map(fun(A) -> - {random:uniform(), A} - end, List), - {_, D1} = lists:unzip(lists:keysort(1, D)), - D1. diff --git a/test/etap/021-btree-reductions.t b/test/etap/021-btree-reductions.t deleted file mode 100755 index 3e19c767..00000000 --- a/test/etap/021-btree-reductions.t +++ /dev/null @@ -1,141 +0,0 @@ -#!/usr/bin/env escript -%% -*- erlang -*- -%%! -pa ./src/couchdb -sasl errlog_type error -boot start_sasl -noshell - -% Licensed under the Apache License, Version 2.0 (the "License"); you may not -% use this file except in compliance with the License. You may obtain a copy of -% the License at -% -% http://www.apache.org/licenses/LICENSE-2.0 -% -% Unless required by applicable law or agreed to in writing, software -% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -% License for the specific language governing permissions and limitations under -% the License. - -filename() -> "./test/etap/temp.021". -rows() -> 1000. - -main(_) -> - test_util:init_code_path(), - etap:plan(8), - case (catch test()) of - ok -> - etap:end_tests(); - Other -> - etap:diag(io_lib:format("Test died abnormally: ~p", [Other])), - etap:bail() - end, - ok. - -test()-> - ReduceFun = fun - (reduce, KVs) -> length(KVs); - (rereduce, Reds) -> lists:sum(Reds) - end, - - {ok, Fd} = couch_file:open(filename(), [create,overwrite]), - {ok, Btree} = couch_btree:open(nil, Fd, [{reduce, ReduceFun}]), - - % Create a list, of {"even", Value} or {"odd", Value} pairs. 
- {_, EvenOddKVs} = lists:foldl(fun(Idx, {Key, Acc}) -> - case Key of - "even" -> {"odd", [{{Key, Idx}, 1} | Acc]}; - _ -> {"even", [{{Key, Idx}, 1} | Acc]} - end - end, {"odd", []}, lists:seq(1, rows())), - - {ok, Btree2} = couch_btree:add_remove(Btree, EvenOddKVs, []), - - GroupFun = fun({K1, _}, {K2, _}) -> K1 == K2 end, - FoldFun = fun(GroupedKey, Unreduced, Acc) -> - {ok, [{GroupedKey, couch_btree:final_reduce(Btree2, Unreduced)} | Acc]} - end, - - {SK1, EK1} = {{"even", -1}, {"even", foo}}, - {SK2, EK2} = {{"odd", -1}, {"odd", foo}}, - - etap:fun_is( - fun - ({ok, [{{"odd", _}, 500}, {{"even", _}, 500}]}) -> - true; - (_) -> - false - end, - couch_btree:fold_reduce(Btree2, FoldFun, [], [{key_group_fun, GroupFun}]), - "Reduction works with no specified direction, startkey, or endkey." - ), - - etap:fun_is( - fun - ({ok, [{{"odd", _}, 500}, {{"even", _}, 500}]}) -> - true; - (_) -> - false - end, - couch_btree:fold_reduce(Btree2, FoldFun, [], [{key_group_fun, GroupFun}, {dir, fwd}]), - "Reducing forward works with no startkey or endkey." - ), - - etap:fun_is( - fun - ({ok, [{{"even", _}, 500}, {{"odd", _}, 500}]}) -> - true; - (_) -> - false - end, - couch_btree:fold_reduce(Btree2, FoldFun, [], [{key_group_fun, GroupFun}, {dir, rev}]), - "Reducing backwards works with no startkey or endkey." - ), - - etap:fun_is( - fun - ({ok, [{{"odd", _}, 500}, {{"even", _}, 500}]}) -> - true; - (_) -> - false - end, - couch_btree:fold_reduce(Btree2, FoldFun, [], [{dir, fwd}, {key_group_fun, GroupFun}, {start_key, SK1}, {end_key, EK2}]), - "Reducing works over the entire range with startkey and endkey set." - ), - - etap:fun_is( - fun - ({ok, [{{"even", _}, 500}]}) -> true; - (_) -> false - end, - couch_btree:fold_reduce(Btree2, FoldFun, [], [{dir, fwd}, {key_group_fun, GroupFun}, {start_key, SK1}, {end_key, EK1}]), - "Reducing forward over the first half works with a startkey and endkey." - ), - - etap:fun_is( - fun - ({ok, [{{"odd", _}, 500}]}) -> true; - (_) -> false - end, - couch_btree:fold_reduce(Btree2, FoldFun, [], [{dir, fwd}, {key_group_fun, GroupFun}, {start_key, SK2}, {end_key, EK2}]), - "Reducing forward over the second half works with the second startkey and endkey." - ), - - etap:fun_is( - fun - ({ok, [{{"odd", _}, 500}]}) -> true; - (_) -> false - end, - couch_btree:fold_reduce(Btree2, FoldFun, [], [{dir, rev}, {key_group_fun, GroupFun}, {start_key, EK2}, {end_key, SK2}]), - "Reducing in reverse works after swapping the startkey and endkey." - ), - - etap:fun_is( - fun - ({ok, [{{"even", _}, 500}, {{"odd", _}, 500}]}) -> - true; - (_) -> - false - end, - couch_btree:fold_reduce(Btree2, FoldFun, [], [{dir, rev}, {key_group_fun, GroupFun}, {start_key, EK2}, {end_key, SK1}]), - "Reducing in reverse results in a reversed accumulator." - ), - - couch_file:close(Fd). diff --git a/test/etap/030-doc-from-json.t b/test/etap/030-doc-from-json.t deleted file mode 100755 index c4ef649a..00000000 --- a/test/etap/030-doc-from-json.t +++ /dev/null @@ -1,239 +0,0 @@ -#!/usr/bin/env escript -%% -*- erlang -*- -%%! -pa ./src/couchdb -pa ./src/mochiweb -sasl errlog_type false -noshell - -% Licensed under the Apache License, Version 2.0 (the "License"); you may not -% use this file except in compliance with the License.
You may obtain a copy of -% the License at -% -% http://www.apache.org/licenses/LICENSE-2.0 -% -% Unless required by applicable law or agreed to in writing, software -% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -% License for the specific language governing permissions and limitations under -% the License. - -%% XXX: Figure out how to -include("couch_db.hrl") --record(doc, {id= <<"">>, revs={0, []}, body={[]}, - atts=[], deleted=false, meta=[]}). --record(att, {name, type, att_len, disk_len, md5= <<>>, revpos=0, data, - encoding=identity}). - -default_config() -> - test_util:build_file("etc/couchdb/default_dev.ini"). - -main(_) -> - test_util:init_code_path(), - etap:plan(26), - case (catch test()) of - ok -> - etap:end_tests(); - Other -> - etap:diag(io_lib:format("Test died abnormally: ~p", [Other])), - etap:bail() - end, - ok. - -test() -> - couch_config:start_link([default_config()]), - couch_config:set("attachments", "compression_level", "0"), - ok = test_from_json_success(), - ok = test_from_json_errors(), - ok. - -test_from_json_success() -> - Cases = [ - { - {[]}, - #doc{}, - "Return an empty document for an empty JSON object." - }, - { - {[{<<"_id">>, <<"zing!">>}]}, - #doc{id= <<"zing!">>}, - "Parses document ids." - }, - { - {[{<<"_id">>, <<"_design/foo">>}]}, - #doc{id= <<"_design/foo">>}, - "_design/document ids." - }, - { - {[{<<"_id">>, <<"_local/bam">>}]}, - #doc{id= <<"_local/bam">>}, - "_local/document ids." - }, - { - {[{<<"_rev">>, <<"4-230234">>}]}, - #doc{revs={4, [<<"230234">>]}}, - "_rev stored in revs." - }, - { - {[{<<"soap">>, 35}]}, - #doc{body={[{<<"soap">>, 35}]}}, - "Non underscore prefixed fields stored in body." - }, - { - {[{<<"_attachments">>, {[ - {<<"my_attachment.fu">>, {[ - {<<"stub">>, true}, - {<<"content_type">>, <<"application/awesome">>}, - {<<"length">>, 45} - ]}}, - {<<"noahs_private_key.gpg">>, {[ - {<<"data">>, <<"SSBoYXZlIGEgcGV0IGZpc2gh">>}, - {<<"content_type">>, <<"application/pgp-signature">>} - ]}} - ]}}]}, - #doc{atts=[ - #att{ - name = <<"my_attachment.fu">>, - data = stub, - type = <<"application/awesome">>, - att_len = 45, - disk_len = 45, - revpos = nil - }, - #att{ - name = <<"noahs_private_key.gpg">>, - data = <<"I have a pet fish!">>, - type = <<"application/pgp-signature">>, - att_len = 18, - disk_len = 18, - revpos = 0 - } - ]}, - "Attachments are parsed correctly." - }, - { - {[{<<"_deleted">>, true}]}, - #doc{deleted=true}, - "_deleted controls the deleted field." - }, - { - {[{<<"_deleted">>, false}]}, - #doc{}, - "{\"_deleted\": false} is ok." - }, - { - {[ - {<<"_revisions">>, {[ - {<<"start">>, 4}, - {<<"ids">>, [<<"foo1">>, <<"phi3">>, <<"omega">>]} - ]}}, - {<<"_rev">>, <<"6-something">>} - ]}, - #doc{revs={4, [<<"foo1">>, <<"phi3">>, <<"omega">>]}}, - "_revisions attribute are preferred to _rev." - }, - { - {[{<<"_revs_info">>, dropping}]}, - #doc{}, - "Drops _revs_info." - }, - { - {[{<<"_local_seq">>, dropping}]}, - #doc{}, - "Drops _local_seq." - }, - { - {[{<<"_conflicts">>, dropping}]}, - #doc{}, - "Drops _conflicts." - }, - { - {[{<<"_deleted_conflicts">>, dropping}]}, - #doc{}, - "Drops _deleted_conflicts." - } - ], - - lists:foreach(fun({EJson, Expect, Mesg}) -> - etap:is(couch_doc:from_json_obj(EJson), Expect, Mesg) - end, Cases), - ok. 
- -test_from_json_errors() -> - Cases = [ - { - [], - {bad_request, "Document must be a JSON object"}, - "arrays are invalid" - }, - { - 4, - {bad_request, "Document must be a JSON object"}, - "integers are invalid" - }, - { - true, - {bad_request, "Document must be a JSON object"}, - "literals are invalid" - }, - { - {[{<<"_id">>, {[{<<"foo">>, 5}]}}]}, - {bad_request, <<"Document id must be a string">>}, - "Document id must be a string." - }, - { - {[{<<"_id">>, <<"_random">>}]}, - {bad_request, - <<"Only reserved document ids may start with underscore.">>}, - "Disallow arbitrary underscore prefixed docids." - }, - { - {[{<<"_rev">>, 5}]}, - {bad_request, <<"Invalid rev format">>}, - "_rev must be a string" - }, - { - {[{<<"_rev">>, "foobar"}]}, - {bad_request, <<"Invalid rev format">>}, - "_rev must be %d-%s" - }, - { - {[{<<"_rev">>, "foo-bar"}]}, - "Error if _rev's integer expectation is broken." - }, - { - {[{<<"_revisions">>, {[{<<"start">>, true}]}}]}, - {doc_validation, "_revisions.start isn't an integer."}, - "_revisions.start must be an integer." - }, - { - {[{<<"_revisions">>, {[ - {<<"start">>, 0}, - {<<"ids">>, 5} - ]}}]}, - {doc_validation, "_revisions.ids isn't a array."}, - "_revisions.ids must be a list." - }, - { - {[{<<"_revisions">>, {[ - {<<"start">>, 0}, - {<<"ids">>, [5]} - ]}}]}, - {doc_validation, "RevId isn't a string"}, - "Revision ids must be strings." - }, - { - {[{<<"_something">>, 5}]}, - {doc_validation, <<"Bad special document member: _something">>}, - "Underscore prefixed fields are reserved." - } - ], - - lists:foreach(fun - ({EJson, Expect, Mesg}) -> - Error = (catch couch_doc:from_json_obj(EJson)), - etap:is(Error, Expect, Mesg); - ({EJson, Mesg}) -> - try - couch_doc:from_json_obj(EJson), - etap:ok(false, "Conversion failed to raise an exception.") - catch - _:_ -> etap:ok(true, Mesg) - end - end, Cases), - ok. diff --git a/test/etap/031-doc-to-json.t b/test/etap/031-doc-to-json.t deleted file mode 100755 index 605a6d00..00000000 --- a/test/etap/031-doc-to-json.t +++ /dev/null @@ -1,200 +0,0 @@ -#!/usr/bin/env escript -%% -*- erlang -*- -%%! -pa ./src/couchdb -pa ./src/mochiweb -sasl errlog_type false -noshell - -% Licensed under the Apache License, Version 2.0 (the "License"); you may not -% use this file except in compliance with the License. You may obtain a copy of -% the License at -% -% http://www.apache.org/licenses/LICENSE-2.0 -% -% Unless required by applicable law or agreed to in writing, software -% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -% License for the specific language governing permissions and limitations under -% the License. - -%% XXX: Figure out how to -include("couch_db.hrl") --record(doc, {id= <<"">>, revs={0, []}, body={[]}, - atts=[], deleted=false, meta=[]}). --record(att, {name, type, att_len, disk_len, md5= <<>>, revpos=0, data, - encoding=identity}). - -default_config() -> - test_util:build_file("etc/couchdb/default_dev.ini"). - -main(_) -> - test_util:init_code_path(), - etap:plan(12), - case (catch test()) of - ok -> - etap:end_tests(); - Other -> - etap:diag(io_lib:format("Test died abnormally: ~p", [Other])), - etap:bail() - end, - ok. - -test() -> - couch_config:start_link([default_config()]), - couch_config:set("attachments", "compression_level", "0"), - ok = test_to_json_success(), - ok.
- -test_to_json_success() -> - Cases = [ - { - #doc{}, - {[{<<"_id">>, <<"">>}]}, - "Empty docs are {\"_id\": \"\"}" - }, - { - #doc{id= <<"foo">>}, - {[{<<"_id">>, <<"foo">>}]}, - "_id is added." - }, - { - #doc{revs={5, ["foo"]}}, - {[{<<"_id">>, <<>>}, {<<"_rev">>, <<"5-foo">>}]}, - "_rev is added." - }, - { - [revs], - #doc{revs={5, [<<"first">>, <<"second">>]}}, - {[ - {<<"_id">>, <<>>}, - {<<"_rev">>, <<"5-first">>}, - {<<"_revisions">>, {[ - {<<"start">>, 5}, - {<<"ids">>, [<<"first">>, <<"second">>]} - ]}} - ]}, - "_revisions include with revs option" - }, - { - #doc{body={[{<<"foo">>, <<"bar">>}]}}, - {[{<<"_id">>, <<>>}, {<<"foo">>, <<"bar">>}]}, - "Arbitrary fields are added." - }, - { - #doc{deleted=true, body={[{<<"foo">>, <<"bar">>}]}}, - {[{<<"_id">>, <<>>}, {<<"foo">>, <<"bar">>}, {<<"_deleted">>, true}]}, - "Deleted docs no longer drop body members." - }, - { - #doc{meta=[ - {revs_info, 4, [{<<"fin">>, deleted}, {<<"zim">>, missing}]} - ]}, - {[ - {<<"_id">>, <<>>}, - {<<"_revs_info">>, [ - {[{<<"rev">>, <<"4-fin">>}, {<<"status">>, <<"deleted">>}]}, - {[{<<"rev">>, <<"3-zim">>}, {<<"status">>, <<"missing">>}]} - ]} - ]}, - "_revs_info field is added correctly." - }, - { - #doc{meta=[{local_seq, 5}]}, - {[{<<"_id">>, <<>>}, {<<"_local_seq">>, 5}]}, - "_local_seq is added as an integer." - }, - { - #doc{meta=[{conflicts, [{3, <<"yep">>}, {1, <<"snow">>}]}]}, - {[ - {<<"_id">>, <<>>}, - {<<"_conflicts">>, [<<"3-yep">>, <<"1-snow">>]} - ]}, - "_conflicts is added as an array of strings." - }, - { - #doc{meta=[{deleted_conflicts, [{10923, <<"big_cowboy_hat">>}]}]}, - {[ - {<<"_id">>, <<>>}, - {<<"_deleted_conflicts">>, [<<"10923-big_cowboy_hat">>]} - ]}, - "_deleted_conflicsts is added as an array of strings." - }, - { - #doc{atts=[ - #att{ - name = <<"big.xml">>, - type = <<"xml/sucks">>, - data = fun() -> ok end, - revpos = 1, - att_len = 400, - disk_len = 400 - }, - #att{ - name = <<"fast.json">>, - type = <<"json/ftw">>, - data = <<"{\"so\": \"there!\"}">>, - revpos = 1, - att_len = 16, - disk_len = 16 - } - ]}, - {[ - {<<"_id">>, <<>>}, - {<<"_attachments">>, {[ - {<<"big.xml">>, {[ - {<<"content_type">>, <<"xml/sucks">>}, - {<<"revpos">>, 1}, - {<<"length">>, 400}, - {<<"stub">>, true} - ]}}, - {<<"fast.json">>, {[ - {<<"content_type">>, <<"json/ftw">>}, - {<<"revpos">>, 1}, - {<<"length">>, 16}, - {<<"stub">>, true} - ]}} - ]}} - ]}, - "Attachments attached as stubs only include a length." - }, - { - [attachments], - #doc{atts=[ - #att{ - name = <<"stuff.txt">>, - type = <<"text/plain">>, - data = fun() -> <<"diet pepsi">> end, - revpos = 1, - att_len = 10, - disk_len = 10 - }, - #att{ - name = <<"food.now">>, - type = <<"application/food">>, - revpos = 1, - data = <<"sammich">> - } - ]}, - {[ - {<<"_id">>, <<>>}, - {<<"_attachments">>, {[ - {<<"stuff.txt">>, {[ - {<<"content_type">>, <<"text/plain">>}, - {<<"revpos">>, 1}, - {<<"data">>, <<"ZGlldCBwZXBzaQ==">>} - ]}}, - {<<"food.now">>, {[ - {<<"content_type">>, <<"application/food">>}, - {<<"revpos">>, 1}, - {<<"data">>, <<"c2FtbWljaA==">>} - ]}} - ]}} - ]}, - "Attachments included inline with attachments option." - } - ], - - lists:foreach(fun - ({Doc, EJson, Mesg}) -> - etap:is(couch_doc:to_json_obj(Doc, []), EJson, Mesg); - ({Options, Doc, EJson, Mesg}) -> - etap:is(couch_doc:to_json_obj(Doc, Options), EJson, Mesg) - end, Cases), - ok. 
- diff --git a/test/etap/040-util.t b/test/etap/040-util.t deleted file mode 100755 index 8f80db87..00000000 --- a/test/etap/040-util.t +++ /dev/null @@ -1,80 +0,0 @@ -#!/usr/bin/env escript -%% -*- erlang -*- - -% Licensed under the Apache License, Version 2.0 (the "License"); you may not -% use this file except in compliance with the License. You may obtain a copy of -% the License at -% -% http://www.apache.org/licenses/LICENSE-2.0 -% -% Unless required by applicable law or agreed to in writing, software -% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -% License for the specific language governing permissions and limitations under -% the License. - -main(_) -> - test_util:init_code_path(), - application:start(crypto), - - etap:plan(14), - case (catch test()) of - ok -> - etap:end_tests(); - Other -> - etap:diag(io_lib:format("Test died abnormally: ~p", [Other])), - etap:bail(Other) - end, - ok. - -test() -> - % to_existing_atom - etap:is(true, couch_util:to_existing_atom(true), "An atom is an atom."), - etap:is(foo, couch_util:to_existing_atom(<<"foo">>), - "A binary foo is the atom foo."), - etap:is(foobarbaz, couch_util:to_existing_atom("foobarbaz"), - "A list of atoms is one munged atom."), - - % implode - etap:is([1, 38, 2, 38, 3], couch_util:implode([1,2,3],"&"), - "use & as separator in list."), - - % trim - Strings = [" foo", "foo ", "\tfoo", " foo ", "foo\t", "foo\n", "\nfoo"], - etap:ok(lists:all(fun(S) -> couch_util:trim(S) == "foo" end, Strings), - "everything here trimmed should be foo."), - - % abs_pathname - {ok, Cwd} = file:get_cwd(), - etap:is(Cwd ++ "/foo", couch_util:abs_pathname("./foo"), - "foo is in this directory."), - - % should_flush - etap:ok(not couch_util:should_flush(), - "Not using enough memory to flush."), - AcquireMem = fun() -> - IntsToAGazillion = lists:seq(1, 200000), - LotsOfData = lists:map( - fun(Int) -> {Int, <<"foobar">>} end, - lists:seq(1, 500000)), - etap:ok(couch_util:should_flush(), - "Allocation 200K tuples puts us above the memory threshold.") - end, - AcquireMem(), - - etap:ok(not couch_util:should_flush(), - "Checking to flush invokes GC."), - - % verify - etap:is(true, couch_util:verify("It4Vooya", "It4Vooya"), - "String comparison."), - etap:is(false, couch_util:verify("It4VooyaX", "It4Vooya"), - "String comparison (unequal lengths)."), - etap:is(true, couch_util:verify(<<"ahBase3r">>, <<"ahBase3r">>), - "Binary comparison."), - etap:is(false, couch_util:verify(<<"ahBase3rX">>, <<"ahBase3r">>), - "Binary comparison (unequal lengths)."), - etap:is(false, couch_util:verify(nil, <<"ahBase3r">>), - "Binary comparison with atom."), - - ok. diff --git a/test/etap/041-uuid-gen-seq.ini b/test/etap/041-uuid-gen-seq.ini deleted file mode 100644 index 94cebc6f..00000000 --- a/test/etap/041-uuid-gen-seq.ini +++ /dev/null @@ -1,19 +0,0 @@ -; Licensed to the Apache Software Foundation (ASF) under one -; or more contributor license agreements. See the NOTICE file -; distributed with this work for additional information -; regarding copyright ownership. The ASF licenses this file -; to you under the Apache License, Version 2.0 (the -; "License"); you may not use this file except in compliance -; with the License. 
You may obtain a copy of the License at -; -; http://www.apache.org/licenses/LICENSE-2.0 -; -; Unless required by applicable law or agreed to in writing, -; software distributed under the License is distributed on an -; "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -; KIND, either express or implied. See the License for the -; specific language governing permissions and limitations -; under the License. - -[uuids] -algorithm = sequential diff --git a/test/etap/041-uuid-gen-utc.ini b/test/etap/041-uuid-gen-utc.ini deleted file mode 100644 index c2b83831..00000000 --- a/test/etap/041-uuid-gen-utc.ini +++ /dev/null @@ -1,19 +0,0 @@ -; Licensed to the Apache Software Foundation (ASF) under one -; or more contributor license agreements. See the NOTICE file -; distributed with this work for additional information -; regarding copyright ownership. The ASF licenses this file -; to you under the Apache License, Version 2.0 (the -; "License"); you may not use this file except in compliance -; with the License. You may obtain a copy of the License at -; -; http://www.apache.org/licenses/LICENSE-2.0 -; -; Unless required by applicable law or agreed to in writing, -; software distributed under the License is distributed on an -; "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -; KIND, either express or implied. See the License for the -; specific language governing permissions and limitations -; under the License. - -[uuids] -algorithm = utc_random diff --git a/test/etap/041-uuid-gen.t b/test/etap/041-uuid-gen.t deleted file mode 100755 index 1e6aa9ee..00000000 --- a/test/etap/041-uuid-gen.t +++ /dev/null @@ -1,118 +0,0 @@ -#!/usr/bin/env escript -%% -*- erlang -*- - -% Licensed under the Apache License, Version 2.0 (the "License"); you may not -% use this file except in compliance with the License. You may obtain a copy of -% the License at -% -% http://www.apache.org/licenses/LICENSE-2.0 -% -% Unless required by applicable law or agreed to in writing, software -% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -% License for the specific language governing permissions and limitations under -% the License. - -default_config() -> - test_util:build_file("etc/couchdb/default_dev.ini"). - -seq_alg_config() -> - test_util:source_file("test/etap/041-uuid-gen-seq.ini"). - -utc_alg_config() -> - test_util:source_file("test/etap/041-uuid-gen-utc.ini"). - -% Run tests and wait for the gen_servers to shutdown -run_test(IniFiles, Test) -> - {ok, Pid} = couch_config:start_link(IniFiles), - erlang:monitor(process, Pid), - couch_uuids:start(), - Test(), - couch_uuids:stop(), - couch_config:stop(), - receive - {'DOWN', _, _, Pid, _} -> ok; - _Other -> etap:diag("OTHER: ~p~n", [_Other]) - after - 1000 -> throw({timeout_error, config_stop}) - end. - -main(_) -> - test_util:init_code_path(), - application:start(crypto), - etap:plan(6), - - case (catch test()) of - ok -> - etap:end_tests(); - Other -> - etap:diag(io_lib:format("Test died abnormally: ~p", [Other])), - etap:bail(Other) - end, - ok. 
- -test() -> - - TestUnique = fun() -> - etap:is( - test_unique(10000, couch_uuids:new()), - true, - "Can generate 10K unique IDs" - ) - end, - run_test([default_config()], TestUnique), - run_test([default_config(), seq_alg_config()], TestUnique), - run_test([default_config(), utc_alg_config()], TestUnique), - - TestMonotonic = fun () -> - etap:is( - couch_uuids:new() < couch_uuids:new(), - true, - "should produce monotonically increasing ids" - ) - end, - run_test([default_config(), seq_alg_config()], TestMonotonic), - run_test([default_config(), utc_alg_config()], TestMonotonic), - - % Pretty sure that the average of a uniform distribution is the - % midpoint of the range. Thus, to exceed a threshold, we need - % approximately Total / (Range/2 + RangeMin) samples. - % - % In our case this works out to be 8194. (0xFFF000 / 0x7FF) - % These tests just fudge the limits for a good generator at 25% - % in either direction. Technically it should be possible to generate - % bounds that will show if your random number generator is not - % sufficiently random but I hated statistics in school. - TestRollOver = fun() -> - UUID = binary_to_list(couch_uuids:new()), - Prefix = element(1, lists:split(26, UUID)), - N = gen_until_pref_change(Prefix,0), - etap:diag("N is: ~p~n",[N]), - etap:is( - N >= 5000 andalso N =< 11000, - true, - "should roll over every so often." - ) - end, - run_test([default_config(), seq_alg_config()], TestRollOver). - -test_unique(0, _) -> - true; -test_unique(N, UUID) -> - case couch_uuids:new() of - UUID -> - etap:diag("N: ~p~n", [N]), - false; - Else -> test_unique(N-1, Else) - end. - -get_prefix(UUID) -> - element(1, lists:split(26, binary_to_list(UUID))). - -gen_until_pref_change(_, Count) when Count > 8251 -> - Count; -gen_until_pref_change(Prefix, N) -> - case get_prefix(couch_uuids:new()) of - Prefix -> gen_until_pref_change(Prefix, N+1); - _ -> N - end. diff --git a/test/etap/050-stream.t b/test/etap/050-stream.t deleted file mode 100755 index 545dd524..00000000 --- a/test/etap/050-stream.t +++ /dev/null @@ -1,87 +0,0 @@ -#!/usr/bin/env escript -%% -*- erlang -*- - -% Licensed under the Apache License, Version 2.0 (the "License"); you may not -% use this file except in compliance with the License. You may obtain a copy of -% the License at -% -% http://www.apache.org/licenses/LICENSE-2.0 -% -% Unless required by applicable law or agreed to in writing, software -% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -% License for the specific language governing permissions and limitations under -% the License. - -main(_) -> - test_util:init_code_path(), - etap:plan(13), - case (catch test()) of - ok -> - etap:end_tests(); - Other -> - etap:diag(io_lib:format("Test died abnormally: ~p", [Other])), - etap:bail(Other) - end, - ok. - -read_all(Fd, PosList) -> - Data = couch_stream:foldl(Fd, PosList, fun(Bin, Acc) -> [Bin, Acc] end, []), - iolist_to_binary(Data). 
- -test() -> - {ok, Fd} = couch_file:open("test/etap/temp.050", [create,overwrite]), - {ok, Stream} = couch_stream:open(Fd), - - etap:is(ok, couch_stream:write(Stream, <<"food">>), - "Writing to streams works."), - - etap:is(ok, couch_stream:write(Stream, <<"foob">>), - "Consecutive writing to streams works."), - - etap:is(ok, couch_stream:write(Stream, <<>>), - "Writing an empty binary does nothing."), - - {Ptrs, Length, _, _, _} = couch_stream:close(Stream), - etap:is(Ptrs, [0], "Close returns the file pointers."), - etap:is(Length, 8, "Close also returns the number of bytes written."), - etap:is(<<"foodfoob">>, read_all(Fd, Ptrs), "Returned pointers are valid."), - - % Remember where we expect the pointer to be. - {ok, ExpPtr} = couch_file:bytes(Fd), - {ok, Stream2} = couch_stream:open(Fd), - OneBits = <<1:(8*10)>>, - etap:is(ok, couch_stream:write(Stream2, OneBits), - "Successfully wrote 80 1 bits."), - - ZeroBits = <<0:(8*10)>>, - etap:is(ok, couch_stream:write(Stream2, ZeroBits), - "Successfully wrote 80 0 bits."), - - {Ptrs2, Length2, _, _, _} = couch_stream:close(Stream2), - etap:is(Ptrs2, [ExpPtr], "Closing stream returns the file pointers."), - etap:is(Length2, 20, "Length written is 20 bytes (160 bits)."), - - AllBits = iolist_to_binary([OneBits,ZeroBits]), - etap:is(AllBits, read_all(Fd, Ptrs2), "Returned pointers are valid."), - - % Stream more than the 4K chunk size. - {ok, ExpPtr2} = couch_file:bytes(Fd), - {ok, Stream3} = couch_stream:open(Fd), - Acc2 = lists:foldl(fun(_, Acc) -> - Data = <<"a1b2c">>, - couch_stream:write(Stream3, Data), - [Data | Acc] - end, [], lists:seq(1, 1024)), - {Ptrs3, Length3, _, _, _} = couch_stream:close(Stream3), - - % 4095 because of 5 * 4096 rem 5 (last write before exceeding threshold) - % + 5 puts us over the threshold - % + 4 bytes for the term_to_binary adding a length header - % + 1 byte every 4K for tail append headers - SecondPtr = ExpPtr2 + 4095 + 5 + 4 + 1, - etap:is(Ptrs3, [ExpPtr2, SecondPtr], "Pointers every 4K bytes."), - etap:is(Length3, 5120, "Wrote the expected 5K bytes."), - - couch_file:close(Fd), - ok. diff --git a/test/etap/060-kt-merging.t b/test/etap/060-kt-merging.t deleted file mode 100755 index d6b13d6d..00000000 --- a/test/etap/060-kt-merging.t +++ /dev/null @@ -1,140 +0,0 @@ -#!/usr/bin/env escript -%% -*- erlang -*- - -% Licensed under the Apache License, Version 2.0 (the "License"); you may not -% use this file except in compliance with the License. You may obtain a copy of -% the License at -% -% http://www.apache.org/licenses/LICENSE-2.0 -% -% Unless required by applicable law or agreed to in writing, software -% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -% License for the specific language governing permissions and limitations under -% the License. - -main(_) -> - test_util:init_code_path(), - etap:plan(16), - case (catch test()) of - ok -> - etap:end_tests(); - Other -> - etap:diag(io_lib:format("Test died abnormally: ~p", [Other])), - etap:bail(Other) - end, - ok.
- -test() -> - EmptyTree = [], - One = [{0, {"1","foo",[]}}], - TwoSibs = [{0, {"1","foo",[]}}, - {0, {"2","foo",[]}}], - OneChild = [{0, {"1","foo",[{"1a", "bar", []}]}}], - TwoChild = [{0, {"1","foo", [{"1a", "bar", [{"1aa", "bar", []}]}]}}], - TwoChildSibs = [{0, {"1","foo", [{"1a", "bar", []}, - {"1b", "bar", []}]}}], - TwoChildSibs2 = [{0, {"1","foo", [{"1a", "bar", []}, - {"1b", "bar", [{"1bb", "boo", []}]}]}}], - Stemmed1b = [{1, {"1a", "bar", []}}], - Stemmed1a = [{1, {"1a", "bar", [{"1aa", "bar", []}]}}], - Stemmed1aa = [{2, {"1aa", "bar", []}}], - Stemmed1bb = [{2, {"1bb", "boo", []}}], - - etap:is( - {EmptyTree, no_conflicts}, - couch_key_tree:merge(EmptyTree, EmptyTree), - "Merging two empty trees yields an empty tree." - ), - - etap:is( - {One, no_conflicts}, - couch_key_tree:merge(EmptyTree, One), - "The empty tree is the identity for merge." - ), - - etap:is( - {One, no_conflicts}, - couch_key_tree:merge(One, EmptyTree), - "Merging is commutative." - ), - - etap:is( - {TwoSibs, no_conflicts}, - couch_key_tree:merge(One, TwoSibs), - "Merging a prefix of a tree with the tree yields the tree." - ), - - etap:is( - {One, no_conflicts}, - couch_key_tree:merge(One, One), - "Merging is reflexive." - ), - - etap:is( - {TwoChild, no_conflicts}, - couch_key_tree:merge(TwoChild, TwoChild), - "Merging two children is still reflexive." - ), - - etap:is( - {TwoChildSibs, no_conflicts}, - couch_key_tree:merge(TwoChildSibs, TwoChildSibs), - "Merging a tree to itself is itself."), - - etap:is( - {TwoChildSibs, no_conflicts}, - couch_key_tree:merge(TwoChildSibs, Stemmed1b), - "Merging a tree with a stem." - ), - - etap:is( - {TwoChildSibs, no_conflicts}, - couch_key_tree:merge(Stemmed1b, TwoChildSibs), - "Merging in the opposite direction." - ), - - etap:is( - {TwoChildSibs2, no_conflicts}, - couch_key_tree:merge(TwoChildSibs2, Stemmed1bb), - "Merging a stem at a deeper level." - ), - - etap:is( - {TwoChildSibs2, no_conflicts}, - couch_key_tree:merge(Stemmed1bb, TwoChildSibs2), - "Merging a deeper level in opposite order." - ), - - etap:is( - {TwoChild, no_conflicts}, - couch_key_tree:merge(TwoChild, Stemmed1aa), - "Merging a single tree with a deeper stem." - ), - - etap:is( - {TwoChild, no_conflicts}, - couch_key_tree:merge(TwoChild, Stemmed1a), - "Merging a larger stem." - ), - - etap:is( - {Stemmed1a, no_conflicts}, - couch_key_tree:merge(Stemmed1a, Stemmed1aa), - "More merging." - ), - - Expect1 = OneChild ++ Stemmed1aa, - etap:is( - {Expect1, conflicts}, - couch_key_tree:merge(OneChild, Stemmed1aa), - "Merging should create conflicts." - ), - - etap:is( - {TwoChild, no_conflicts}, - couch_key_tree:merge(Expect1, TwoChild), - "Merge should have no conflicts." - ), - - ok. diff --git a/test/etap/061-kt-missing-leaves.t b/test/etap/061-kt-missing-leaves.t deleted file mode 100755 index d60b4db8..00000000 --- a/test/etap/061-kt-missing-leaves.t +++ /dev/null @@ -1,65 +0,0 @@ -#!/usr/bin/env escript -%% -*- erlang -*- - -% Licensed under the Apache License, Version 2.0 (the "License"); you may not -% use this file except in compliance with the License. You may obtain a copy of -% the License at -% -% http://www.apache.org/licenses/LICENSE-2.0 -% -% Unless required by applicable law or agreed to in writing, software -% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -% License for the specific language governing permissions and limitations under -% the License. 
- -main(_) -> - test_util:init_code_path(), - etap:plan(4), - case (catch test()) of - ok -> - etap:end_tests(); - Other -> - etap:diag(io_lib:format("Test died abnormally: ~p", [Other])), - etap:bail(Other) - end, - ok. - -test() -> - TwoChildSibs = [{0, {"1","foo", [{"1a", "bar", []}, {"1b", "bar", []}]}}], - Stemmed1 = [{1, {"1a", "bar", [{"1aa", "bar", []}]}}], - Stemmed2 = [{2, {"1aa", "bar", []}}], - - etap:is( - [], - couch_key_tree:find_missing(TwoChildSibs, [{0,"1"}, {1,"1a"}]), - "Look for missing keys." - ), - - etap:is( - [{0, "10"}, {100, "x"}], - couch_key_tree:find_missing( - TwoChildSibs, - [{0,"1"}, {0, "10"}, {1,"1a"}, {100, "x"}] - ), - "Look for missing keys." - ), - - etap:is( - [{0, "1"}, {100, "x"}], - couch_key_tree:find_missing( - Stemmed1, - [{0,"1"}, {1,"1a"}, {100, "x"}] - ), - "Look for missing keys." - ), - etap:is( - [{0, "1"}, {1,"1a"}, {100, "x"}], - couch_key_tree:find_missing( - Stemmed2, - [{0,"1"}, {1,"1a"}, {100, "x"}] - ), - "Look for missing keys." - ), - - ok. diff --git a/test/etap/062-kt-remove-leaves.t b/test/etap/062-kt-remove-leaves.t deleted file mode 100755 index 745a00be..00000000 --- a/test/etap/062-kt-remove-leaves.t +++ /dev/null @@ -1,69 +0,0 @@ -#!/usr/bin/env escript -%% -*- erlang -*- - -% Licensed under the Apache License, Version 2.0 (the "License"); you may not -% use this file except in compliance with the License. You may obtain a copy of -% the License at -% -% http://www.apache.org/licenses/LICENSE-2.0 -% -% Unless required by applicable law or agreed to in writing, software -% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -% License for the specific language governing permissions and limitations under -% the License. - -main(_) -> - test_util:init_code_path(), - etap:plan(6), - case (catch test()) of - ok -> - etap:end_tests(); - Other -> - etap:diag(io_lib:format("Test died abnormally: ~p", [Other])), - etap:bail(Other) - end, - ok. - -test() -> - OneChild = [{0, {"1","foo",[{"1a", "bar", []}]}}], - TwoChildSibs = [{0, {"1","foo", [{"1a", "bar", []}, {"1b", "bar", []}]}}], - Stemmed = [{1, {"1a", "bar", [{"1aa", "bar", []}]}}], - - etap:is( - {TwoChildSibs, []}, - couch_key_tree:remove_leafs(TwoChildSibs, []), - "Removing no leaves has no effect on the tree." - ), - - etap:is( - {TwoChildSibs, []}, - couch_key_tree:remove_leafs(TwoChildSibs, [{0, "1"}]), - "Removing a non-existent branch has no effect." - ), - - etap:is( - {OneChild, [{1, "1b"}]}, - couch_key_tree:remove_leafs(TwoChildSibs, [{1, "1b"}]), - "Removing a leaf removes the leaf." - ), - - etap:is( - {[], [{1, "1b"},{1, "1a"}]}, - couch_key_tree:remove_leafs(TwoChildSibs, [{1, "1a"}, {1, "1b"}]), - "Removing all leaves returns an empty tree." - ), - - etap:is( - {Stemmed, []}, - couch_key_tree:remove_leafs(Stemmed, [{1, "1a"}]), - "Removing a non-existent node has no effect." - ), - - etap:is( - {[], [{2, "1aa"}]}, - couch_key_tree:remove_leafs(Stemmed, [{2, "1aa"}]), - "Removing the last leaf returns an empty tree." - ), - - ok. diff --git a/test/etap/063-kt-get-leaves.t b/test/etap/063-kt-get-leaves.t deleted file mode 100755 index 6d4e8007..00000000 --- a/test/etap/063-kt-get-leaves.t +++ /dev/null @@ -1,98 +0,0 @@ -#!/usr/bin/env escript -%% -*- erlang -*- - -% Licensed under the Apache License, Version 2.0 (the "License"); you may not -% use this file except in compliance with the License.
You may obtain a copy of -% the License at -% -% http://www.apache.org/licenses/LICENSE-2.0 -% -% Unless required by applicable law or agreed to in writing, software -% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -% License for the specific language governing permissions and limitations under -% the License. - -main(_) -> - test_util:init_code_path(), - etap:plan(11), - case (catch test()) of - ok -> - etap:end_tests(); - Other -> - etap:diag(io_lib:format("Test died abnormally: ~p", [Other])), - etap:bail(Other) - end, - ok. - -test() -> - TwoChildSibs = [{0, {"1","foo", [{"1a", "bar", []}, {"1b", "bar", []}]}}], - Stemmed = [{1, {"1a", "bar", [{"1aa", "bar", []}]}}], - - etap:is( - {[{"foo", {0, ["1"]}}],[]}, - couch_key_tree:get(TwoChildSibs, [{0, "1"}]), - "extract a subtree." - ), - - etap:is( - {[{"bar", {1, ["1a", "1"]}}],[]}, - couch_key_tree:get(TwoChildSibs, [{1, "1a"}]), - "extract a subtree." - ), - - etap:is( - {[],[{0,"x"}]}, - couch_key_tree:get_key_leafs(TwoChildSibs, [{0, "x"}]), - "gather up the leaves." - ), - - etap:is( - {[{"bar", {1, ["1a","1"]}}],[]}, - couch_key_tree:get_key_leafs(TwoChildSibs, [{1, "1a"}]), - "gather up the leaves." - ), - - etap:is( - {[{"bar", {1, ["1a","1"]}},{"bar",{1, ["1b","1"]}}],[]}, - couch_key_tree:get_key_leafs(TwoChildSibs, [{0, "1"}]), - "gather up the leaves." - ), - - etap:is( - {[{0,[{"1", "foo"}]}],[]}, - couch_key_tree:get_full_key_paths(TwoChildSibs, [{0, "1"}]), - "retrieve full key paths." - ), - - etap:is( - {[{1,[{"1a", "bar"},{"1", "foo"}]}],[]}, - couch_key_tree:get_full_key_paths(TwoChildSibs, [{1, "1a"}]), - "retrieve full key paths." - ), - - etap:is( - [{2, [{"1aa", "bar"},{"1a", "bar"}]}], - couch_key_tree:get_all_leafs_full(Stemmed), - "retrieve all leaves." - ), - - etap:is( - [{1, [{"1a", "bar"},{"1", "foo"}]}, {1, [{"1b", "bar"},{"1", "foo"}]}], - couch_key_tree:get_all_leafs_full(TwoChildSibs), - "retrieve all the leaves." - ), - - etap:is( - [{"bar", {2, ["1aa","1a"]}}], - couch_key_tree:get_all_leafs(Stemmed), - "retrieve all leaves." - ), - - etap:is( - [{"bar", {1, ["1a", "1"]}}, {"bar", {1, ["1b","1"]}}], - couch_key_tree:get_all_leafs(TwoChildSibs), - "retrieve all the leaves." - ), - - ok. diff --git a/test/etap/064-kt-counting.t b/test/etap/064-kt-counting.t deleted file mode 100755 index f182d287..00000000 --- a/test/etap/064-kt-counting.t +++ /dev/null @@ -1,46 +0,0 @@ -#!/usr/bin/env escript -%% -*- erlang -*- - -% Licensed under the Apache License, Version 2.0 (the "License"); you may not -% use this file except in compliance with the License. You may obtain a copy of -% the License at -% -% http://www.apache.org/licenses/LICENSE-2.0 -% -% Unless required by applicable law or agreed to in writing, software -% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -% License for the specific language governing permissions and limitations under -% the License. - -main(_) -> - test_util:init_code_path(), - etap:plan(4), - case (catch test()) of - ok -> - etap:end_tests(); - Other -> - etap:diag(io_lib:format("Test died abnormally: ~p", [Other])), - etap:bail(Other) - end, - ok. 
- -test() -> - EmptyTree = [], - One = [{0, {"1","foo",[]}}], - TwoChildSibs = [{0, {"1","foo", [{"1a", "bar", []}, {"1b", "bar", []}]}}], - Stemmed = [{2, {"1bb", "boo", []}}], - - etap:is(0, couch_key_tree:count_leafs(EmptyTree), - "Empty trees have no leaves."), - - etap:is(1, couch_key_tree:count_leafs(One), - "Single node trees have a single leaf."), - - etap:is(2, couch_key_tree:count_leafs(TwoChildSibs), - "Two children siblings counted as two leaves."), - - etap:is(1, couch_key_tree:count_leafs(Stemmed), - "Stemming does not affect leaf counting."), - - ok. diff --git a/test/etap/065-kt-stemming.t b/test/etap/065-kt-stemming.t deleted file mode 100755 index 6e781c1d..00000000 --- a/test/etap/065-kt-stemming.t +++ /dev/null @@ -1,42 +0,0 @@ -#!/usr/bin/env escript -%% -*- erlang -*- - -% Licensed under the Apache License, Version 2.0 (the "License"); you may not -% use this file except in compliance with the License. You may obtain a copy of -% the License at -% -% http://www.apache.org/licenses/LICENSE-2.0 -% -% Unless required by applicable law or agreed to in writing, software -% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -% License for the specific language governing permissions and limitations under -% the License. - -main(_) -> - test_util:init_code_path(), - etap:plan(3), - case (catch test()) of - ok -> - etap:end_tests(); - Other -> - etap:diag(io_lib:format("Test died abnormally: ~p", [Other])), - etap:bail(Other) - end, - ok. - -test() -> - TwoChild = [{0, {"1","foo", [{"1a", "bar", [{"1aa", "bar", []}]}]}}], - Stemmed1 = [{1, {"1a", "bar", [{"1aa", "bar", []}]}}], - Stemmed2 = [{2, {"1aa", "bar", []}}], - - etap:is(TwoChild, couch_key_tree:stem(TwoChild, 3), - "Stemming more levels than what exists does nothing."), - - etap:is(Stemmed1, couch_key_tree:stem(TwoChild, 2), - "Stemming with a depth of two returns the deepest two nodes."), - - etap:is(Stemmed2, couch_key_tree:stem(TwoChild, 1), - "Stemming to a depth of one returns the deepest node."), - - ok. diff --git a/test/etap/070-couch-db.t b/test/etap/070-couch-db.t deleted file mode 100755 index 4b14aba6..00000000 --- a/test/etap/070-couch-db.t +++ /dev/null @@ -1,75 +0,0 @@ -#!/usr/bin/env escript -%% -*- erlang -*- - -% Licensed under the Apache License, Version 2.0 (the "License"); you may not -% use this file except in compliance with the License. You may obtain a copy of -% the License at -% -% http://www.apache.org/licenses/LICENSE-2.0 -% -% Unless required by applicable law or agreed to in writing, software -% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -% License for the specific language governing permissions and limitations under -% the License. - -main(_) -> - test_util:init_code_path(), - - etap:plan(4), - case (catch test()) of - ok -> - etap:end_tests(); - Other -> - etap:diag(io_lib:format("Test died abnormally: ~p", [Other])), - etap:bail(Other) - end, - ok. 
- -test() -> - - couch_server_sup:start_link( - ["etc/couchdb/default_dev.ini", "etc/couchdb/local_dev.ini"] - ), - - couch_db:create(<<"etap-test-db">>, []), - {ok, AllDbs} = couch_server:all_databases(), - etap:ok(lists:member(<<"etap-test-db">>, AllDbs), "Database was created."), - - couch_server:delete(<<"etap-test-db">>, []), - {ok, AllDbs2} = couch_server:all_databases(), - etap:ok(not lists:member(<<"etap-test-db">>, AllDbs2), - "Database was deleted."), - - gen_server:call(couch_server, {set_max_dbs_open, 3}), - MkDbName = fun(Int) -> list_to_binary("lru-" ++ integer_to_list(Int)) end, - - lists:foreach(fun(Int) -> - {ok, TestDbs} = couch_server:all_databases(), - ok = case lists:member(MkDbName(Int), TestDbs) of - true -> couch_server:delete(MkDbName(Int), []); - _ -> ok - end, - {ok, Db} = couch_db:create(MkDbName(Int), []), - ok = couch_db:close(Db) - end, lists:seq(1, 6)), - - {ok, AllDbs3} = couch_server:all_databases(), - NumCreated = lists:foldl(fun(Int, Acc) -> - true = lists:member(MkDbName(Int), AllDbs3), - Acc+1 - end, 0, lists:seq(1, 6)), - etap:is(6, NumCreated, "Created all databases."), - - lists:foreach(fun(Int) -> - ok = couch_server:delete(MkDbName(Int), []) - end, lists:seq(1, 6)), - - {ok, AllDbs4} = couch_server:all_databases(), - NumDeleted = lists:foldl(fun(Int, Acc) -> - false = lists:member(MkDbName(Int), AllDbs4), - Acc+1 - end, 0, lists:seq(1, 6)), - etap:is(6, NumDeleted, "Deleted all databases."), - - ok. diff --git a/test/etap/080-config-get-set.t b/test/etap/080-config-get-set.t deleted file mode 100755 index a4a8577a..00000000 --- a/test/etap/080-config-get-set.t +++ /dev/null @@ -1,128 +0,0 @@ -#!/usr/bin/env escript -%% -*- erlang -*- - -% Licensed under the Apache License, Version 2.0 (the "License"); you may not -% use this file except in compliance with the License. You may obtain a copy of -% the License at -% -% http://www.apache.org/licenses/LICENSE-2.0 -% -% Unless required by applicable law or agreed to in writing, software -% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -% License for the specific language governing permissions and limitations under -% the License. - -default_config() -> - test_util:build_file("etc/couchdb/default_dev.ini"). - -main(_) -> - test_util:init_code_path(), - etap:plan(12), - case (catch test()) of - ok -> - etap:end_tests(); - Other -> - etap:diag(io_lib:format("Test died abnormally: ~p", [Other])), - etap:bail(Other) - end, - ok. - -test() -> - % start couch_config with default - couch_config:start_link([default_config()]), - - - % Check that we can get values - - - etap:fun_is( - fun(List) -> length(List) > 0 end, - couch_config:all(), - "Data was loaded from the INI file." - ), - - etap:fun_is( - fun(List) -> length(List) > 0 end, - couch_config:get("daemons"), - "There are settings in the [daemons] section of the INI file." - ), - - etap:is( - couch_config:get("httpd_design_handlers", "_view"), - "{couch_httpd_view, handle_view_req}", - "The {httpd_design_handlers, view} is the expected default." - ), - - etap:is( - couch_config:get("httpd", "foo", "bar"), - "bar", - "Returns the default when key doesn't exist in config." - ), - - etap:is( - couch_config:get("httpd", "foo"), - undefined, - "The default default is the atom 'undefined'." - ), - - etap:is( - couch_config:get("httpd", "port", "bar"), - "5984", - "Only returns the default when the config setting does not exist." 
- ), - - - % Check that setting values works. - - - ok = couch_config:set("log", "level", "severe", false), - - etap:is( - couch_config:get("log", "level"), - "severe", - "Non-persisted changes take effect." - ), - - etap:is( - couch_config:get("new_section", "bizzle"), - undefined, - "Section 'new_section' does not exist." - ), - - ok = couch_config:set("new_section", "bizzle", "bang", false), - - etap:is( - couch_config:get("new_section", "bizzle"), - "bang", - "New section 'new_section' was created for a new key/value pair." - ), - - - % Check that deleting works - - - ok = couch_config:delete("new_section", "bizzle", false), - etap:is( - couch_config:get("new_section", "bizzle"), - undefined, - "Deleting a key sets its value back to undefined." - ), - - - % Check get/set/delete with binary strings - - ok = couch_config:set(<<"foo">>, <<"bar">>, <<"baz">>, false), - etap:is( - couch_config:get(<<"foo">>, <<"bar">>), - <<"baz">>, - "Can get and set with binary section and key values." - ), - ok = couch_config:delete(<<"foo">>, <<"bar">>, false), - etap:is( - couch_config:get(<<"foo">>, <<"bar">>), - undefined, - "Deleting with binary section/key pairs sets the value back to undefined." - ), - - ok. diff --git a/test/etap/081-config-override.1.ini b/test/etap/081-config-override.1.ini deleted file mode 100644 index 55451dad..00000000 --- a/test/etap/081-config-override.1.ini +++ /dev/null @@ -1,22 +0,0 @@ -; Licensed to the Apache Software Foundation (ASF) under one -; or more contributor license agreements. See the NOTICE file -; distributed with this work for additional information -; regarding copyright ownership. The ASF licenses this file -; to you under the Apache License, Version 2.0 (the -; "License"); you may not use this file except in compliance -; with the License. You may obtain a copy of the License at -; -; http://www.apache.org/licenses/LICENSE-2.0 -; -; Unless required by applicable law or agreed to in writing, -; software distributed under the License is distributed on an -; "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -; KIND, either express or implied. See the License for the -; specific language governing permissions and limitations -; under the License. - -[couchdb] -max_dbs_open=10 - -[httpd] -port=4895 diff --git a/test/etap/081-config-override.2.ini b/test/etap/081-config-override.2.ini deleted file mode 100644 index 5f46357f..00000000 --- a/test/etap/081-config-override.2.ini +++ /dev/null @@ -1,22 +0,0 @@ -; Licensed to the Apache Software Foundation (ASF) under one -; or more contributor license agreements. See the NOTICE file -; distributed with this work for additional information -; regarding copyright ownership. The ASF licenses this file -; to you under the Apache License, Version 2.0 (the -; "License"); you may not use this file except in compliance -; with the License. You may obtain a copy of the License at -; -; http://www.apache.org/licenses/LICENSE-2.0 -; -; Unless required by applicable law or agreed to in writing, -; software distributed under the License is distributed on an -; "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -; KIND, either express or implied. See the License for the -; specific language governing permissions and limitations -; under the License.
- -[httpd] -port = 80 - -[fizbang] -unicode = normalized diff --git a/test/etap/081-config-override.t b/test/etap/081-config-override.t deleted file mode 100755 index 01f8b4c2..00000000 --- a/test/etap/081-config-override.t +++ /dev/null @@ -1,212 +0,0 @@ -#!/usr/bin/env escript -%% -*- erlang -*- - -% Licensed under the Apache License, Version 2.0 (the "License"); you may not -% use this file except in compliance with the License. You may obtain a copy of -% the License at -% -% http://www.apache.org/licenses/LICENSE-2.0 -% -% Unless required by applicable law or agreed to in writing, software -% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -% License for the specific language governing permissions and limitations under -% the License. - -default_config() -> - test_util:build_file("etc/couchdb/default_dev.ini"). - -local_config_1() -> - test_util:source_file("test/etap/081-config-override.1.ini"). - -local_config_2() -> - test_util:source_file("test/etap/081-config-override.2.ini"). - -local_config_write() -> - test_util:build_file("test/etap/temp.081"). - -% Run tests and wait for the config gen_server to shutdown. -run_tests(IniFiles, Tests) -> - {ok, Pid} = couch_config:start_link(IniFiles), - erlang:monitor(process, Pid), - Tests(), - couch_config:stop(), - receive - {'DOWN', _, _, Pid, _} -> ok; - _Other -> etap:diag("OTHER: ~p~n", [_Other]) - after - 1000 -> throw({timeout_error, config_stop}) - end. - -main(_) -> - test_util:init_code_path(), - etap:plan(17), - - case (catch test()) of - ok -> - etap:end_tests(); - Other -> - etap:diag(io_lib:format("Test died abnormally: ~p", [Other])), - etap:bail(Other) - end, - ok. - -test() -> - - CheckStartStop = fun() -> ok end, - run_tests([default_config()], CheckStartStop), - - CheckDefaults = fun() -> - etap:is( - couch_config:get("couchdb", "max_dbs_open"), - "100", - "{couchdb, max_dbs_open} is 100 by default." - ), - - etap:is( - couch_config:get("httpd","port"), - "5984", - "{httpd, port} is 5984 by default" - ), - - etap:is( - couch_config:get("fizbang", "unicode"), - undefined, - "{fizbang, unicode} is undefined by default" - ) - end, - - run_tests([default_config()], CheckDefaults), - - - % Check that subsequent files override values appropriately - - CheckOverride = fun() -> - etap:is( - couch_config:get("couchdb", "max_dbs_open"), - "10", - "{couchdb, max_dbs_open} was overridden with the value 10" - ), - - etap:is( - couch_config:get("httpd", "port"), - "4895", - "{httpd, port} was overridden with the value 4895" - ) - end, - - run_tests([default_config(), local_config_1()], CheckOverride), - - - % Check that overrides can create new sections - - CheckOverride2 = fun() -> - etap:is( - couch_config:get("httpd", "port"), - "80", - "{httpd, port} is overridden with the value 80" - ), - - etap:is( - couch_config:get("fizbang", "unicode"), - "normalized", - "{fizbang, unicode} was created by override INI file" - ) - end, - - run_tests([default_config(), local_config_2()], CheckOverride2), - - - % Check that values can be overridden multiple times - - CheckOverride3 = fun() -> - etap:is( - couch_config:get("httpd", "port"), - "80", - "{httpd, port} value was taken from the last specified INI file." - ) - end, - - run_tests( - [default_config(), local_config_1(), local_config_2()], - CheckOverride3 - ), - - % Check persistence to last file. - - % Empty the file in case it exists.
- {ok, Fd} = file:open(local_config_write(), write), - ok = file:truncate(Fd), - ok = file:close(Fd), - - % Open and write a value - CheckCanWrite = fun() -> - etap:is( - couch_config:get("httpd", "port"), - "5984", - "{httpd, port} is still 5984 by default" - ), - - etap:is( - couch_config:set("httpd", "port", "8080"), - ok, - "Writing {httpd, port} is kosher." - ), - - etap:is( - couch_config:get("httpd", "port"), - "8080", - "{httpd, port} was updated to 8080 successfully." - ), - - etap:is( - couch_config:delete("httpd", "bind_address"), - ok, - "Deleting {httpd, bind_address} succeeds" - ), - - etap:is( - couch_config:get("httpd", "bind_address"), - undefined, - "{httpd, bind_address} was actually deleted." - ) - end, - - run_tests([default_config(), local_config_write()], CheckCanWrite), - - % Open and check where we don't expect persistence. - - CheckDidntWrite = fun() -> - etap:is( - couch_config:get("httpd", "port"), - "5984", - "{httpd, port} was not persisted to the primary INI file." - ), - - etap:is( - couch_config:get("httpd", "bind_address"), - "127.0.0.1", - "{httpd, bind_address} was not deleted form the primary INI file." - ) - end, - - run_tests([default_config()], CheckDidntWrite), - - % Open and check we have only the persistence we expect. - CheckDidWrite = fun() -> - etap:is( - couch_config:get("httpd", "port"), - "8080", - "{httpd, port} is still 8080 after reopening the config." - ), - - etap:is( - couch_config:get("httpd", "bind_address"), - undefined, - "{httpd, bind_address} is still \"\" after reopening." - ) - end, - - run_tests([local_config_write()], CheckDidWrite), - - ok. diff --git a/test/etap/082-config-register.t b/test/etap/082-config-register.t deleted file mode 100755 index 191ba8f8..00000000 --- a/test/etap/082-config-register.t +++ /dev/null @@ -1,94 +0,0 @@ -#!/usr/bin/env escript -%% -*- erlang -*- - -% Licensed under the Apache License, Version 2.0 (the "License"); you may not -% use this file except in compliance with the License. You may obtain a copy of -% the License at -% -% http://www.apache.org/licenses/LICENSE-2.0 -% -% Unless required by applicable law or agreed to in writing, software -% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -% License for the specific language governing permissions and limitations under -% the License. - -default_config() -> - test_util:build_file("etc/couchdb/default_dev.ini"). - -main(_) -> - test_util:init_code_path(), - etap:plan(5), - case (catch test()) of - ok -> - etap:end_tests(); - Other -> - etap:diag(io_lib:format("Test died abnormally: ~p", [Other])), - etap:bail(Other) - end, - ok. - -test() -> - couch_config:start_link([default_config()]), - - etap:is( - couch_config:get("httpd", "port"), - "5984", - "{httpd, port} is 5984 by default." - ), - - ok = couch_config:set("httpd", "port", "4895", false), - - etap:is( - couch_config:get("httpd", "port"), - "4895", - "{httpd, port} changed to 4895" - ), - - SentinelFunc = fun() -> - % Ping/Pong to make sure we wait for this - % process to die - receive {ping, From} -> From ! 
pong end - end, - SentinelPid = spawn(SentinelFunc), - - couch_config:register( - fun("httpd", "port", Value) -> - etap:is(Value, "8080", "Registered function got notification.") - end, - SentinelPid - ), - - ok = couch_config:set("httpd", "port", "8080", false), - - % Implicitly checking that we *don't* call the function - etap:is( - couch_config:get("httpd", "bind_address"), - "127.0.0.1", - "{httpd, bind_address} is not '0.0.0.0'" - ), - ok = couch_config:set("httpd", "bind_address", "0.0.0.0", false), - - % Ping-Pong kill process - SentinelPid ! {ping, self()}, - receive - _Any -> ok - after 1000 -> - throw({timeout_error, registered_pid}) - end, - - ok = couch_config:set("httpd", "port", "80", false), - etap:is( - couch_config:get("httpd", "port"), - "80", - "Implicitly test that the function got de-registered" - ), - - % test passing of Persist flag - couch_config:register( - fun("httpd", _, _, Persist) -> - etap:is(Persist, false) - end), - ok = couch_config:set("httpd", "port", "80", false), - - ok. diff --git a/test/etap/083-config-no-files.t b/test/etap/083-config-no-files.t deleted file mode 100755 index 675feb59..00000000 --- a/test/etap/083-config-no-files.t +++ /dev/null @@ -1,55 +0,0 @@ -#!/usr/bin/env escript -%% -*- erlang -*- - -% Licensed under the Apache License, Version 2.0 (the "License"); you may not -% use this file except in compliance with the License. You may obtain a copy of -% the License at -% -% http://www.apache.org/licenses/LICENSE-2.0 -% -% Unless required by applicable law or agreed to in writing, software -% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -% License for the specific language governing permissions and limitations under -% the License. - -default_config() -> - test_util:build_file("etc/couchdb/default_dev.ini"). - -main(_) -> - test_util:init_code_path(), - etap:plan(3), - case (catch test()) of - ok -> - etap:end_tests(); - Other -> - etap:diag(io_lib:format("Test died abnormally: ~p", [Other])), - etap:bail(Other) - end, - ok. - -test() -> - couch_config:start_link([]), - - etap:fun_is( - fun(KVPairs) -> length(KVPairs) == 0 end, - couch_config:all(), - "No INI files specified returns 0 key/value pairs." - ), - - ok = couch_config:set("httpd", "port", "80", false), - - etap:is( - couch_config:get("httpd", "port"), - "80", - "Created a new non-persisted k/v pair." - ), - - ok = couch_config:set("httpd", "bind_address", "127.0.0.1"), - etap:is( - couch_config:get("httpd", "bind_address"), - "127.0.0.1", - "Asking for a persistent key/value pair doesn't choke." - ), - - ok. diff --git a/test/etap/090-task-status.t b/test/etap/090-task-status.t deleted file mode 100755 index b278de7f..00000000 --- a/test/etap/090-task-status.t +++ /dev/null @@ -1,209 +0,0 @@ -#!/usr/bin/env escript -%% -*- erlang -*- - -% Licensed under the Apache License, Version 2.0 (the "License"); you may not -% use this file except in compliance with the License. You may obtain a copy of -% the License at -% -% http://www.apache.org/licenses/LICENSE-2.0 -% -% Unless required by applicable law or agreed to in writing, software -% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -% License for the specific language governing permissions and limitations under -% the License. 
- -main(_) -> - test_util:init_code_path(), - etap:plan(16), - case (catch test()) of - ok -> - etap:end_tests(); - Other -> - etap:diag(io_lib:format("Test died abnormally: ~p", [Other])), - etap:bail(Other) - end, - ok. - -check_status(Pid,ListPropLists) -> - From = list_to_binary(pid_to_list(Pid)), - Element = lists:foldl( - fun(PropList,Acc) -> - case couch_util:get_value(pid,PropList) of - From -> - [PropList | Acc]; - _ -> - [] - end - end, - [], ListPropLists - ), - couch_util:get_value(status,hd(Element)). - -loop() -> - receive - {add, From} -> - Resp = couch_task_status:add_task("type", "task", "init"), - From ! {ok, self(), Resp}, - loop(); - {update, Status, From} -> - Resp = couch_task_status:update(Status), - From ! {ok, self(), Resp}, - loop(); - {update_frequency, Msecs, From} -> - Resp = couch_task_status:set_update_frequency(Msecs), - From ! {ok, self(), Resp}, - loop(); - {done, From} -> - From ! {ok, self(), ok} - end. - -call(Pid, Command) -> - Pid ! {Command, self()}, - wait(Pid). - -call(Pid, Command, Arg) -> - Pid ! {Command, Arg, self()}, - wait(Pid). - -wait(Pid) -> - receive - {ok, Pid, Msg} -> Msg - after 1000 -> - throw(timeout_error) - end. - -test() -> - {ok, TaskStatusPid} = couch_task_status:start_link(), - - TaskUpdater = fun() -> loop() end, - % create three updaters - Pid1 = spawn(TaskUpdater), - Pid2 = spawn(TaskUpdater), - Pid3 = spawn(TaskUpdater), - - ok = call(Pid1, add), - etap:is( - length(couch_task_status:all()), - 1, - "Started a task" - ), - - etap:is( - call(Pid1, add), - {add_task_error, already_registered}, - "Unable to register multiple tasks for a single Pid." - ), - - etap:is( - check_status(Pid1, couch_task_status:all()), - <<"init">>, - "Task status was set to 'init'." - ), - - call(Pid1,update,"running"), - etap:is( - check_status(Pid1,couch_task_status:all()), - <<"running">>, - "Status updated to 'running'." - ), - - - call(Pid2,add), - etap:is( - length(couch_task_status:all()), - 2, - "Started a second task." - ), - - etap:is( - check_status(Pid2, couch_task_status:all()), - <<"init">>, - "Second tasks's status was set to 'init'." - ), - - call(Pid2, update, "running"), - etap:is( - check_status(Pid2, couch_task_status:all()), - <<"running">>, - "Second task's status updated to 'running'." - ), - - - call(Pid3, add), - etap:is( - length(couch_task_status:all()), - 3, - "Registered a third task." - ), - - etap:is( - check_status(Pid3, couch_task_status:all()), - <<"init">>, - "Third tasks's status was set to 'init'." - ), - - call(Pid3, update, "running"), - etap:is( - check_status(Pid3, couch_task_status:all()), - <<"running">>, - "Third task's status updated to 'running'." - ), - - - call(Pid3, update_frequency, 500), - call(Pid3, update, "still running"), - etap:is( - check_status(Pid3, couch_task_status:all()), - <<"still running">>, - "Third task's status updated to 'still running'." - ), - - call(Pid3, update, "skip this update"), - etap:is( - check_status(Pid3, couch_task_status:all()), - <<"still running">>, - "Status update dropped because of frequency limit." - ), - - call(Pid3, update_frequency, 0), - call(Pid3, update, "don't skip"), - etap:is( - check_status(Pid3, couch_task_status:all()), - <<"don't skip">>, - "Status updated after reseting frequency limit." - ), - - - call(Pid1, done), - etap:is( - length(couch_task_status:all()), - 2, - "First task finished." - ), - - call(Pid2, done), - etap:is( - length(couch_task_status:all()), - 1, - "Second task finished." 
- ), - - call(Pid3, done), - etap:is( - length(couch_task_status:all()), - 0, - "Third task finished." - ), - - erlang:monitor(process, TaskStatusPid), - couch_task_status:stop(), - receive - {'DOWN', _, _, TaskStatusPid, _} -> - ok - after - 1000 -> - throw(timeout_error) - end, - - ok. diff --git a/test/etap/100-ref-counter.t b/test/etap/100-ref-counter.t deleted file mode 100755 index 8f996d04..00000000 --- a/test/etap/100-ref-counter.t +++ /dev/null @@ -1,114 +0,0 @@ -#!/usr/bin/env escript -%% -*- erlang -*- - -% Licensed under the Apache License, Version 2.0 (the "License"); you may not -% use this file except in compliance with the License. You may obtain a copy of -% the License at -% -% http://www.apache.org/licenses/LICENSE-2.0 -% -% Unless required by applicable law or agreed to in writing, software -% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -% License for the specific language governing permissions and limitations under -% the License. - -main(_) -> - test_util:init_code_path(), - etap:plan(8), - case (catch test()) of - ok -> - etap:end_tests(); - Other -> - etap:diag(io_lib:format("Test died abnormally: ~p", [Other])), - etap:bail(Other) - end, - ok. - -loop() -> - receive - close -> ok - end. - -wait() -> - receive - {'DOWN', _, _, _, _} -> ok - after 1000 -> - throw(timeout_error) - end. - -test() -> - {ok, RefCtr} = couch_ref_counter:start([]), - - etap:is( - couch_ref_counter:count(RefCtr), - 1, - "A ref_counter is initialized with the calling process as a referer." - ), - - ChildPid1 = spawn(fun() -> loop() end), - - % This is largely implicit in that nothing else breaks - % as ok is just returned from gen_server:cast() - etap:is( - couch_ref_counter:drop(RefCtr, ChildPid1), - ok, - "Dropping an unknown Pid is ignored." - ), - - couch_ref_counter:add(RefCtr, ChildPid1), - etap:is( - couch_ref_counter:count(RefCtr), - 2, - "Adding a Pid to the ref_counter increases it's count." - ), - - couch_ref_counter:add(RefCtr, ChildPid1), - etap:is( - couch_ref_counter:count(RefCtr), - 2, - "Readding the same Pid maintains the count but increments it's refs." - ), - - couch_ref_counter:drop(RefCtr, ChildPid1), - etap:is( - couch_ref_counter:count(RefCtr), - 2, - "Droping the doubly added Pid only removes a ref, not a referer." - ), - - couch_ref_counter:drop(RefCtr, ChildPid1), - etap:is( - couch_ref_counter:count(RefCtr), - 1, - "Dropping the second ref drops the referer." - ), - - couch_ref_counter:add(RefCtr, ChildPid1), - etap:is( - couch_ref_counter:count(RefCtr), - 2, - "Sanity checking that the Pid was re-added." - ), - - erlang:monitor(process, ChildPid1), - ChildPid1 ! close, - wait(), - - CheckFun = fun - (Iter, nil) -> - case couch_ref_counter:count(RefCtr) of - 1 -> Iter; - _ -> nil - end; - (_, Acc) -> - Acc - end, - Result = lists:foldl(CheckFun, nil, lists:seq(1, 10000)), - etap:isnt( - Result, - nil, - "The referer count was decremented automatically on process exit." - ), - - ok. diff --git a/test/etap/110-replication-httpc.t b/test/etap/110-replication-httpc.t deleted file mode 100755 index b534b648..00000000 --- a/test/etap/110-replication-httpc.t +++ /dev/null @@ -1,134 +0,0 @@ -#!/usr/bin/env escript -%% -*- erlang -*- - -% Licensed under the Apache License, Version 2.0 (the "License"); you may not -% use this file except in compliance with the License. 
You may obtain a copy of -% the License at -% -% http://www.apache.org/licenses/LICENSE-2.0 -% -% Unless required by applicable law or agreed to in writing, software -% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -% License for the specific language governing permissions and limitations under -% the License. - -%% XXX: Figure out how to -include("couch_rep.hrl") --record(http_db, { - url, - auth = [], - resource = "", - headers = [ - {"User-Agent", "CouchDB/"++couch:version()}, - {"Accept", "application/json"}, - {"Accept-Encoding", "gzip"} - ], - qs = [], - method = get, - body = nil, - options = [ - {response_format,binary}, - {inactivity_timeout, 30000} - ], - retries = 10, - pause = 1, - conn = nil -}). - -server() -> "http://127.0.0.1:5984/". -dbname() -> "etap-test-db". - -config_files() -> - lists:map(fun test_util:build_file/1, [ - "etc/couchdb/default_dev.ini", - "etc/couchdb/local_dev.ini" - ]). - -main(_) -> - test_util:init_code_path(), - - etap:plan(6), - case (catch test()) of - ok -> - etap:end_tests(); - Other -> - etap:diag(io_lib:format("Test died abnormally: ~p", [Other])), - etap:bail(Other) - end, - ok. - -test() -> - couch_server_sup:start_link(config_files()), - ibrowse:start(), - crypto:start(), - - couch_server:delete(list_to_binary(dbname()), []), - {ok, Db} = couch_db:create(list_to_binary(dbname()), []), - - test_welcome(), - test_binary_url(), - test_put(), - test_qs(), - test_db_exists(), - - couch_db:close(Db), - couch_server:delete(list_to_binary(dbname()), []), - ok. - -test_welcome() -> - WelcomeReq = #http_db{url=server()}, - Expect = {[ - {<<"couchdb">>, <<"Welcome">>}, - {<<"version">>, list_to_binary(couch:version())} - ]}, - etap:is( - couch_rep_httpc:request(WelcomeReq), - Expect, - "welcome request with url-as-list" - ). - -test_binary_url() -> - Req = #http_db{url=list_to_binary(server())}, - Expect = {[ - {<<"couchdb">>, <<"Welcome">>}, - {<<"version">>, list_to_binary(couch:version())} - ]}, - etap:is( - couch_rep_httpc:request(Req), - Expect, - "welcome request with url-as-binary" - ). - -test_put() -> - Req = #http_db{ - url = server() ++ dbname() ++ "/", - resource = "test_put", - body = {[{<<"foo">>, <<"bar">>}]}, - method = put - }, - {Resp} = couch_rep_httpc:request(Req), - etap:ok(couch_util:get_value(<<"ok">>, Resp), "ok:true on upload"), - etap:is(<<"test_put">>, couch_util:get_value(<<"id">>, Resp), "id is correct"). - -test_qs() -> - Req = #http_db{ - url = server() ++ dbname() ++ "/", - resource = "foo", - qs = [ - {bar, true}, - {baz, 1.03}, - {bif, mochijson2:encode(<<"1-23456">>)} - ] - }, - Expect = server() ++ dbname() ++ "/foo?bar=true&baz=1.03&bif=\"1-23456\"", - etap:is( - couch_rep_httpc:full_url(Req), - Expect, - "query-string proplist encoding ok" - ). - -test_db_exists() -> - Req1 = #http_db{url=server() ++ dbname() ++ "/"}, - Req2 = #http_db{url=server() ++ dbname() ++ "_foo/"}, - etap:is(couch_rep_httpc:db_exists(Req1), Req1, "db_exists true check"). - % etap:is(couch_rep_httpc:db_exists(Req2), false, "db_exists false check"). 
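The test_qs expectation above pins down how couch_rep_httpc:full_url/1 is expected to render the qs proplist: each pair becomes key=value, booleans and numbers are printed literally, and the already-JSON-encoded rev is passed through with its quotes intact. A rough sketch of that serialization, using hypothetical encode_qs/1 and to_string/1 helpers rather than the real couch_rep_httpc internals:

% Hypothetical helpers, for illustration only -- not couch_rep_httpc's code.
encode_qs([]) ->
    "";
encode_qs(Props) ->
    Pairs = [atom_to_list(K) ++ "=" ++ to_string(V) || {K, V} <- Props],
    "?" ++ string:join(Pairs, "&").

to_string(V) when is_integer(V); is_float(V) ->
    lists:flatten(io_lib:format("~p", [V]));
to_string(V) ->
    lists:flatten(io_lib:format("~s", [V])).

% encode_qs([{bar, true}, {baz, 1.03}, {bif, mochijson2:encode(<<"1-23456">>)}])
% gives "?bar=true&baz=1.03&bif=\"1-23456\"", matching the Expect URL above.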
diff --git a/test/etap/111-replication-changes-feed.t b/test/etap/111-replication-changes-feed.t deleted file mode 100755 index bca12bc7..00000000 --- a/test/etap/111-replication-changes-feed.t +++ /dev/null @@ -1,254 +0,0 @@ -#!/usr/bin/env escript -%% -*- erlang -*- - -% Licensed under the Apache License, Version 2.0 (the "License"); you may not -% use this file except in compliance with the License. You may obtain a copy of -% the License at -% -% http://www.apache.org/licenses/LICENSE-2.0 -% -% Unless required by applicable law or agreed to in writing, software -% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -% License for the specific language governing permissions and limitations under -% the License. - -%% XXX: Figure out how to -include("couch_db.hrl") --record(doc, {id= <<"">>, revs={0, []}, body={[]}, - attachments=[], deleted=false, meta=[]}). - --record(http_db, { - url, - auth = [], - resource = "", - headers = [ - {"User-Agent", "CouchDB/"++couch:version()}, - {"Accept", "application/json"}, - {"Accept-Encoding", "gzip"} - ], - qs = [], - method = get, - body = nil, - options = [ - {response_format,binary}, - {inactivity_timeout, 30000} - ], - retries = 10, - pause = 1, - conn = nil -}). - -config_files() -> - lists:map(fun test_util:build_file/1, [ - "etc/couchdb/default_dev.ini", - "etc/couchdb/local_dev.ini" - ]). - -main(_) -> - test_util:init_code_path(), - - etap:plan(13), - case (catch test()) of - ok -> - etap:end_tests(); - Other -> - etap:diag(io_lib:format("Test died abnormally: ~p", [Other])), - etap:bail(Other) - end, - ok. - -test() -> - couch_server_sup:start_link(config_files()), - ibrowse:start(), - crypto:start(), - - couch_server:delete(<<"etap-test-db">>, []), - {ok, Db1} = couch_db:create(<<"etap-test-db">>, []), - test_all(local), - couch_db:close(Db1), - couch_server:delete(<<"etap-test-db">>, []), - - couch_server:delete(<<"etap-test-db">>, []), - {ok, Db2} = couch_db:create(<<"etap-test-db">>, []), - test_all(remote), - test_remote_only(), - couch_db:close(Db2), - couch_server:delete(<<"etap-test-db">>, []), - - ok. - -test_all(Type) -> - test_unchanged_db(Type), - test_simple_change(Type), - test_since_parameter(Type), - test_continuous_parameter(Type), - test_conflicts(Type), - test_deleted_conflicts(Type). - -test_remote_only() -> - test_chunk_reassembly(remote). - -test_unchanged_db(Type) -> - {ok, Pid} = start_changes_feed(Type, 0, false), - etap:is( - couch_rep_changes_feed:next(Pid), - complete, - io_lib:format( - "(~p) changes feed for unchanged DB is automatically complete", - [Type]) - ). - -test_simple_change(Type) -> - Expect = generate_change(), - {ok, Pid} = start_changes_feed(Type, 0, false), - etap:is( - {couch_rep_changes_feed:next(Pid), couch_rep_changes_feed:next(Pid)}, - {[Expect], complete}, - io_lib:format("(~p) change one document, get one row", [Type]) - ). - -test_since_parameter(Type) -> - {ok, Pid} = start_changes_feed(Type, get_update_seq(), false), - etap:is( - couch_rep_changes_feed:next(Pid), - complete, - io_lib:format( - "(~p) since query-string parameter allows us to skip changes", - [Type]) - ). - -test_continuous_parameter(Type) -> - {ok, Pid} = start_changes_feed(Type, get_update_seq(), true), - - % make the changes_feed request before the next update - Self = self(), - spawn(fun() -> - Change = couch_rep_changes_feed:next(Pid), - Self ! 
{actual, Change} - end), - - Expect = generate_change(), - etap:is( - receive {actual, Actual} -> Actual end, - [Expect], - io_lib:format( - "(~p) feed=continuous query-string parameter picks up new changes", - [Type]) - ), - - ok = couch_rep_changes_feed:stop(Pid). - -test_conflicts(Type) -> - Since = get_update_seq(), - Expect = generate_conflict(), - {ok, Pid} = start_changes_feed(Type, Since, false), - etap:is( - {couch_rep_changes_feed:next(Pid), couch_rep_changes_feed:next(Pid)}, - {[Expect], complete}, - io_lib:format("(~p) conflict revisions show up in feed", [Type]) - ). - -test_deleted_conflicts(Type) -> - Since = get_update_seq(), - {ExpectProps} = generate_conflict(), - - %% delete the conflict revision - Id = couch_util:get_value(<<"id">>, ExpectProps), - [Win, {[{<<"rev">>, Lose}]}] = couch_util:get_value(<<"changes">>, ExpectProps), - Doc = couch_doc:from_json_obj({[ - {<<"_id">>, Id}, - {<<"_rev">>, Lose}, - {<<"_deleted">>, true} - ]}), - Db = get_db(), - {ok, Rev} = couch_db:update_doc(Db, Doc, [full_commit]), - couch_db:close(Db), - - Expect = {[ - {<<"seq">>, get_update_seq()}, - {<<"id">>, Id}, - {<<"changes">>, [Win, {[{<<"rev">>, couch_doc:rev_to_str(Rev)}]}]} - ]}, - - {ok, Pid} = start_changes_feed(Type, Since, false), - etap:is( - {couch_rep_changes_feed:next(Pid), couch_rep_changes_feed:next(Pid)}, - {[Expect], complete}, - io_lib:format("(~p) deleted conflict revisions show up in feed", [Type]) - ). - -test_chunk_reassembly(Type) -> - Since = get_update_seq(), - Expect = [generate_change() || _I <- lists:seq(1,30)], - {ok, Pid} = start_changes_feed(Type, Since, false), - etap:is( - get_all_changes(Pid, []), - Expect, - io_lib:format("(~p) reassembles chunks split across TCP frames", - [Type]) - ). - -get_all_changes(Pid, Acc) -> - case couch_rep_changes_feed:next(Pid) of - complete -> - lists:flatten(lists:reverse(Acc)); - Else -> - get_all_changes(Pid, [Else|Acc]) - end. - -generate_change() -> - generate_change(couch_uuids:random()). - -generate_change(Id) -> - generate_change(Id, {[]}). - -generate_change(Id, EJson) -> - Doc = couch_doc:from_json_obj(EJson), - Db = get_db(), - {ok, Rev} = couch_db:update_doc(Db, Doc#doc{id = Id}, [full_commit]), - couch_db:close(Db), - {[ - {<<"seq">>, get_update_seq()}, - {<<"id">>, Id}, - {<<"changes">>, [{[{<<"rev">>, couch_doc:rev_to_str(Rev)}]}]} - ]}. - -generate_conflict() -> - Id = couch_uuids:random(), - Db = get_db(), - Doc1 = (couch_doc:from_json_obj({[<<"foo">>, <<"bar">>]}))#doc{id = Id}, - Doc2 = (couch_doc:from_json_obj({[<<"foo">>, <<"baz">>]}))#doc{id = Id}, - {ok, Rev1} = couch_db:update_doc(Db, Doc1, [full_commit]), - {ok, Rev2} = couch_db:update_doc(Db, Doc2, [full_commit, all_or_nothing]), - - %% relies on undocumented CouchDB conflict winner algo and revision sorting! - RevList = [{[{<<"rev">>, couch_doc:rev_to_str(R)}]} || R - <- lists:sort(fun(A,B) -> B<A end, [Rev1,Rev2])], - {[ - {<<"seq">>, get_update_seq()}, - {<<"id">>, Id}, - {<<"changes">>, RevList} - ]}. - -get_db() -> - {ok, Db} = couch_db:open(<<"etap-test-db">>, []), - Db. - -get_dbname(local) -> - "etap-test-db"; -get_dbname(remote) -> - "http://127.0.0.1:5984/etap-test-db/". - -get_update_seq() -> - Db = get_db(), - Seq = couch_db:get_update_seq(Db), - couch_db:close(Db), - Seq. 
- -start_changes_feed(local, Since, Continuous) -> - Props = [{<<"continuous">>, Continuous}], - couch_rep_changes_feed:start_link(self(), get_db(), Since, Props); -start_changes_feed(remote, Since, Continuous) -> - Props = [{<<"continuous">>, Continuous}], - Db = #http_db{url = get_dbname(remote)}, - couch_rep_changes_feed:start_link(self(), Db, Since, Props). diff --git a/test/etap/112-replication-missing-revs.t b/test/etap/112-replication-missing-revs.t deleted file mode 100755 index ea8466f6..00000000 --- a/test/etap/112-replication-missing-revs.t +++ /dev/null @@ -1,195 +0,0 @@ -#!/usr/bin/env escript -%% -*- erlang -*- - -% Licensed under the Apache License, Version 2.0 (the "License"); you may not -% use this file except in compliance with the License. You may obtain a copy of -% the License at -% -% http://www.apache.org/licenses/LICENSE-2.0 -% -% Unless required by applicable law or agreed to in writing, software -% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -% License for the specific language governing permissions and limitations under -% the License. - -%% XXX: Figure out how to -include("couch_db.hrl") - --record(doc, {id= <<"">>, revs={0, []}, body={[]}, - attachments=[], deleted=false, meta=[]}). - --record(http_db, { - url, - auth = [], - resource = "", - headers = [ - {"User-Agent", "CouchDB/"++couch:version()}, - {"Accept", "application/json"}, - {"Accept-Encoding", "gzip"} - ], - qs = [], - method = get, - body = nil, - options = [ - {response_format,binary}, - {inactivity_timeout, 30000} - ], - retries = 10, - pause = 1, - conn = nil -}). - -config_files() -> - lists:map(fun test_util:build_file/1, [ - "etc/couchdb/default_dev.ini", - "etc/couchdb/local_dev.ini" - ]). - -main(_) -> - test_util:init_code_path(), - - etap:plan(12), - case (catch test()) of - ok -> - etap:end_tests(); - Other -> - etap:diag(io_lib:format("Test died abnormally: ~p", [Other])), - etap:bail(Other) - end, - ok. - -test() -> - couch_server_sup:start_link(config_files()), - ibrowse:start(), - crypto:start(), - - couch_server:delete(<<"etap-test-source">>, []), - couch_server:delete(<<"etap-test-target">>, []), - - Dbs1 = setup(), - test_all(local, local), - ok = teardown(Dbs1), - - Dbs2 = setup(), - test_all(local, remote), - ok = teardown(Dbs2), - - Dbs3 = setup(), - test_all(remote, local), - ok = teardown(Dbs3), - - Dbs4 = setup(), - test_all(remote, remote), - ok = teardown(Dbs4), - - ok. - -test_all(SrcType, TgtType) -> - test_unchanged_db(SrcType, TgtType), - test_multiple_changes(SrcType, TgtType), - test_changes_not_missing(SrcType, TgtType). - -test_unchanged_db(SrcType, TgtType) -> - {ok, Pid1} = start_changes_feed(SrcType, 0, false), - {ok, Pid2} = start_missing_revs(TgtType, Pid1), - etap:is( - couch_rep_missing_revs:next(Pid2), - complete, - io_lib:format( - "(~p, ~p) no missing revs if source is unchanged", - [SrcType, TgtType]) - ). - -test_multiple_changes(SrcType, TgtType) -> - Expect = {2, [generate_change(), generate_change()]}, - {ok, Pid1} = start_changes_feed(SrcType, 0, false), - {ok, Pid2} = start_missing_revs(TgtType, Pid1), - etap:is( - get_all_missing_revs(Pid2, {0, []}), - Expect, - io_lib:format("(~p, ~p) add src docs, get missing tgt revs + high seq", - [SrcType, TgtType]) - ). 
- -test_changes_not_missing(SrcType, TgtType) -> - %% put identical changes on source and target - Id = couch_uuids:random(), - {Id, _Seq, [Rev]} = Expect = generate_change(Id, {[]}, get_db(source)), - {Id, _, [Rev]} = generate_change(Id, {[]}, get_db(target)), - - %% confirm that this change is not in missing revs feed - {ok, Pid1} = start_changes_feed(SrcType, 0, false), - {ok, Pid2} = start_missing_revs(TgtType, Pid1), - {HighSeq, AllRevs} = get_all_missing_revs(Pid2, {0, []}), - - %% etap:none/3 has a bug, so just define it correctly here - etap:is( - lists:member(Expect, AllRevs), - false, - io_lib:format( - "(~p, ~p) skip revs that already exist on target", - [SrcType, TgtType]) - ). - -generate_change() -> - generate_change(couch_uuids:random()). - -generate_change(Id) -> - generate_change(Id, {[]}). - -generate_change(Id, EJson) -> - generate_change(Id, EJson, get_db(source)). - -generate_change(Id, EJson, Db) -> - Doc = couch_doc:from_json_obj(EJson), - Seq = get_update_seq(), - {ok, Rev} = couch_db:update_doc(Db, Doc#doc{id = Id}, [full_commit]), - couch_db:close(Db), - {Id, Seq+1, [Rev]}. - -get_all_missing_revs(Pid, {HighSeq, Revs}) -> - case couch_rep_missing_revs:next(Pid) of - complete -> - {HighSeq, lists:flatten(lists:reverse(Revs))}; - {Seq, More} -> - get_all_missing_revs(Pid, {Seq, [More|Revs]}) - end. - -get_db(source) -> - {ok, Db} = couch_db:open(<<"etap-test-source">>, []), - Db; -get_db(target) -> - {ok, Db} = couch_db:open(<<"etap-test-target">>, []), - Db. - -get_update_seq() -> - Db = get_db(source), - Seq = couch_db:get_update_seq(Db), - couch_db:close(Db), - Seq. - -setup() -> - {ok, DbA} = couch_db:create(<<"etap-test-source">>, []), - {ok, DbB} = couch_db:create(<<"etap-test-target">>, []), - [DbA, DbB]. - -teardown([DbA, DbB]) -> - couch_db:close(DbA), - couch_db:close(DbB), - couch_server:delete(<<"etap-test-source">>, []), - couch_server:delete(<<"etap-test-target">>, []), - ok. - -start_changes_feed(local, Since, Continuous) -> - Props = [{<<"continuous">>, Continuous}], - couch_rep_changes_feed:start_link(self(), get_db(source), Since, Props); -start_changes_feed(remote, Since, Continuous) -> - Props = [{<<"continuous">>, Continuous}], - Db = #http_db{url = "http://127.0.0.1:5984/etap-test-source/"}, - couch_rep_changes_feed:start_link(self(), Db, Since, Props). - -start_missing_revs(local, Changes) -> - couch_rep_missing_revs:start_link(self(), get_db(target), Changes, []); -start_missing_revs(remote, Changes) -> - Db = #http_db{url = "http://127.0.0.1:5984/etap-test-target/"}, - couch_rep_missing_revs:start_link(self(), Db, Changes, []). diff --git a/test/etap/113-replication-attachment-comp.t b/test/etap/113-replication-attachment-comp.t deleted file mode 100755 index 30f602ef..00000000 --- a/test/etap/113-replication-attachment-comp.t +++ /dev/null @@ -1,273 +0,0 @@ -#!/usr/bin/env escript -%% -*- erlang -*- - -% Licensed under the Apache License, Version 2.0 (the "License"); you may not -% use this file except in compliance with the License. You may obtain a copy of -% the License at -% -% http://www.apache.org/licenses/LICENSE-2.0 -% -% Unless required by applicable law or agreed to in writing, software -% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -% License for the specific language governing permissions and limitations under -% the License. - --record(user_ctx, { - name = null, - roles = [], - handler -}). 
- -default_config() -> - test_util:build_file("etc/couchdb/default_dev.ini"). - -test_db_a_name() -> - <<"couch_test_rep_att_comp_a">>. - -test_db_b_name() -> - <<"couch_test_rep_att_comp_b">>. - -main(_) -> - test_util:init_code_path(), - etap:plan(30), - case (catch test()) of - ok -> - etap:end_tests(); - Other -> - etap:diag(io_lib:format("Test died abnormally: ~p", [Other])), - etap:bail(Other) - end, - ok. - -test() -> - couch_server_sup:start_link([default_config()]), - put(addr, couch_config:get("httpd", "bind_address", "127.0.0.1")), - put(port, couch_config:get("httpd", "port", "5984")), - application:start(inets), - ibrowse:start(), - timer:sleep(1000), - - % - % test pull replication - % - - delete_db(test_db_a_name()), - delete_db(test_db_b_name()), - create_db(test_db_a_name()), - create_db(test_db_b_name()), - - % enable compression - couch_config:set("attachments", "compression_level", "8"), - couch_config:set("attachments", "compressible_types", "text/*"), - - % store doc with text attachment in DB A - put_text_att(test_db_a_name()), - - % disable attachment compression - couch_config:set("attachments", "compression_level", "0"), - - % do pull replication - do_pull_replication(test_db_a_name(), test_db_b_name()), - - % verify that DB B has the attachment stored in compressed form - check_att_is_compressed(test_db_b_name()), - check_server_can_decompress_att(test_db_b_name()), - check_att_stubs(test_db_a_name(), test_db_b_name()), - - % - % test push replication - % - - delete_db(test_db_a_name()), - delete_db(test_db_b_name()), - create_db(test_db_a_name()), - create_db(test_db_b_name()), - - % enable compression - couch_config:set("attachments", "compression_level", "8"), - couch_config:set("attachments", "compressible_types", "text/*"), - - % store doc with text attachment in DB A - put_text_att(test_db_a_name()), - - % disable attachment compression - couch_config:set("attachments", "compression_level", "0"), - - % do push replication - do_push_replication(test_db_a_name(), test_db_b_name()), - - % verify that DB B has the attachment stored in compressed form - check_att_is_compressed(test_db_b_name()), - check_server_can_decompress_att(test_db_b_name()), - check_att_stubs(test_db_a_name(), test_db_b_name()), - - timer:sleep(3000), % to avoid mochiweb socket closed exceptions - delete_db(test_db_a_name()), - delete_db(test_db_b_name()), - couch_server_sup:stop(), - ok. - -put_text_att(DbName) -> - {ok, {{_, Code, _}, _Headers, _Body}} = http:request( - put, - {db_url(DbName) ++ "/testdoc1/readme.txt", [], - "text/plain", test_text_data()}, - [], - [{sync, true}]), - etap:is(Code, 201, "Created text attachment"), - ok. - -do_pull_replication(SourceDbName, TargetDbName) -> - RepObj = {[ - {<<"source">>, list_to_binary(db_url(SourceDbName))}, - {<<"target">>, TargetDbName} - ]}, - {ok, {{_, Code, _}, _Headers, Body}} = http:request( - post, - {rep_url(), [], - "application/json", list_to_binary(couch_util:json_encode(RepObj))}, - [], - [{sync, true}]), - etap:is(Code, 200, "Pull replication successfully triggered"), - Json = couch_util:json_decode(Body), - RepOk = couch_util:get_nested_json_value(Json, [<<"ok">>]), - etap:is(RepOk, true, "Pull replication completed with success"), - ok. 
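The push case below is the mirror image: a pull replication names the source as a full URL and the target as a plain database name, while a push names the local source and gives the target as a URL. Assuming the default bind address and port, the EJson bodies POSTed to _replicate come out roughly as:

% Pull: remote source, local target.
{[{<<"source">>, <<"http://127.0.0.1:5984/couch_test_rep_att_comp_a">>},
  {<<"target">>, <<"couch_test_rep_att_comp_b">>}]}

% Push: local source, remote target.
{[{<<"source">>, <<"couch_test_rep_att_comp_a">>},
  {<<"target">>, <<"http://127.0.0.1:5984/couch_test_rep_att_comp_b">>}]}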
- -do_push_replication(SourceDbName, TargetDbName) -> - RepObj = {[ - {<<"source">>, SourceDbName}, - {<<"target">>, list_to_binary(db_url(TargetDbName))} - ]}, - {ok, {{_, Code, _}, _Headers, Body}} = http:request( - post, - {rep_url(), [], - "application/json", list_to_binary(couch_util:json_encode(RepObj))}, - [], - [{sync, true}]), - etap:is(Code, 200, "Push replication successfully triggered"), - Json = couch_util:json_decode(Body), - RepOk = couch_util:get_nested_json_value(Json, [<<"ok">>]), - etap:is(RepOk, true, "Push replication completed with success"), - ok. - -check_att_is_compressed(DbName) -> - {ok, {{_, Code, _}, Headers, Body}} = http:request( - get, - {db_url(DbName) ++ "/testdoc1/readme.txt", - [{"Accept-Encoding", "gzip"}]}, - [], - [{sync, true}]), - etap:is(Code, 200, "HTTP response code for the attachment request is 200"), - Gziped = lists:member({"content-encoding", "gzip"}, Headers), - etap:is(Gziped, true, "The attachment was received in compressed form"), - Uncompressed = binary_to_list(zlib:gunzip(list_to_binary(Body))), - etap:is( - Uncompressed, - test_text_data(), - "The attachment content is valid after decompression at the client side" - ), - ok. - -check_server_can_decompress_att(DbName) -> - {ok, {{_, Code, _}, Headers, Body}} = http:request( - get, - {db_url(DbName) ++ "/testdoc1/readme.txt", []}, - [], - [{sync, true}]), - etap:is(Code, 200, "HTTP response code for the attachment request is 200"), - Gziped = lists:member({"content-encoding", "gzip"}, Headers), - etap:is( - Gziped, false, "The attachment was not received in compressed form" - ), - etap:is( - Body, - test_text_data(), - "The attachment content is valid after server decompression" - ), - ok. - -check_att_stubs(SourceDbName, TargetDbName) -> - {ok, {{_, Code1, _}, _Headers1, Body1}} = http:request( - get, - {db_url(SourceDbName) ++ "/testdoc1?att_encoding_info=true", []}, - [], - [{sync, true}]), - etap:is( - Code1, - 200, - "HTTP response code is 200 for the source DB doc request" - ), - Json1 = couch_util:json_decode(Body1), - SourceAttStub = couch_util:get_nested_json_value( - Json1, - [<<"_attachments">>, <<"readme.txt">>] - ), - {ok, {{_, Code2, _}, _Headers2, Body2}} = http:request( - get, - {db_url(TargetDbName) ++ "/testdoc1?att_encoding_info=true", []}, - [], - [{sync, true}]), - etap:is( - Code2, - 200, - "HTTP response code is 200 for the target DB doc request" - ), - Json2 = couch_util:json_decode(Body2), - TargetAttStub = couch_util:get_nested_json_value( - Json2, - [<<"_attachments">>, <<"readme.txt">>] - ), - IdenticalStubs = (SourceAttStub =:= TargetAttStub), - etap:is(IdenticalStubs, true, "Attachment stubs are identical"), - TargetAttStubLength = couch_util:get_nested_json_value( - TargetAttStub, - [<<"length">>] - ), - TargetAttStubEnc = couch_util:get_nested_json_value( - TargetAttStub, - [<<"encoding">>] - ), - etap:is( - TargetAttStubEnc, - <<"gzip">>, - "Attachment stub has encoding property set to gzip" - ), - TargetAttStubEncLength = couch_util:get_nested_json_value( - TargetAttStub, - [<<"encoded_length">>] - ), - EncLengthDefined = is_integer(TargetAttStubEncLength), - etap:is( - EncLengthDefined, - true, - "Stubs have the encoded_length field properly defined" - ), - EncLengthSmaller = (TargetAttStubEncLength < TargetAttStubLength), - etap:is( - EncLengthSmaller, - true, - "Stubs have the encoded_length field smaller than their length field" - ), - ok. - -admin_user_ctx() -> - {user_ctx, #user_ctx{roles=[<<"_admin">>]}}. 
- -create_db(DbName) -> - {ok, _} = couch_db:create(DbName, [admin_user_ctx()]). - -delete_db(DbName) -> - couch_server:delete(DbName, [admin_user_ctx()]). - -db_url(DbName) -> - "http://" ++ get(addr) ++ ":" ++ get(port) ++ "/" ++ - binary_to_list(DbName). - -rep_url() -> - "http://" ++ get(addr) ++ ":" ++ get(port) ++ "/_replicate". - -test_text_data() -> - {ok, Data} = file:read_file(test_util:source_file("README")), - binary_to_list(Data). diff --git a/test/etap/120-stats-collect.t b/test/etap/120-stats-collect.t deleted file mode 100755 index dee88765..00000000 --- a/test/etap/120-stats-collect.t +++ /dev/null @@ -1,150 +0,0 @@ -#!/usr/bin/env escript -%% -*- erlang -*- - -% Licensed under the Apache License, Version 2.0 (the "License"); you may not -% use this file except in compliance with the License. You may obtain a copy of -% the License at -% -% http://www.apache.org/licenses/LICENSE-2.0 -% -% Unless required by applicable law or agreed to in writing, software -% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -% License for the specific language governing permissions and limitations under -% the License. - -main(_) -> - test_util:init_code_path(), - etap:plan(11), - case (catch test()) of - ok -> - etap:end_tests(); - Other -> - etap:diag(io_lib:format("Test died abnormally: ~p", [Other])), - etap:bail() - end, - ok. - -test() -> - couch_stats_collector:start(), - ok = test_counters(), - ok = test_abs_values(), - ok = test_proc_counting(), - ok = test_all(), - ok. - -test_counters() -> - AddCount = fun() -> couch_stats_collector:increment(foo) end, - RemCount = fun() -> couch_stats_collector:decrement(foo) end, - repeat(AddCount, 100), - repeat(RemCount, 25), - repeat(AddCount, 10), - repeat(RemCount, 5), - etap:is( - couch_stats_collector:get(foo), - 80, - "Incrememnt tracks correctly." - ), - - repeat(RemCount, 80), - etap:is( - couch_stats_collector:get(foo), - 0, - "Decremented to zaro." - ), - ok. - -test_abs_values() -> - lists:map(fun(Val) -> - couch_stats_collector:record(bar, Val) - end, lists:seq(1, 15)), - etap:is( - couch_stats_collector:get(bar), - lists:seq(1, 15), - "Absolute values are recorded correctly." - ), - - couch_stats_collector:clear(bar), - etap:is( - couch_stats_collector:get(bar), - nil, - "Absolute values are cleared correctly." - ), - ok. - -test_proc_counting() -> - Self = self(), - OnePid = spawn(fun() -> - couch_stats_collector:track_process_count(hoopla), - Self ! reporting, - receive sepuku -> ok end - end), - R1 = erlang:monitor(process, OnePid), - receive reporting -> ok end, - etap:is( - couch_stats_collector:get(hoopla), - 1, - "track_process_count incrememnts the counter." - ), - - TwicePid = spawn(fun() -> - couch_stats_collector:track_process_count(hoopla), - couch_stats_collector:track_process_count(hoopla), - Self ! reporting, - receive sepuku -> ok end - end), - R2 = erlang:monitor(process, TwicePid), - receive reporting -> ok end, - etap:is( - couch_stats_collector:get(hoopla), - 3, - "track_process_count allows more than one incrememnt per Pid" - ), - - OnePid ! sepuku, - receive {'DOWN', R1, _, _, _} -> ok end, - timer:sleep(250), - etap:is( - couch_stats_collector:get(hoopla), - 2, - "Process count is decremented when process exits." - ), - - TwicePid ! 
sepuku, - receive {'DOWN', R2, _, _, _} -> ok end, - timer:sleep(250), - etap:is( - couch_stats_collector:get(hoopla), - 0, - "Process count is decremented for each call to track_process_count." - ), - ok. - -test_all() -> - couch_stats_collector:record(bar, 0.0), - couch_stats_collector:record(bar, 1.0), - etap:is( - couch_stats_collector:all(), - [{foo, 0}, {hoopla, 0}, {bar, [1.0, 0.0]}], - "all/0 returns all counters and absolute values." - ), - - etap:is( - couch_stats_collector:all(incremental), - [{foo, 0}, {hoopla, 0}], - "all/1 returns only the specified type." - ), - - couch_stats_collector:record(zing, 90), - etap:is( - couch_stats_collector:all(absolute), - [{zing, [90]}, {bar, [1.0, 0.0]}], - "all/1 returns only the specified type." - ), - ok. - -repeat(_, 0) -> - ok; -repeat(Fun, Count) -> - Fun(), - repeat(Fun, Count-1). diff --git a/test/etap/121-stats-aggregates.cfg b/test/etap/121-stats-aggregates.cfg deleted file mode 100644 index 30e475da..00000000 --- a/test/etap/121-stats-aggregates.cfg +++ /dev/null @@ -1,19 +0,0 @@ -% Licensed to the Apache Software Foundation (ASF) under one -% or more contributor license agreements. See the NOTICE file -% distributed with this work for additional information -% regarding copyright ownership. The ASF licenses this file -% to you under the Apache License, Version 2.0 (the -% "License"); you may not use this file except in compliance -% with the License. You may obtain a copy of the License at -% -% http://www.apache.org/licenses/LICENSE-2.0 -% -% Unless required by applicable law or agreed to in writing, -% software distributed under the License is distributed on an -% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -% KIND, either express or implied. See the License for the -% specific language governing permissions and limitations -% under the License. - -{testing, stuff, "yay description"}. -{number, '11', "randomosity"}. diff --git a/test/etap/121-stats-aggregates.ini b/test/etap/121-stats-aggregates.ini deleted file mode 100644 index cc5cd218..00000000 --- a/test/etap/121-stats-aggregates.ini +++ /dev/null @@ -1,20 +0,0 @@ -; Licensed to the Apache Software Foundation (ASF) under one -; or more contributor license agreements. See the NOTICE file -; distributed with this work for additional information -; regarding copyright ownership. The ASF licenses this file -; to you under the Apache License, Version 2.0 (the -; "License"); you may not use this file except in compliance -; with the License. You may obtain a copy of the License at -; -; http://www.apache.org/licenses/LICENSE-2.0 -; -; Unless required by applicable law or agreed to in writing, -; software distributed under the License is distributed on an -; "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -; KIND, either express or implied. See the License for the -; specific language governing permissions and limitations -; under the License. - -[stats] -rate = 10000000 ; We call collect_sample in testing -samples = [0, 1] diff --git a/test/etap/121-stats-aggregates.t b/test/etap/121-stats-aggregates.t deleted file mode 100755 index d678aa9d..00000000 --- a/test/etap/121-stats-aggregates.t +++ /dev/null @@ -1,171 +0,0 @@ -#!/usr/bin/env escript -%% -*- erlang -*- - -% Licensed under the Apache License, Version 2.0 (the "License"); you may not -% use this file except in compliance with the License. 
You may obtain a copy of -% the License at -% -% http://www.apache.org/licenses/LICENSE-2.0 -% -% Unless required by applicable law or agreed to in writing, software -% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -% License for the specific language governing permissions and limitations under -% the License. - -ini_file() -> - test_util:source_file("test/etap/121-stats-aggregates.ini"). - -cfg_file() -> - test_util:source_file("test/etap/121-stats-aggregates.cfg"). - -main(_) -> - test_util:init_code_path(), - etap:plan(17), - case (catch test()) of - ok -> - etap:end_tests(); - Other -> - etap:diag(io_lib:format("Test died abnormally: ~p", [Other])), - etap:bail() - end, - ok. - -test() -> - couch_config:start_link([ini_file()]), - couch_stats_collector:start(), - couch_stats_aggregator:start(cfg_file()), - ok = test_all_empty(), - ok = test_get_empty(), - ok = test_count_stats(), - ok = test_abs_stats(), - ok. - -test_all_empty() -> - {Aggs} = couch_stats_aggregator:all(), - - etap:is(length(Aggs), 2, "There are only two aggregate types in testing."), - etap:is( - couch_util:get_value(testing, Aggs), - {[{stuff, make_agg(<<"yay description">>, - null, null, null, null, null)}]}, - "{testing, stuff} is empty at start." - ), - etap:is( - couch_util:get_value(number, Aggs), - {[{'11', make_agg(<<"randomosity">>, - null, null, null, null, null)}]}, - "{number, '11'} is empty at start." - ), - ok. - -test_get_empty() -> - etap:is( - couch_stats_aggregator:get_json({testing, stuff}), - make_agg(<<"yay description">>, null, null, null, null, null), - "Getting {testing, stuff} returns an empty aggregate." - ), - etap:is( - couch_stats_aggregator:get_json({number, '11'}), - make_agg(<<"randomosity">>, null, null, null, null, null), - "Getting {number, '11'} returns an empty aggregate." - ), - ok. - -test_count_stats() -> - lists:foreach(fun(_) -> - couch_stats_collector:increment({testing, stuff}) - end, lists:seq(1, 100)), - couch_stats_aggregator:collect_sample(), - etap:is( - couch_stats_aggregator:get_json({testing, stuff}), - make_agg(<<"yay description">>, 100, 100, null, 100, 100), - "COUNT: Adding values changes the stats." - ), - etap:is( - couch_stats_aggregator:get_json({testing, stuff}, 1), - make_agg(<<"yay description">>, 100, 100, null, 100, 100), - "COUNT: Adding values changes stats for all times." - ), - - timer:sleep(500), - couch_stats_aggregator:collect_sample(), - etap:is( - couch_stats_aggregator:get_json({testing, stuff}), - make_agg(<<"yay description">>, 100, 50, 70.711, 0, 100), - "COUNT: Removing values changes stats." - ), - etap:is( - couch_stats_aggregator:get_json({testing, stuff}, 1), - make_agg(<<"yay description">>, 100, 50, 70.711, 0, 100), - "COUNT: Removing values changes stats for all times." - ), - - timer:sleep(600), - couch_stats_aggregator:collect_sample(), - etap:is( - couch_stats_aggregator:get_json({testing, stuff}), - make_agg(<<"yay description">>, 100, 33.333, 57.735, 0, 100), - "COUNT: Letting time passes doesn't remove data from time 0 aggregates" - ), - etap:is( - couch_stats_aggregator:get_json({testing, stuff}, 1), - make_agg(<<"yay description">>, 0, 0, 0, 0, 0), - "COUNT: Letting time pass removes data from other time aggregates." - ), - ok. 
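The magic numbers in the expectations above are just the sample statistics of the collected values. After the second collect_sample/0 the counter has produced the samples [100, 0], so sum = 100, mean = 50 and the sample standard deviation is sqrt(((100-50)^2 + (0-50)^2) / (2-1)) ~ 70.711; one more empty sample gives [100, 0, 0] with mean 33.333 and stddev 57.735, and the absolute-value test below gets 7.071 from the per-sample means [5, 15]. A small illustration of that calculation (not couch_stats_aggregator's code):

% Sample standard deviation (N-1 denominator), matching the expected values.
stddev(Values) when length(Values) > 1 ->
    N = length(Values),
    Mean = lists:sum(Values) / N,
    Var = lists:sum([(X - Mean) * (X - Mean) || X <- Values]) / (N - 1),
    math:sqrt(Var).

% stddev([100, 0])    -> 70.71067811865476
% stddev([100, 0, 0]) -> 57.73502691896258
% stddev([5, 15])     ->  7.0710678118654755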
- -test_abs_stats() -> - lists:foreach(fun(X) -> - couch_stats_collector:record({number, 11}, X) - end, lists:seq(0, 10)), - couch_stats_aggregator:collect_sample(), - etap:is( - couch_stats_aggregator:get_json({number, 11}), - make_agg(<<"randomosity">>, 5, 5, null, 5, 5), - "ABS: Adding values changes the stats." - ), - etap:is( - couch_stats_aggregator:get_json({number, 11}, 1), - make_agg(<<"randomosity">>, 5, 5, null, 5, 5), - "ABS: Adding values changes stats for all times." - ), - - timer:sleep(500), - couch_stats_collector:record({number, 11}, 15), - couch_stats_aggregator:collect_sample(), - etap:is( - couch_stats_aggregator:get_json({number, 11}), - make_agg(<<"randomosity">>, 20, 10, 7.071, 5, 15), - "ABS: New values changes stats" - ), - etap:is( - couch_stats_aggregator:get_json({number, 11}, 1), - make_agg(<<"randomosity">>, 20, 10, 7.071, 5, 15), - "ABS: Removing values changes stats for all times." - ), - - timer:sleep(600), - couch_stats_aggregator:collect_sample(), - etap:is( - couch_stats_aggregator:get_json({number, 11}), - make_agg(<<"randomosity">>, 20, 10, 7.071, 5, 15), - "ABS: Letting time passes doesn't remove data from time 0 aggregates" - ), - etap:is( - couch_stats_aggregator:get_json({number, 11}, 1), - make_agg(<<"randomosity">>, 15, 15, null, 15, 15), - "ABS: Letting time pass removes data from other time aggregates." - ), - ok. - -make_agg(Desc, Sum, Mean, StdDev, Min, Max) -> - {[ - {description, Desc}, - {current, Sum}, - {sum, Sum}, - {mean, Mean}, - {stddev, StdDev}, - {min, Min}, - {max, Max} - ]}. diff --git a/test/etap/130-attachments-md5.t b/test/etap/130-attachments-md5.t deleted file mode 100755 index 4c40f83a..00000000 --- a/test/etap/130-attachments-md5.t +++ /dev/null @@ -1,252 +0,0 @@ -#!/usr/bin/env escript -% Licensed under the Apache License, Version 2.0 (the "License"); you may not -% use this file except in compliance with the License. You may obtain a copy of -% the License at -% -% http://www.apache.org/licenses/LICENSE-2.0 -% -% Unless required by applicable law or agreed to in writing, software -% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -% License for the specific language governing permissions and limitations under -% the License. - -default_config() -> - test_util:build_file("etc/couchdb/default_dev.ini"). - -test_db_name() -> - <<"etap-test-db">>. - -docid() -> - case get(docid) of - undefined -> - put(docid, 1), - "1"; - Count -> - put(docid, Count+1), - integer_to_list(Count+1) - end. - -main(_) -> - test_util:init_code_path(), - - etap:plan(16), - case (catch test()) of - ok -> - etap:end_tests(); - Other -> - etap:diag(io_lib:format("Test died abnormally: ~p", [Other])), - etap:bail(Other) - end, - ok. - -test() -> - couch_server_sup:start_link([default_config()]), - Addr = couch_config:get("httpd", "bind_address", any), - Port = list_to_integer(couch_config:get("httpd", "port", "5984")), - put(addr, Addr), - put(port, Port), - timer:sleep(1000), - - couch_server:delete(test_db_name(), []), - couch_db:create(test_db_name(), []), - - test_identity_without_md5(), - test_chunked_without_md5(), - - test_identity_with_valid_md5(), - test_chunked_with_valid_md5_header(), - test_chunked_with_valid_md5_trailer(), - - test_identity_with_invalid_md5(), - test_chunked_with_invalid_md5_header(), - test_chunked_with_invalid_md5_trailer(), - - couch_server:delete(test_db_name(), []), - couch_server_sup:stop(), - ok. 
- -test_identity_without_md5() -> - Data = [ - "PUT /", test_db_name(), "/", docid(), "/readme.txt HTTP/1.1\r\n", - "Content-Type: text/plain\r\n", - "Content-Length: 34\r\n", - "\r\n", - "We all live in a yellow submarine!"], - - {Code, Json} = do_request(Data), - etap:is(Code, 201, "Stored with identity encoding and no MD5"), - etap:is(get_json(Json, [<<"ok">>]), true, "Body indicates success."). - -test_chunked_without_md5() -> - AttData = <<"We all live in a yellow submarine!">>, - <<Part1:21/binary, Part2:13/binary>> = AttData, - Data = [ - "PUT /", test_db_name(), "/", docid(), "/readme.txt HTTP/1.1\r\n", - "Content-Type: text/plain\r\n", - "Transfer-Encoding: chunked\r\n", - "\r\n", - to_hex(size(Part1)), "\r\n", - Part1, "\r\n", - to_hex(size(Part2)), "\r\n", - Part2, "\r\n" - "0\r\n" - "\r\n"], - - {Code, Json} = do_request(Data), - etap:is(Code, 201, "Stored with chunked encoding and no MD5"), - etap:is(get_json(Json, [<<"ok">>]), true, "Body indicates success."). - -test_identity_with_valid_md5() -> - AttData = "We all live in a yellow submarine!", - Data = [ - "PUT /", test_db_name(), "/", docid(), "/readme.txt HTTP/1.1\r\n", - "Content-Type: text/plain\r\n", - "Content-Length: 34\r\n", - "Content-MD5: ", base64:encode(couch_util:md5(AttData)), "\r\n", - "\r\n", - AttData], - - {Code, Json} = do_request(Data), - etap:is(Code, 201, "Stored with identity encoding and valid MD5"), - etap:is(get_json(Json, [<<"ok">>]), true, "Body indicates success."). - -test_chunked_with_valid_md5_header() -> - AttData = <<"We all live in a yellow submarine!">>, - <<Part1:21/binary, Part2:13/binary>> = AttData, - Data = [ - "PUT /", test_db_name(), "/", docid(), "/readme.txt HTTP/1.1\r\n", - "Content-Type: text/plain\r\n", - "Transfer-Encoding: chunked\r\n", - "Content-MD5: ", base64:encode(couch_util:md5(AttData)), "\r\n", - "\r\n", - to_hex(size(Part1)), "\r\n", - Part1, "\r\n", - to_hex(size(Part2)), "\r\n", - Part2, "\r\n", - "0\r\n", - "\r\n"], - - {Code, Json} = do_request(Data), - etap:is(Code, 201, "Stored with chunked encoding and valid MD5 header."), - etap:is(get_json(Json, [<<"ok">>]), true, "Body indicates success."). - -test_chunked_with_valid_md5_trailer() -> - AttData = <<"We all live in a yellow submarine!">>, - <<Part1:21/binary, Part2:13/binary>> = AttData, - Data = [ - "PUT /", test_db_name(), "/", docid(), "/readme.txt HTTP/1.1\r\n", - "Content-Type: text/plain\r\n", - "Transfer-Encoding: chunked\r\n", - "Trailer: Content-MD5\r\n", - "\r\n", - to_hex(size(Part1)), "\r\n", - Part1, "\r\n", - to_hex(size(Part2)), "\r\n", - Part2, "\r\n", - "0\r\n", - "Content-MD5: ", base64:encode(couch_util:md5(AttData)), "\r\n", - "\r\n"], - - {Code, Json} = do_request(Data), - etap:is(Code, 201, "Stored with chunked encoding and valid MD5 trailer."), - etap:is(get_json(Json, [<<"ok">>]), true, "Body indicates success."). - -test_identity_with_invalid_md5() -> - Data = [ - "PUT /", test_db_name(), "/", docid(), "/readme.txt HTTP/1.1\r\n", - "Content-Type: text/plain\r\n", - "Content-Length: 34\r\n", - "Content-MD5: ", base64:encode(<<"foobar!">>), "\r\n", - "\r\n", - "We all live in a yellow submarine!"], - - {Code, Json} = do_request(Data), - etap:is(Code, 400, "Invalid MD5 header causes an error: identity"), - etap:is( - get_json(Json, [<<"error">>]), - <<"content_md5_mismatch">>, - "Body indicates reason for failure." - ). 
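All of these requests are assembled by hand, so two details carry the weight: the Content-MD5 value is the base64 encoding of the raw MD5 digest of the attachment body, and in the chunked cases every chunk is prefixed with its size in hex (the to_hex/1 helper further down) and the body ends with a zero-length chunk, optionally followed by a Content-MD5 trailer. A minimal sketch of those two pieces, using hypothetical helper names:

% Content-MD5 header/trailer value: base64 of the raw 16-byte MD5 digest.
content_md5(Body) ->
    base64:encode(erlang:md5(Body)).

% Frame one binary as a single HTTP chunk plus the terminating zero chunk.
chunk(Bin) ->
    [erlang:integer_to_list(byte_size(Bin), 16), "\r\n", Bin, "\r\n",
     "0\r\n\r\n"].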
- -test_chunked_with_invalid_md5_header() -> - AttData = <<"We all live in a yellow submarine!">>, - <<Part1:21/binary, Part2:13/binary>> = AttData, - Data = [ - "PUT /", test_db_name(), "/", docid(), "/readme.txt HTTP/1.1\r\n", - "Content-Type: text/plain\r\n", - "Transfer-Encoding: chunked\r\n", - "Content-MD5: ", base64:encode(<<"so sneaky...">>), "\r\n", - "\r\n", - to_hex(size(Part1)), "\r\n", - Part1, "\r\n", - to_hex(size(Part2)), "\r\n", - Part2, "\r\n", - "0\r\n", - "\r\n"], - - {Code, Json} = do_request(Data), - etap:is(Code, 400, "Invalid MD5 header causes an error: chunked"), - etap:is( - get_json(Json, [<<"error">>]), - <<"content_md5_mismatch">>, - "Body indicates reason for failure." - ). - -test_chunked_with_invalid_md5_trailer() -> - AttData = <<"We all live in a yellow submarine!">>, - <<Part1:21/binary, Part2:13/binary>> = AttData, - Data = [ - "PUT /", test_db_name(), "/", docid(), "/readme.txt HTTP/1.1\r\n", - "Content-Type: text/plain\r\n", - "Transfer-Encoding: chunked\r\n", - "Trailer: Content-MD5\r\n", - "\r\n", - to_hex(size(Part1)), "\r\n", - Part1, "\r\n", - to_hex(size(Part2)), "\r\n", - Part2, "\r\n", - "0\r\n", - "Content-MD5: ", base64:encode(<<"Kool-Aid Fountain!">>), "\r\n", - "\r\n"], - - {Code, Json} = do_request(Data), - etap:is(Code, 400, "Invalid MD5 Trailer causes an error"), - etap:is( - get_json(Json, [<<"error">>]), - <<"content_md5_mismatch">>, - "Body indicates reason for failure." - ). - - -get_socket() -> - Options = [binary, {packet, 0}, {active, false}], - {ok, Sock} = gen_tcp:connect(get(addr), get(port), Options), - Sock. - -do_request(Request) -> - Sock = get_socket(), - gen_tcp:send(Sock, list_to_binary(lists:flatten(Request))), - timer:sleep(1000), - {ok, R} = gen_tcp:recv(Sock, 0), - gen_tcp:close(Sock), - [Header, Body] = re:split(R, "\r\n\r\n", [{return, binary}]), - {ok, {http_response, _, Code, _}, _} = - erlang:decode_packet(http, Header, []), - Json = couch_util:json_decode(Body), - {Code, Json}. - -get_json(Json, Path) -> - couch_util:get_nested_json_value(Json, Path). - -to_hex(Val) -> - to_hex(Val, []). - -to_hex(0, Acc) -> - Acc; -to_hex(Val, Acc) -> - to_hex(Val div 16, [hex_char(Val rem 16) | Acc]). - -hex_char(V) when V < 10 -> $0 + V; -hex_char(V) -> $A + V - 10. - diff --git a/test/etap/140-attachment-comp.t b/test/etap/140-attachment-comp.t deleted file mode 100755 index 98d37abc..00000000 --- a/test/etap/140-attachment-comp.t +++ /dev/null @@ -1,711 +0,0 @@ -#!/usr/bin/env escript -%% -*- erlang -*- - -% Licensed under the Apache License, Version 2.0 (the "License"); you may not -% use this file except in compliance with the License. You may obtain a copy of -% the License at -% -% http://www.apache.org/licenses/LICENSE-2.0 -% -% Unless required by applicable law or agreed to in writing, software -% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -% License for the specific language governing permissions and limitations under -% the License. - -default_config() -> - test_util:build_file("etc/couchdb/default_dev.ini"). - -test_db_name() -> - <<"couch_test_atts_compression">>. - -main(_) -> - test_util:init_code_path(), - - etap:plan(78), - case (catch test()) of - ok -> - etap:end_tests(); - Other -> - etap:diag(io_lib:format("Test died abnormally: ~p", [Other])), - etap:bail(Other) - end, - ok. 
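The couch_config:set/3 calls at the top of test/0 below are the runtime equivalent of the following ini settings (the test later flips compression_level back to 0 to turn compression off):

[attachments]
compression_level = 8        ; 0 disables compression, higher values compress harder
compressible_types = text/*  ; only attachments with a matching MIME type are gzipped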
- -test() -> - couch_server_sup:start_link([default_config()]), - put(addr, couch_config:get("httpd", "bind_address", "127.0.0.1")), - put(port, couch_config:get("httpd", "port", "5984")), - application:start(inets), - timer:sleep(1000), - couch_server:delete(test_db_name(), []), - couch_db:create(test_db_name(), []), - - couch_config:set("attachments", "compression_level", "8"), - couch_config:set("attachments", "compressible_types", "text/*"), - - create_1st_text_att(), - create_1st_png_att(), - create_2nd_text_att(), - create_2nd_png_att(), - - tests_for_1st_text_att(), - tests_for_1st_png_att(), - tests_for_2nd_text_att(), - tests_for_2nd_png_att(), - - create_already_compressed_att(db_url() ++ "/doc_comp_att", "readme.txt"), - test_already_compressed_att(db_url() ++ "/doc_comp_att", "readme.txt"), - - test_create_already_compressed_att_with_invalid_content_encoding( - db_url() ++ "/doc_att_deflate", - "readme.txt", - zlib:compress(test_text_data()), - "deflate" - ), - - test_create_already_compressed_att_with_invalid_content_encoding( - db_url() ++ "/doc_att_compress", - "readme.txt", - % Note: As of OTP R13B04, it seems there's no LZW compression - % (i.e. UNIX compress utility implementation) lib in OTP. - % However there's a simple working Erlang implementation at: - % http://scienceblogs.com/goodmath/2008/01/simple_lempelziv_compression_i.php - test_text_data(), - "compress" - ), - - timer:sleep(3000), % to avoid mochiweb socket closed exceptions - couch_server:delete(test_db_name(), []), - couch_server_sup:stop(), - ok. - -db_url() -> - "http://" ++ get(addr) ++ ":" ++ get(port) ++ "/" ++ - binary_to_list(test_db_name()). - -create_1st_text_att() -> - {ok, {{_, Code, _}, _Headers, _Body}} = http:request( - put, - {db_url() ++ "/testdoc1/readme.txt", [], - "text/plain", test_text_data()}, - [], - [{sync, true}]), - etap:is(Code, 201, "Created text attachment using the standalone api"), - ok. - -create_1st_png_att() -> - {ok, {{_, Code, _}, _Headers, _Body}} = http:request( - put, - {db_url() ++ "/testdoc2/icon.png", [], - "image/png", test_png_data()}, - [], - [{sync, true}]), - etap:is(Code, 201, "Created png attachment using the standalone api"), - ok. - -% create a text attachment using the non-standalone attachment api -create_2nd_text_att() -> - DocJson = {[ - {<<"_attachments">>, {[ - {<<"readme.txt">>, {[ - {<<"content_type">>, <<"text/plain">>}, - {<<"data">>, base64:encode(test_text_data())} - ]} - }]}} - ]}, - {ok, {{_, Code, _}, _Headers, _Body}} = http:request( - put, - {db_url() ++ "/testdoc3", [], - "application/json", list_to_binary(couch_util:json_encode(DocJson))}, - [], - [{sync, true}]), - etap:is(Code, 201, "Created text attachment using the non-standalone api"), - ok. - -% create a png attachment using the non-standalone attachment api -create_2nd_png_att() -> - DocJson = {[ - {<<"_attachments">>, {[ - {<<"icon.png">>, {[ - {<<"content_type">>, <<"image/png">>}, - {<<"data">>, base64:encode(test_png_data())} - ]} - }]}} - ]}, - {ok, {{_, Code, _}, _Headers, _Body}} = http:request( - put, - {db_url() ++ "/testdoc4", [], - "application/json", list_to_binary(couch_util:json_encode(DocJson))}, - [], - [{sync, true}]), - etap:is(Code, 201, "Created png attachment using the non-standalone api"), - ok. 
- -create_already_compressed_att(DocUri, AttName) -> - {ok, {{_, Code, _}, _Headers, _Body}} = http:request( - put, - {DocUri ++ "/" ++ AttName, [{"Content-Encoding", "gzip"}], - "text/plain", zlib:gzip(test_text_data())}, - [], - [{sync, true}]), - etap:is( - Code, - 201, - "Created already compressed attachment using the standalone api" - ), - ok. - -tests_for_1st_text_att() -> - test_get_1st_text_att_with_accept_encoding_gzip(), - test_get_1st_text_att_without_accept_encoding_header(), - test_get_1st_text_att_with_accept_encoding_deflate(), - test_get_1st_text_att_with_accept_encoding_deflate_only(), - test_get_doc_with_1st_text_att(), - test_1st_text_att_stub(). - -tests_for_1st_png_att() -> - test_get_1st_png_att_without_accept_encoding_header(), - test_get_1st_png_att_with_accept_encoding_gzip(), - test_get_1st_png_att_with_accept_encoding_deflate(), - test_get_doc_with_1st_png_att(), - test_1st_png_att_stub(). - -tests_for_2nd_text_att() -> - test_get_2nd_text_att_with_accept_encoding_gzip(), - test_get_2nd_text_att_without_accept_encoding_header(), - test_get_doc_with_2nd_text_att(), - test_2nd_text_att_stub(). - -tests_for_2nd_png_att() -> - test_get_2nd_png_att_without_accept_encoding_header(), - test_get_2nd_png_att_with_accept_encoding_gzip(), - test_get_doc_with_2nd_png_att(), - test_2nd_png_att_stub(). - -test_get_1st_text_att_with_accept_encoding_gzip() -> - {ok, {{_, Code, _}, Headers, Body}} = http:request( - get, - {db_url() ++ "/testdoc1/readme.txt", [{"Accept-Encoding", "gzip"}]}, - [], - [{sync, true}]), - etap:is(Code, 200, "HTTP response code is 200"), - Gziped = lists:member({"content-encoding", "gzip"}, Headers), - etap:is(Gziped, true, "received body is gziped"), - Uncompressed = binary_to_list(zlib:gunzip(list_to_binary(Body))), - etap:is( - Uncompressed, - test_text_data(), - "received data for the 1st text attachment is ok" - ), - ok. - -test_get_1st_text_att_without_accept_encoding_header() -> - {ok, {{_, Code, _}, Headers, Body}} = http:request( - get, - {db_url() ++ "/testdoc1/readme.txt", []}, - [], - [{sync, true}]), - etap:is(Code, 200, "HTTP response code is 200"), - Gziped = lists:member({"content-encoding", "gzip"}, Headers), - etap:is(Gziped, false, "received body is not gziped"), - etap:is( - Body, - test_text_data(), - "received data for the 1st text attachment is ok" - ), - ok. - -test_get_1st_text_att_with_accept_encoding_deflate() -> - {ok, {{_, Code, _}, Headers, Body}} = http:request( - get, - {db_url() ++ "/testdoc1/readme.txt", [{"Accept-Encoding", "deflate"}]}, - [], - [{sync, true}]), - etap:is(Code, 200, "HTTP response code is 200"), - Gziped = lists:member({"content-encoding", "gzip"}, Headers), - etap:is(Gziped, false, "received body is not gziped"), - Deflated = lists:member({"content-encoding", "deflate"}, Headers), - etap:is(Deflated, false, "received body is not deflated"), - etap:is( - Body, - test_text_data(), - "received data for the 1st text attachment is ok" - ), - ok. - -test_get_1st_text_att_with_accept_encoding_deflate_only() -> - {ok, {{_, Code, _}, _Headers, _Body}} = http:request( - get, - {db_url() ++ "/testdoc1/readme.txt", - [{"Accept-Encoding", "deflate, *;q=0"}]}, - [], - [{sync, true}]), - etap:is( - Code, - 406, - "HTTP response code is 406 for an unsupported content encoding request" - ), - ok. 
- -test_get_1st_png_att_without_accept_encoding_header() -> - {ok, {{_, Code, _}, Headers, Body}} = http:request( - get, - {db_url() ++ "/testdoc2/icon.png", []}, - [], - [{sync, true}]), - etap:is(Code, 200, "HTTP response code is 200"), - Gziped = lists:member({"content-encoding", "gzip"}, Headers), - etap:is(Gziped, false, "received body is not gziped"), - etap:is( - Body, - test_png_data(), - "received data for the 1st png attachment is ok" - ), - ok. - -test_get_1st_png_att_with_accept_encoding_gzip() -> - {ok, {{_, Code, _}, Headers, Body}} = http:request( - get, - {db_url() ++ "/testdoc2/icon.png", [{"Accept-Encoding", "gzip"}]}, - [], - [{sync, true}]), - etap:is(Code, 200, "HTTP response code is 200"), - Gziped = lists:member({"content-encoding", "gzip"}, Headers), - etap:is(Gziped, false, "received body is not gziped"), - etap:is( - Body, - test_png_data(), - "received data for the 1st png attachment is ok" - ), - ok. - -test_get_1st_png_att_with_accept_encoding_deflate() -> - {ok, {{_, Code, _}, Headers, Body}} = http:request( - get, - {db_url() ++ "/testdoc2/icon.png", [{"Accept-Encoding", "deflate"}]}, - [], - [{sync, true}]), - etap:is(Code, 200, "HTTP response code is 200"), - Deflated = lists:member({"content-encoding", "deflate"}, Headers), - etap:is(Deflated, false, "received body is not deflated"), - Gziped = lists:member({"content-encoding", "gzip"}, Headers), - etap:is(Gziped, false, "received body is not gziped"), - etap:is( - Body, - test_png_data(), - "received data for the 1st png attachment is ok" - ), - ok. - -test_get_doc_with_1st_text_att() -> - {ok, {{_, Code, _}, _Headers, Body}} = http:request( - get, - {db_url() ++ "/testdoc1?attachments=true", []}, - [], - [{sync, true}]), - etap:is(Code, 200, "HTTP response code is 200"), - Json = couch_util:json_decode(Body), - TextAttJson = couch_util:get_nested_json_value( - Json, - [<<"_attachments">>, <<"readme.txt">>] - ), - TextAttType = couch_util:get_nested_json_value( - TextAttJson, - [<<"content_type">>] - ), - TextAttData = couch_util:get_nested_json_value( - TextAttJson, - [<<"data">>] - ), - etap:is( - TextAttType, - <<"text/plain">>, - "1st text attachment has type text/plain" - ), - %% check the attachment's data is the base64 encoding of the plain text - %% and not the base64 encoding of the gziped plain text - etap:is( - TextAttData, - base64:encode(test_text_data()), - "1st text attachment data is properly base64 encoded" - ), - ok. - -test_1st_text_att_stub() -> - {ok, {{_, Code, _}, _Headers, Body}} = http:request( - get, - {db_url() ++ "/testdoc1?att_encoding_info=true", []}, - [], - [{sync, true}]), - etap:is(Code, 200, "HTTP response code is 200"), - Json = couch_util:json_decode(Body), - {TextAttJson} = couch_util:get_nested_json_value( - Json, - [<<"_attachments">>, <<"readme.txt">>] - ), - TextAttLength = couch_util:get_value(<<"length">>, TextAttJson), - etap:is( - TextAttLength, - length(test_text_data()), - "1st text attachment stub length matches the uncompressed length" - ), - TextAttEncoding = couch_util:get_value(<<"encoding">>, TextAttJson), - etap:is( - TextAttEncoding, - <<"gzip">>, - "1st text attachment stub has the encoding field set to gzip" - ), - TextAttEncLength = couch_util:get_value(<<"encoded_length">>, TextAttJson), - etap:is( - TextAttEncLength, - iolist_size(zlib:gzip(test_text_data())), - "1st text attachment stub encoded_length matches the compressed length" - ), - ok. 
- -test_get_doc_with_1st_png_att() -> - {ok, {{_, Code, _}, _Headers, Body}} = http:request( - get, - {db_url() ++ "/testdoc2?attachments=true", []}, - [], - [{sync, true}]), - etap:is(Code, 200, "HTTP response code is 200"), - Json = couch_util:json_decode(Body), - PngAttJson = couch_util:get_nested_json_value( - Json, - [<<"_attachments">>, <<"icon.png">>] - ), - PngAttType = couch_util:get_nested_json_value( - PngAttJson, - [<<"content_type">>] - ), - PngAttData = couch_util:get_nested_json_value( - PngAttJson, - [<<"data">>] - ), - etap:is(PngAttType, <<"image/png">>, "attachment has type image/png"), - etap:is( - PngAttData, - base64:encode(test_png_data()), - "1st png attachment data is properly base64 encoded" - ), - ok. - -test_1st_png_att_stub() -> - {ok, {{_, Code, _}, _Headers, Body}} = http:request( - get, - {db_url() ++ "/testdoc2?att_encoding_info=true", []}, - [], - [{sync, true}]), - etap:is(Code, 200, "HTTP response code is 200"), - Json = couch_util:json_decode(Body), - {PngAttJson} = couch_util:get_nested_json_value( - Json, - [<<"_attachments">>, <<"icon.png">>] - ), - PngAttLength = couch_util:get_value(<<"length">>, PngAttJson), - etap:is( - PngAttLength, - length(test_png_data()), - "1st png attachment stub length matches the uncompressed length" - ), - PngEncoding = couch_util:get_value(<<"encoding">>, PngAttJson), - etap:is( - PngEncoding, - undefined, - "1st png attachment stub doesn't have an encoding field" - ), - PngEncLength = couch_util:get_value(<<"encoded_length">>, PngAttJson), - etap:is( - PngEncLength, - undefined, - "1st png attachment stub doesn't have an encoded_length field" - ), - ok. - -test_get_2nd_text_att_with_accept_encoding_gzip() -> - {ok, {{_, Code, _}, Headers, Body}} = http:request( - get, - {db_url() ++ "/testdoc3/readme.txt", [{"Accept-Encoding", "gzip"}]}, - [], - [{sync, true}]), - etap:is(Code, 200, "HTTP response code is 200"), - Gziped = lists:member({"content-encoding", "gzip"}, Headers), - etap:is(Gziped, true, "received body is gziped"), - Uncompressed = binary_to_list(zlib:gunzip(list_to_binary(Body))), - etap:is( - Uncompressed, - test_text_data(), - "received data for the 2nd text attachment is ok" - ), - ok. - -test_get_2nd_text_att_without_accept_encoding_header() -> - {ok, {{_, Code, _}, Headers, Body}} = http:request( - get, - {db_url() ++ "/testdoc3/readme.txt", []}, - [], - [{sync, true}]), - etap:is(Code, 200, "HTTP response code is 200"), - Gziped = lists:member({"content-encoding", "gzip"}, Headers), - etap:is(Gziped, false, "received body is not gziped"), - etap:is( - Body, - test_text_data(), - "received data for the 2nd text attachment is ok" - ), - ok. - -test_get_2nd_png_att_without_accept_encoding_header() -> - {ok, {{_, Code, _}, Headers, Body}} = http:request( - get, - {db_url() ++ "/testdoc4/icon.png", []}, - [], - [{sync, true}]), - etap:is(Code, 200, "HTTP response code is 200"), - Gziped = lists:member({"content-encoding", "gzip"}, Headers), - etap:is(Gziped, false, "received body is not gziped"), - etap:is( - Body, - test_png_data(), - "received data for the 2nd png attachment is ok" - ), - ok. 
- -test_get_2nd_png_att_with_accept_encoding_gzip() -> - {ok, {{_, Code, _}, Headers, Body}} = http:request( - get, - {db_url() ++ "/testdoc4/icon.png", [{"Accept-Encoding", "gzip"}]}, - [], - [{sync, true}]), - etap:is(Code, 200, "HTTP response code is 200"), - Gziped = lists:member({"content-encoding", "gzip"}, Headers), - etap:is(Gziped, false, "received body is not gziped"), - etap:is( - Body, - test_png_data(), - "received data for the 2nd png attachment is ok" - ), - ok. - -test_get_doc_with_2nd_text_att() -> - {ok, {{_, Code, _}, _Headers, Body}} = http:request( - get, - {db_url() ++ "/testdoc3?attachments=true", []}, - [], - [{sync, true}]), - etap:is(Code, 200, "HTTP response code is 200"), - Json = couch_util:json_decode(Body), - TextAttJson = couch_util:get_nested_json_value( - Json, - [<<"_attachments">>, <<"readme.txt">>] - ), - TextAttType = couch_util:get_nested_json_value( - TextAttJson, - [<<"content_type">>] - ), - TextAttData = couch_util:get_nested_json_value( - TextAttJson, - [<<"data">>] - ), - etap:is(TextAttType, <<"text/plain">>, "attachment has type text/plain"), - %% check the attachment's data is the base64 encoding of the plain text - %% and not the base64 encoding of the gziped plain text - etap:is( - TextAttData, - base64:encode(test_text_data()), - "2nd text attachment data is properly base64 encoded" - ), - ok. - -test_2nd_text_att_stub() -> - {ok, {{_, Code, _}, _Headers, Body}} = http:request( - get, - {db_url() ++ "/testdoc3?att_encoding_info=true", []}, - [], - [{sync, true}]), - etap:is(Code, 200, "HTTP response code is 200"), - Json = couch_util:json_decode(Body), - {TextAttJson} = couch_util:get_nested_json_value( - Json, - [<<"_attachments">>, <<"readme.txt">>] - ), - TextAttLength = couch_util:get_value(<<"length">>, TextAttJson), - etap:is( - TextAttLength, - length(test_text_data()), - "2nd text attachment stub length matches the uncompressed length" - ), - TextAttEncoding = couch_util:get_value(<<"encoding">>, TextAttJson), - etap:is( - TextAttEncoding, - <<"gzip">>, - "2nd text attachment stub has the encoding field set to gzip" - ), - TextAttEncLength = couch_util:get_value(<<"encoded_length">>, TextAttJson), - etap:is( - TextAttEncLength, - iolist_size(zlib:gzip(test_text_data())), - "2nd text attachment stub encoded_length matches the compressed length" - ), - ok. - -test_get_doc_with_2nd_png_att() -> - {ok, {{_, Code, _}, _Headers, Body}} = http:request( - get, - {db_url() ++ "/testdoc4?attachments=true", []}, - [], - [{sync, true}]), - etap:is(Code, 200, "HTTP response code is 200"), - Json = couch_util:json_decode(Body), - PngAttJson = couch_util:get_nested_json_value( - Json, - [<<"_attachments">>, <<"icon.png">>] - ), - PngAttType = couch_util:get_nested_json_value( - PngAttJson, - [<<"content_type">>] - ), - PngAttData = couch_util:get_nested_json_value( - PngAttJson, - [<<"data">>] - ), - etap:is(PngAttType, <<"image/png">>, "attachment has type image/png"), - etap:is( - PngAttData, - base64:encode(test_png_data()), - "2nd png attachment data is properly base64 encoded" - ), - ok. 
- -test_2nd_png_att_stub() -> - {ok, {{_, Code, _}, _Headers, Body}} = http:request( - get, - {db_url() ++ "/testdoc4?att_encoding_info=true", []}, - [], - [{sync, true}]), - etap:is(Code, 200, "HTTP response code is 200"), - Json = couch_util:json_decode(Body), - {PngAttJson} = couch_util:get_nested_json_value( - Json, - [<<"_attachments">>, <<"icon.png">>] - ), - PngAttLength = couch_util:get_value(<<"length">>, PngAttJson), - etap:is( - PngAttLength, - length(test_png_data()), - "2nd png attachment stub length matches the uncompressed length" - ), - PngEncoding = couch_util:get_value(<<"encoding">>, PngAttJson), - etap:is( - PngEncoding, - undefined, - "2nd png attachment stub doesn't have an encoding field" - ), - PngEncLength = couch_util:get_value(<<"encoded_length">>, PngAttJson), - etap:is( - PngEncLength, - undefined, - "2nd png attachment stub doesn't have an encoded_length field" - ), - ok. - -test_already_compressed_att(DocUri, AttName) -> - test_get_already_compressed_att_with_accept_gzip(DocUri, AttName), - test_get_already_compressed_att_without_accept(DocUri, AttName), - test_get_already_compressed_att_stub(DocUri, AttName). - -test_get_already_compressed_att_with_accept_gzip(DocUri, AttName) -> - {ok, {{_, Code, _}, Headers, Body}} = http:request( - get, - {DocUri ++ "/" ++ AttName, [{"Accept-Encoding", "gzip"}]}, - [], - [{sync, true}]), - etap:is(Code, 200, "HTTP response code is 200"), - Gziped = lists:member({"content-encoding", "gzip"}, Headers), - etap:is(Gziped, true, "received body is gziped"), - etap:is( - iolist_to_binary(Body), - iolist_to_binary(zlib:gzip(test_text_data())), - "received data for the already compressed attachment is ok" - ), - ok. - -test_get_already_compressed_att_without_accept(DocUri, AttName) -> - {ok, {{_, Code, _}, Headers, Body}} = http:request( - get, - {DocUri ++ "/" ++ AttName, []}, - [], - [{sync, true}]), - etap:is(Code, 200, "HTTP response code is 200"), - Gziped = lists:member({"content-encoding", "gzip"}, Headers), - etap:is(Gziped, false, "received body is not gziped"), - etap:is( - iolist_to_binary(Body), - iolist_to_binary(test_text_data()), - "received data for the already compressed attachment is ok" - ), - ok. - -test_get_already_compressed_att_stub(DocUri, AttName) -> - {ok, {{_, Code, _}, _Headers, Body}} = http:request( - get, - {DocUri ++ "?att_encoding_info=true", []}, - [], - [{sync, true}]), - etap:is(Code, 200, "HTTP response code is 200"), - Json = couch_util:json_decode(Body), - {AttJson} = couch_util:get_nested_json_value( - Json, - [<<"_attachments">>, iolist_to_binary(AttName)] - ), - AttLength = couch_util:get_value(<<"length">>, AttJson), - etap:is( - AttLength, - iolist_size((zlib:gzip(test_text_data()))), - "Already compressed attachment stub length matches the " - "compressed length" - ), - Encoding = couch_util:get_value(<<"encoding">>, AttJson), - etap:is( - Encoding, - <<"gzip">>, - "Already compressed attachment stub has the encoding field set to gzip" - ), - EncLength = couch_util:get_value(<<"encoded_length">>, AttJson), - etap:is( - EncLength, - AttLength, - "Already compressed attachment stub encoded_length matches the " - "length field value" - ), - ok. 
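
The stub assertions above reduce to a fixed relationship between the length, encoding and encoded_length fields. A sketch of those expectations, assuming Data is the original attachment body and GzData the bytes of an attachment uploaded with Content-Encoding: gzip (module and atoms are illustrative):

%% Sketch only: the field values the stub tests above assert.
-module(att_stub_sketch).
-export([expected_fields/2]).

%% Text attachment compressed by the server: length is the identity size,
%% encoded_length the size of the gzipped form kept on disk.
expected_fields(Data, server_compressed) ->
    [{length, iolist_size(Data)},
     {encoding, <<"gzip">>},
     {encoded_length, iolist_size(zlib:gzip(Data))}];
%% Attachment uploaded already gzipped: only the compressed bytes are known,
%% so length and encoded_length are both the compressed size.
expected_fields(GzData, uploaded_gzipped) ->
    [{length, iolist_size(GzData)},
     {encoding, <<"gzip">>},
     {encoded_length, iolist_size(GzData)}].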
- -test_create_already_compressed_att_with_invalid_content_encoding( - DocUri, AttName, AttData, Encoding) -> - {ok, {{_, Code, _}, _Headers, _Body}} = http:request( - put, - {DocUri ++ "/" ++ AttName, [{"Content-Encoding", Encoding}], - "text/plain", AttData}, - [], - [{sync, true}]), - etap:is( - Code, - 415, - "Couldn't create an already compressed attachment using the " - "unsupported encoding '" ++ Encoding ++ "'" - ), - ok. - -test_png_data() -> - {ok, Data} = file:read_file( - test_util:source_file("share/www/image/logo.png") - ), - binary_to_list(Data). - -test_text_data() -> - {ok, Data} = file:read_file( - test_util:source_file("README") - ), - binary_to_list(Data). diff --git a/test/etap/150-invalid-view-seq.t b/test/etap/150-invalid-view-seq.t deleted file mode 100755 index 0664c116..00000000 --- a/test/etap/150-invalid-view-seq.t +++ /dev/null @@ -1,192 +0,0 @@ -#!/usr/bin/env escript -%% -*- erlang -*- - -% Licensed under the Apache License, Version 2.0 (the "License"); you may not -% use this file except in compliance with the License. You may obtain a copy of -% the License at -% -% http://www.apache.org/licenses/LICENSE-2.0 -% -% Unless required by applicable law or agreed to in writing, software -% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -% License for the specific language governing permissions and limitations under -% the License. - --record(user_ctx, { - name = null, - roles = [], - handler -}). - -default_config() -> - test_util:build_file("etc/couchdb/default_dev.ini"). - -test_db_name() -> - <<"couch_test_invalid_view_seq">>. - -main(_) -> - test_util:init_code_path(), - - etap:plan(10), - case (catch test()) of - ok -> - etap:end_tests(); - Other -> - etap:diag(io_lib:format("Test died abnormally: ~p", [Other])), - etap:bail(Other) - end, - ok. - -%% NOTE: since during the test we stop the server, -%% a huge and ugly but harmless stack trace is sent to stderr -%% -test() -> - couch_server_sup:start_link([default_config()]), - timer:sleep(1000), - delete_db(), - create_db(), - - create_docs(), - create_design_doc(), - - % make DB file backup - backup_db_file(), - - put(addr, couch_config:get("httpd", "bind_address", "127.0.0.1")), - put(port, couch_config:get("httpd", "port", "5984")), - application:start(inets), - - create_new_doc(), - query_view_before_restore_backup(), - - % restore DB file backup after querying view - restore_backup_db_file(), - - query_view_after_restore_backup(), - - delete_db(), - couch_server_sup:stop(), - ok. - -admin_user_ctx() -> - {user_ctx, #user_ctx{roles=[<<"_admin">>]}}. - -create_db() -> - {ok, _} = couch_db:create(test_db_name(), [admin_user_ctx()]). - -delete_db() -> - couch_server:delete(test_db_name(), [admin_user_ctx()]). - -create_docs() -> - {ok, Db} = couch_db:open(test_db_name(), [admin_user_ctx()]), - Doc1 = couch_doc:from_json_obj({[ - {<<"_id">>, <<"doc1">>}, - {<<"value">>, 1} - - ]}), - Doc2 = couch_doc:from_json_obj({[ - {<<"_id">>, <<"doc2">>}, - {<<"value">>, 2} - - ]}), - Doc3 = couch_doc:from_json_obj({[ - {<<"_id">>, <<"doc3">>}, - {<<"value">>, 3} - - ]}), - {ok, _} = couch_db:update_docs(Db, [Doc1, Doc2, Doc3]), - couch_db:ensure_full_commit(Db), - couch_db:close(Db). 
- -create_design_doc() -> - {ok, Db} = couch_db:open(test_db_name(), [admin_user_ctx()]), - DDoc = couch_doc:from_json_obj({[ - {<<"_id">>, <<"_design/foo">>}, - {<<"language">>, <<"javascript">>}, - {<<"views">>, {[ - {<<"bar">>, {[ - {<<"map">>, <<"function(doc) { emit(doc.value, 1); }">>} - ]}} - ]}} - ]}), - {ok, _} = couch_db:update_docs(Db, [DDoc]), - couch_db:ensure_full_commit(Db), - couch_db:close(Db). - -backup_db_file() -> - DbFile = test_util:build_file("tmp/lib/" ++ - binary_to_list(test_db_name()) ++ ".couch"), - {ok, _} = file:copy(DbFile, DbFile ++ ".backup"), - ok. - -create_new_doc() -> - {ok, Db} = couch_db:open(test_db_name(), [admin_user_ctx()]), - Doc666 = couch_doc:from_json_obj({[ - {<<"_id">>, <<"doc666">>}, - {<<"value">>, 999} - - ]}), - {ok, _} = couch_db:update_docs(Db, [Doc666]), - couch_db:ensure_full_commit(Db), - couch_db:close(Db). - -db_url() -> - "http://" ++ get(addr) ++ ":" ++ get(port) ++ "/" ++ - binary_to_list(test_db_name()). - -query_view_before_restore_backup() -> - {ok, {{_, Code, _}, _Headers, Body}} = http:request( - get, - {db_url() ++ "/_design/foo/_view/bar", []}, - [], - [{sync, true}]), - etap:is(Code, 200, "Got view response before restoring backup."), - ViewJson = couch_util:json_decode(Body), - Rows = couch_util:get_nested_json_value(ViewJson, [<<"rows">>]), - HasDoc1 = has_doc("doc1", Rows), - HasDoc2 = has_doc("doc2", Rows), - HasDoc3 = has_doc("doc3", Rows), - HasDoc666 = has_doc("doc666", Rows), - etap:is(HasDoc1, true, "Before backup restore, view has doc1"), - etap:is(HasDoc2, true, "Before backup restore, view has doc2"), - etap:is(HasDoc3, true, "Before backup restore, view has doc3"), - etap:is(HasDoc666, true, "Before backup restore, view has doc666"), - ok. - -has_doc(DocId1, Rows) -> - DocId = iolist_to_binary(DocId1), - lists:any( - fun({R}) -> lists:member({<<"id">>, DocId}, R) end, - Rows - ). - -restore_backup_db_file() -> - couch_server_sup:stop(), - timer:sleep(3000), - DbFile = test_util:build_file("tmp/lib/" ++ - binary_to_list(test_db_name()) ++ ".couch"), - ok = file:delete(DbFile), - ok = file:rename(DbFile ++ ".backup", DbFile), - couch_server_sup:start_link([default_config()]), - timer:sleep(1000), - ok. - -query_view_after_restore_backup() -> - {ok, {{_, Code, _}, _Headers, Body}} = http:request( - get, - {db_url() ++ "/_design/foo/_view/bar", []}, - [], - [{sync, true}]), - etap:is(Code, 200, "Got view response after restoring backup."), - ViewJson = couch_util:json_decode(Body), - Rows = couch_util:get_nested_json_value(ViewJson, [<<"rows">>]), - HasDoc1 = has_doc("doc1", Rows), - HasDoc2 = has_doc("doc2", Rows), - HasDoc3 = has_doc("doc3", Rows), - HasDoc666 = has_doc("doc666", Rows), - etap:is(HasDoc1, true, "After backup restore, view has doc1"), - etap:is(HasDoc2, true, "After backup restore, view has doc2"), - etap:is(HasDoc3, true, "After backup restore, view has doc3"), - etap:is(HasDoc666, false, "After backup restore, view does not have doc666"), - ok. diff --git a/test/etap/160-vhosts.t b/test/etap/160-vhosts.t deleted file mode 100755 index 7694010a..00000000 --- a/test/etap/160-vhosts.t +++ /dev/null @@ -1,131 +0,0 @@ -#!/usr/bin/env escript -%% -*- erlang -*- - -% Licensed under the Apache License, Version 2.0 (the "License"); you may not -% use this file except in compliance with the License. 
You may obtain a copy of -% the License at -% -% http://www.apache.org/licenses/LICENSE-2.0 -% -% Unless required by applicable law or agreed to in writing, software -% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -% License for the specific language governing permissions and limitations under -% the License. - -%% XXX: Figure out how to -include("couch_rep.hrl") --record(http_db, { - url, - auth = [], - resource = "", - headers = [ - {"User-Agent", "CouchDB/"++couch:version()}, - {"Accept", "application/json"}, - {"Accept-Encoding", "gzip"} - ], - qs = [], - method = get, - body = nil, - options = [ - {response_format,binary}, - {inactivity_timeout, 30000} - ], - retries = 10, - pause = 1, - conn = nil -}). - --record(user_ctx, { - name = null, - roles = [], - handler -}). - -server() -> "http://127.0.0.1:5984/". -dbname() -> "etap-test-db". -admin_user_ctx() -> {user_ctx, #user_ctx{roles=[<<"_admin">>]}}. - -config_files() -> - lists:map(fun test_util:build_file/1, [ - "etc/couchdb/default_dev.ini", - "etc/couchdb/local_dev.ini" - ]). - -main(_) -> - test_util:init_code_path(), - - etap:plan(4), - case (catch test()) of - ok -> - etap:end_tests(); - Other -> - etap:diag(io_lib:format("Test died abnormally: ~p", [Other])), - etap:bail(Other) - end, - ok. - -test() -> - couch_server_sup:start_link(config_files()), - ibrowse:start(), - crypto:start(), - - couch_server:delete(list_to_binary(dbname()), [admin_user_ctx()]), - {ok, Db} = couch_db:create(list_to_binary(dbname()), [admin_user_ctx()]), - - Doc = couch_doc:from_json_obj({[ - {<<"_id">>, <<"doc1">>}, - {<<"value">>, 666} - ]}), - {ok, _} = couch_db:update_docs(Db, [Doc]), - couch_db:ensure_full_commit(Db), - - %% end boilerplate, start test - - couch_config:set("vhosts", "example.com", "/etap-test-db", false), - test_regular_request(), - test_vhost_request(), - test_vhost_request_with_qs(), - test_vhost_request_with_global(), - - %% restart boilerplate - couch_db:close(Db), - couch_server:delete(list_to_binary(dbname()), []), - ok. - -test_regular_request() -> - case ibrowse:send_req(server(), [], get, []) of - {ok, _, _, Body} -> - {[{<<"couchdb">>, <<"Welcome">>}, - {<<"version">>,_} - ]} = couch_util:json_decode(Body), - etap:is(true, true, "should return server info"); - _Else -> false - end. - -test_vhost_request() -> - case ibrowse:send_req(server(), [], get, [], [{host_header, "example.com"}]) of - {ok, _, _, Body} -> - {[{<<"db_name">>, <<"etap-test-db">>},_,_,_,_,_,_,_,_,_]} - = couch_util:json_decode(Body), - etap:is(true, true, "should return database info"); - _Else -> false - end. - -test_vhost_request_with_qs() -> - Url = server() ++ "doc1?revs_info=true", - case ibrowse:send_req(Url, [], get, [], [{host_header, "example.com"}]) of - {ok, _, _, Body} -> - {JsonProps} = couch_util:json_decode(Body), - HasRevsInfo = proplists:is_defined(<<"_revs_info">>, JsonProps), - etap:is(HasRevsInfo, true, "should return _revs_info"); - _Else -> false - end. - -test_vhost_request_with_global() -> - Url2 = server() ++ "_utils/index.html", - case ibrowse:send_req(Url2, [], get, [], [{host_header, "example.com"}]) of - {ok, _, _, Body2} -> - "<!DOCTYPE" ++ _Foo = Body2, - etap:is(true, true, "should serve /_utils even inside vhosts"); - _Else -> false - end. 
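
The couch_config:set("vhosts", ...) call exercised above corresponds to an ini entry along these lines (a sketch of the equivalent configuration file section); requests whose Host header matches the key are rewritten to the mapped path:

[vhosts]
example.com = /etap-test-db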
diff --git a/test/etap/run.tpl b/test/etap/run.tpl deleted file mode 100644 index faf0f456..00000000 --- a/test/etap/run.tpl +++ /dev/null @@ -1,27 +0,0 @@ -#!/bin/sh -e - -# Licensed under the Apache License, Version 2.0 (the "License"); you may not -# use this file except in compliance with the License. You may obtain a copy of -# the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations under -# the License. - -SRCDIR="%abs_top_srcdir%" -BUILDIR="%abs_top_builddir%" - -export ERL_FLAGS="$ERL_FLAGS -pa $BUILDIR/test/etap/" - -if test $# -gt 0; then - while [ $# -gt 0 ]; do - $1 - shift - done -else - prove $SRCDIR/test/etap/*.t -fi diff --git a/test/etap/test_util.erl.in b/test/etap/test_util.erl.in deleted file mode 100644 index 4c42edb1..00000000 --- a/test/etap/test_util.erl.in +++ /dev/null @@ -1,35 +0,0 @@ -% Licensed under the Apache License, Version 2.0 (the "License"); you may not -% use this file except in compliance with the License. You may obtain a copy of -% the License at -% -% http://www.apache.org/licenses/LICENSE-2.0 -% -% Unless required by applicable law or agreed to in writing, software -% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -% License for the specific language governing permissions and limitations under -% the License. - --module(test_util). - --export([init_code_path/0]). --export([source_file/1, build_file/1]). - -srcdir() -> - "@abs_top_srcdir@". - -builddir() -> - "@abs_top_builddir@". - -init_code_path() -> - Paths = ["etap", "couchdb", "erlang-oauth", "ibrowse", "mochiweb"], - lists:foreach(fun(Name) -> - code:add_pathz(filename:join([builddir(), "src", Name])) - end, Paths). - -source_file(Name) -> - filename:join([srcdir(), Name]). - -build_file(Name) -> - filename:join([builddir(), Name]). - diff --git a/test/javascript/cli_runner.js b/test/javascript/cli_runner.js deleted file mode 100644 index cdbe2e73..00000000 --- a/test/javascript/cli_runner.js +++ /dev/null @@ -1,52 +0,0 @@ -// Licensed under the Apache License, Version 2.0 (the "License"); you may not -// use this file except in compliance with the License. You may obtain a copy of -// the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations under -// the License. - -var console = { - log: function(arg) { - var msg = (arg.toString()).replace(/\n/g, "\n "); - print("# " + msg); - } -}; - -function T(arg1, arg2) { - if(!arg1) { - throw((arg2 ? arg2 : arg1).toString()); - } -} - -function runTestConsole(num, name, func) { - try { - func(); - print("ok " + num + " " + name); - } catch(e) { - msg = e.toString(); - msg = msg.replace(/\n/g, "\n "); - print("not ok " + num + " " + name + " " + msg); - } -} - -function runAllTestsConsole() { - var numTests = 0; - for(var t in couchTests) { numTests += 1; } - print("1.." 
+ numTests); - var testId = 0; - for(var t in couchTests) { - testId += 1; - runTestConsole(testId, t, couchTests[t]); - } -}; - -try { - runAllTestsConsole(); -} catch (e) { - p("# " + e.toString()); -} diff --git a/test/javascript/couch_http.js b/test/javascript/couch_http.js deleted file mode 100644 index 5f4716d2..00000000 --- a/test/javascript/couch_http.js +++ /dev/null @@ -1,62 +0,0 @@ -// Licensed under the Apache License, Version 2.0 (the "License"); you may not -// use this file except in compliance with the License. You may obtain a copy of -// the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -// License for the specific language governing permissions and limitations under -// the License. - -(function() { - CouchHTTP.prototype.base_url = "http://127.0.0.1:5984" - - if(typeof(CouchHTTP) != "undefined") { - CouchHTTP.prototype.open = function(method, url, async) { - if(!/^\s*http:\/\//.test(url)) { - if(/^[^\/]/.test(url)) { - url = this.base_url + "/" + url; - } else { - url = this.base_url + url; - } - } - - return this._open(method, url, async); - }; - - CouchHTTP.prototype.setRequestHeader = function(name, value) { - // Drop content-length headers because cURL will set it for us - // based on body length - if(name.toLowerCase().replace(/^\s+|\s+$/g, '') != "content-length") { - this._setRequestHeader(name, value); - } - } - - CouchHTTP.prototype.send = function(body) { - this._send(body || ""); - var headers = {}; - this._headers.forEach(function(hdr) { - var pair = hdr.split(":"); - var name = pair.shift(); - headers[name] = pair.join(":").replace(/^\s+|\s+$/g, ""); - }); - this.headers = headers; - }; - - CouchHTTP.prototype.getResponseHeader = function(name) { - for(var hdr in this.headers) { - if(hdr.toLowerCase() == name.toLowerCase()) { - return this.headers[hdr]; - } - } - return null; - }; - } -})(); - -CouchDB.urlPrefix = ""; -CouchDB.newXhr = function() { - return new CouchHTTP(); -}; diff --git a/test/javascript/run.tpl b/test/javascript/run.tpl deleted file mode 100644 index c5abe6e7..00000000 --- a/test/javascript/run.tpl +++ /dev/null @@ -1,30 +0,0 @@ -#!/bin/sh -e - -# Licensed under the Apache License, Version 2.0 (the "License"); you may not -# use this file except in compliance with the License. You may obtain a copy of -# the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations under -# the License. 
- -SRC_DIR=%abs_top_srcdir% -SCRIPT_DIR=$SRC_DIR/share/www/script -JS_TEST_DIR=$SRC_DIR/test/javascript - -COUCHJS=%abs_top_builddir%/src/couchdb/priv/couchjs - -cat $SCRIPT_DIR/json2.js \ - $SCRIPT_DIR/sha1.js \ - $SCRIPT_DIR/oauth.js \ - $SCRIPT_DIR/couch.js \ - $SCRIPT_DIR/couch_test_runner.js \ - $SCRIPT_DIR/couch_tests.js \ - $SCRIPT_DIR/test/*.js \ - $JS_TEST_DIR/couch_http.js \ - $JS_TEST_DIR/cli_runner.js \ - | $COUCHJS - diff --git a/test/view_server/query_server_spec.rb b/test/view_server/query_server_spec.rb deleted file mode 100644 index de1df5c1..00000000 --- a/test/view_server/query_server_spec.rb +++ /dev/null @@ -1,824 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may not -# use this file except in compliance with the License. You may obtain a copy of -# the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations under -# the License. - -# to run (requires ruby and rspec): -# spec test/view_server/query_server_spec.rb -f specdoc --color -# -# environment options: -# QS_TRACE=true -# shows full output from the query server -# QS_LANG=lang -# run tests on the query server (for now, one of: js, erlang) -# - -COUCH_ROOT = "#{File.dirname(__FILE__)}/../.." unless defined?(COUCH_ROOT) -LANGUAGE = ENV["QS_LANG"] || "js" - -puts "Running query server specs for #{LANGUAGE} query server" - -require 'spec' -require 'json' - -class OSProcessRunner - def self.run - trace = ENV["QS_TRACE"] || false - puts "launching #{run_command}" if trace - if block_given? - IO.popen(run_command, "r+") do |io| - qs = QueryServerRunner.new(io, trace) - yield qs - end - else - io = IO.popen(run_command, "r+") - QueryServerRunner.new(io, trace) - end - end - def initialize io, trace = false - @qsio = io - @trace = trace - end - def close - @qsio.close - end - def reset! 
- run(["reset"]) - end - def add_fun(fun) - run(["add_fun", fun]) - end - def teach_ddoc(ddoc) - run(["ddoc", "new", ddoc_id(ddoc), ddoc]) - end - def ddoc_run(ddoc, fun_path, args) - run(["ddoc", ddoc_id(ddoc), fun_path, args]) - end - def ddoc_id(ddoc) - d_id = ddoc["_id"] - raise 'ddoc must have _id' unless d_id - d_id - end - def get_chunks - resp = jsgets - raise "not a chunk" unless resp.first == "chunks" - return resp[1] - end - def run json - rrun json - jsgets - end - def rrun json - line = json.to_json - puts "run: #{line}" if @trace - @qsio.puts line - end - def rgets - resp = @qsio.gets - puts "got: #{resp}" if @trace - resp - end - def jsgets - resp = rgets - # err = @qserr.gets - # puts "err: #{err}" if err - if resp - begin - rj = JSON.parse("[#{resp.chomp}]")[0] - rescue JSON::ParserError - puts "JSON ERROR (dump under trace mode)" - # puts resp.chomp - while resp = rgets - # puts resp.chomp - end - end - if rj.respond_to?(:[]) && rj.is_a?(Array) - if rj[0] == "log" - log = rj[1] - puts "log: #{log}" if @trace - rj = jsgets - end - end - rj - else - raise "no response" - end - end -end - -class QueryServerRunner < OSProcessRunner - - COMMANDS = { - "js" => "#{COUCH_ROOT}/bin/couchjs_dev #{COUCH_ROOT}/share/server/main.js", - "erlang" => "#{COUCH_ROOT}/test/view_server/run_native_process.es" - } - - def self.run_command - COMMANDS[LANGUAGE] - end -end - -class ExternalRunner < OSProcessRunner - def self.run_command - "#{COUCH_ROOT}/src/couchdb/couchjs #{COUCH_ROOT}/share/server/echo.js" - end -end - -# we could organize this into a design document per language. -# that would make testing future languages really easy. - -functions = { - "emit-twice" => { - "js" => %{function(doc){emit("foo",doc.a); emit("bar",doc.a)}}, - "erlang" => <<-ERLANG - fun({Doc}) -> - A = couch_util:get_value(<<"a">>, Doc, null), - Emit(<<"foo">>, A), - Emit(<<"bar">>, A) - end. - ERLANG - }, - "emit-once" => { - "js" => <<-JS, - function(doc){ - emit("baz",doc.a) - } - JS - "erlang" => <<-ERLANG - fun({Doc}) -> - A = couch_util:get_value(<<"a">>, Doc, null), - Emit(<<"baz">>, A) - end. - ERLANG - }, - "reduce-values-length" => { - "js" => %{function(keys, values, rereduce) { return values.length; }}, - "erlang" => %{fun(Keys, Values, ReReduce) -> length(Values) end.} - }, - "reduce-values-sum" => { - "js" => %{function(keys, values, rereduce) { return sum(values); }}, - "erlang" => %{fun(Keys, Values, ReReduce) -> lists:sum(Values) end.} - }, - "validate-forbidden" => { - "js" => <<-JS, - function(newDoc, oldDoc, userCtx) { - if(newDoc.bad) - throw({forbidden:"bad doc"}); "foo bar"; - } - JS - "erlang" => <<-ERLANG - fun({NewDoc}, _OldDoc, _UserCtx) -> - case couch_util:get_value(<<"bad">>, NewDoc) of - undefined -> 1; - _ -> {[{forbidden, <<"bad doc">>}]} - end - end. - ERLANG - }, - "show-simple" => { - "js" => <<-JS, - function(doc, req) { - log("ok"); - return [doc.title, doc.body].join(' - '); - } - JS - "erlang" => <<-ERLANG - fun({Doc}, Req) -> - Title = couch_util:get_value(<<"title">>, Doc), - Body = couch_util:get_value(<<"body">>, Doc), - Resp = <<Title/binary, " - ", Body/binary>>, - {[{<<"body">>, Resp}]} - end. 
- ERLANG - }, - "show-headers" => { - "js" => <<-JS, - function(doc, req) { - var resp = {"code":200, "headers":{"X-Plankton":"Rusty"}}; - resp.body = [doc.title, doc.body].join(' - '); - return resp; - } - JS - "erlang" => <<-ERLANG - fun({Doc}, Req) -> - Title = couch_util:get_value(<<"title">>, Doc), - Body = couch_util:get_value(<<"body">>, Doc), - Resp = <<Title/binary, " - ", Body/binary>>, - {[ - {<<"code">>, 200}, - {<<"headers">>, {[{<<"X-Plankton">>, <<"Rusty">>}]}}, - {<<"body">>, Resp} - ]} - end. - ERLANG - }, - "show-sends" => { - "js" => <<-JS, - function(head, req) { - start({headers:{"Content-Type" : "text/plain"}}); - send("first chunk"); - send('second "chunk"'); - return "tail"; - }; - JS - "erlang" => <<-ERLANG - fun(Head, Req) -> - Resp = {[ - {<<"headers">>, {[{<<"Content-Type">>, <<"text/plain">>}]}} - ]}, - Start(Resp), - Send(<<"first chunk">>), - Send(<<"second \\\"chunk\\\"">>), - <<"tail">> - end. - ERLANG - }, - "show-while-get-rows" => { - "js" => <<-JS, - function(head, req) { - send("first chunk"); - send(req.q); - var row; - log("about to getRow " + typeof(getRow)); - while(row = getRow()) { - send(row.key); - }; - return "tail"; - }; - JS - "erlang" => <<-ERLANG, - fun(Head, {Req}) -> - Send(<<"first chunk">>), - Send(couch_util:get_value(<<"q">>, Req)), - Fun = fun({Row}, _) -> - Send(couch_util:get_value(<<"key">>, Row)), - {ok, nil} - end, - {ok, _} = FoldRows(Fun, nil), - <<"tail">> - end. - ERLANG - }, - "show-while-get-rows-multi-send" => { - "js" => <<-JS, - function(head, req) { - send("bacon"); - var row; - log("about to getRow " + typeof(getRow)); - while(row = getRow()) { - send(row.key); - send("eggs"); - }; - return "tail"; - }; - JS - "erlang" => <<-ERLANG, - fun(Head, Req) -> - Send(<<"bacon">>), - Fun = fun({Row}, _) -> - Send(couch_util:get_value(<<"key">>, Row)), - Send(<<"eggs">>), - {ok, nil} - end, - FoldRows(Fun, nil), - <<"tail">> - end. - ERLANG - }, - "list-simple" => { - "js" => <<-JS, - function(head, req) { - send("first chunk"); - send(req.q); - var row; - while(row = getRow()) { - send(row.key); - }; - return "early"; - }; - JS - "erlang" => <<-ERLANG, - fun(Head, {Req}) -> - Send(<<"first chunk">>), - Send(couch_util:get_value(<<"q">>, Req)), - Fun = fun({Row}, _) -> - Send(couch_util:get_value(<<"key">>, Row)), - {ok, nil} - end, - FoldRows(Fun, nil), - <<"early">> - end. - ERLANG - }, - "list-chunky" => { - "js" => <<-JS, - function(head, req) { - send("first chunk"); - send(req.q); - var row, i=0; - while(row = getRow()) { - send(row.key); - i += 1; - if (i > 2) { - return('early tail'); - } - }; - }; - JS - "erlang" => <<-ERLANG, - fun(Head, {Req}) -> - Send(<<"first chunk">>), - Send(couch_util:get_value(<<"q">>, Req)), - Fun = fun - ({Row}, Count) when Count < 2 -> - Send(couch_util:get_value(<<"key">>, Row)), - {ok, Count+1}; - ({Row}, Count) when Count == 2 -> - Send(couch_util:get_value(<<"key">>, Row)), - {stop, <<"early tail">>} - end, - {ok, Tail} = FoldRows(Fun, 0), - Tail - end. - ERLANG - }, - "list-old-style" => { - "js" => <<-JS, - function(head, req, foo, bar) { - return "stuff"; - } - JS - "erlang" => <<-ERLANG, - fun(Head, Req, Foo, Bar) -> - <<"stuff">> - end. 
- ERLANG - }, - "list-capped" => { - "js" => <<-JS, - function(head, req) { - send("bacon") - var row, i = 0; - while(row = getRow()) { - send(row.key); - i += 1; - if (i > 2) { - return('early'); - } - }; - } - JS - "erlang" => <<-ERLANG, - fun(Head, Req) -> - Send(<<"bacon">>), - Fun = fun - ({Row}, Count) when Count < 2 -> - Send(couch_util:get_value(<<"key">>, Row)), - {ok, Count+1}; - ({Row}, Count) when Count == 2 -> - Send(couch_util:get_value(<<"key">>, Row)), - {stop, <<"early">>} - end, - {ok, Tail} = FoldRows(Fun, 0), - Tail - end. - ERLANG - }, - "list-raw" => { - "js" => <<-JS, - function(head, req) { - // log(this.toSource()); - // log(typeof send); - send("first chunk"); - send(req.q); - var row; - while(row = getRow()) { - send(row.key); - }; - return "tail"; - }; - JS - "erlang" => <<-ERLANG, - fun(Head, {Req}) -> - Send(<<"first chunk">>), - Send(couch_util:get_value(<<"q">>, Req)), - Fun = fun({Row}, _) -> - Send(couch_util:get_value(<<"key">>, Row)), - {ok, nil} - end, - FoldRows(Fun, nil), - <<"tail">> - end. - ERLANG - }, - "filter-basic" => { - "js" => <<-JS, - function(doc, req) { - if (doc.good) { - return true; - } - } - JS - "erlang" => <<-ERLANG, - fun({Doc}, Req) -> - couch_util:get_value(<<"good">>, Doc) - end. - ERLANG - }, - "update-basic" => { - "js" => <<-JS, - function(doc, req) { - doc.world = "hello"; - var resp = [doc, "hello doc"]; - return resp; - } - JS - "erlang" => <<-ERLANG, - fun({Doc}, Req) -> - Doc2 = [{<<"world">>, <<"hello">>}|Doc], - [{Doc2}, {[{<<"body">>, <<"hello doc">>}]}] - end. - ERLANG - }, - "error" => { - "js" => <<-JS, - function() { - throw(["error","error_key","testing"]); - } - JS - "erlang" => <<-ERLANG - fun(A, B) -> - throw([<<"error">>,<<"error_key">>,<<"testing">>]) - end. - ERLANG - }, - "fatal" => { - "js" => <<-JS, - function() { - throw(["fatal","error_key","testing"]); - } - JS - "erlang" => <<-ERLANG - fun(A, B) -> - throw([<<"fatal">>,<<"error_key">>,<<"testing">>]) - end. - ERLANG - } -} - -def make_ddoc(fun_path, fun_str) - doc = {"_id"=>"foo"} - d = doc - while p = fun_path.shift - l = p - if !fun_path.empty? - d[p] = {} - d = d[p] - end - end - d[l] = fun_str - doc -end - -describe "query server normal case" do - before(:all) do - `cd #{COUCH_ROOT} && make` - @qs = QueryServerRunner.run - end - after(:all) do - @qs.close - end - it "should reset" do - @qs.run(["reset"]).should == true - end - it "should not erase ddocs on reset" do - @fun = functions["show-simple"][LANGUAGE] - @ddoc = make_ddoc(["shows","simple"], @fun) - @qs.teach_ddoc(@ddoc) - @qs.run(["reset"]).should == true - @qs.ddoc_run(@ddoc, - ["shows","simple"], - [{:title => "Best ever", :body => "Doc body"}, {}]).should == - ["resp", {"body" => "Best ever - Doc body"}] - end - - it "should run map funs" do - @qs.reset! - @qs.run(["add_fun", functions["emit-twice"][LANGUAGE]]).should == true - @qs.run(["add_fun", functions["emit-once"][LANGUAGE]]).should == true - rows = @qs.run(["map_doc", {:a => "b"}]) - rows[0][0].should == ["foo", "b"] - rows[0][1].should == ["bar", "b"] - rows[1][0].should == ["baz", "b"] - end - describe "reduce" do - before(:all) do - @fun = functions["reduce-values-length"][LANGUAGE] - @qs.reset! - end - it "should reduce" do - kvs = (0...10).collect{|i|[i,i*2]} - @qs.run(["reduce", [@fun], kvs]).should == [true, [10]] - end - end - describe "rereduce" do - before(:all) do - @fun = functions["reduce-values-sum"][LANGUAGE] - @qs.reset! 
- end - it "should rereduce" do - vs = (0...10).collect{|i|i} - @qs.run(["rereduce", [@fun], vs]).should == [true, [45]] - end - end - - describe "design docs" do - before(:all) do - @ddoc = { - "_id" => "foo" - } - @qs.reset! - end - it "should learn design docs" do - @qs.teach_ddoc(@ddoc).should == true - end - end - - # it "should validate" - describe "validation" do - before(:all) do - @fun = functions["validate-forbidden"][LANGUAGE] - @ddoc = make_ddoc(["validate_doc_update"], @fun) - @qs.teach_ddoc(@ddoc) - end - it "should allow good updates" do - @qs.ddoc_run(@ddoc, - ["validate_doc_update"], - [{"good" => true}, {}, {}]).should == 1 - end - it "should reject invalid updates" do - @qs.ddoc_run(@ddoc, - ["validate_doc_update"], - [{"bad" => true}, {}, {}]).should == {"forbidden"=>"bad doc"} - end - end - - describe "show" do - before(:all) do - @fun = functions["show-simple"][LANGUAGE] - @ddoc = make_ddoc(["shows","simple"], @fun) - @qs.teach_ddoc(@ddoc) - end - it "should show" do - @qs.ddoc_run(@ddoc, - ["shows","simple"], - [{:title => "Best ever", :body => "Doc body"}, {}]).should == - ["resp", {"body" => "Best ever - Doc body"}] - end - end - - describe "show with headers" do - before(:all) do - # TODO we can make real ddocs up there. - @fun = functions["show-headers"][LANGUAGE] - @ddoc = make_ddoc(["shows","headers"], @fun) - @qs.teach_ddoc(@ddoc) - end - it "should show headers" do - @qs.ddoc_run( - @ddoc, - ["shows","headers"], - [{:title => "Best ever", :body => "Doc body"}, {}] - ). - should == ["resp", {"code"=>200,"headers" => {"X-Plankton"=>"Rusty"}, "body" => "Best ever - Doc body"}] - end - end - - describe "recoverable error" do - before(:all) do - @fun = functions["error"][LANGUAGE] - @ddoc = make_ddoc(["shows","error"], @fun) - @qs.teach_ddoc(@ddoc) - end - it "should not exit" do - @qs.ddoc_run(@ddoc, ["shows","error"], - [{"foo"=>"bar"}, {"q" => "ok"}]). - should == ["error", "error_key", "testing"] - # still running - @qs.run(["reset"]).should == true - end - end - - describe "changes filter" do - before(:all) do - @fun = functions["filter-basic"][LANGUAGE] - @ddoc = make_ddoc(["filters","basic"], @fun) - @qs.teach_ddoc(@ddoc) - end - it "should only return true for good docs" do - @qs.ddoc_run(@ddoc, - ["filters","basic"], - [[{"key"=>"bam", "good" => true}, {"foo" => "bar"}, {"good" => true}], {"req" => "foo"}] - ). - should == [true, [true, false, true]] - end - end - - describe "update" do - before(:all) do - # in another patch we can remove this duplication - # by setting up the design doc for each language ahead of time. 
- @fun = functions["update-basic"][LANGUAGE] - @ddoc = make_ddoc(["updates","basic"], @fun) - @qs.teach_ddoc(@ddoc) - end - it "should return a doc and a resp body" do - up, doc, resp = @qs.ddoc_run(@ddoc, - ["updates","basic"], - [{"foo" => "gnarly"}, {"method" => "POST"}] - ) - up.should == "up" - doc.should == {"foo" => "gnarly", "world" => "hello"} - resp["body"].should == "hello doc" - end - end - -# end -# LIST TESTS -# __END__ - - describe "ddoc list" do - before(:all) do - @ddoc = { - "_id" => "foo", - "lists" => { - "simple" => functions["list-simple"][LANGUAGE], - "headers" => functions["show-sends"][LANGUAGE], - "rows" => functions["show-while-get-rows"][LANGUAGE], - "buffer-chunks" => functions["show-while-get-rows-multi-send"][LANGUAGE], - "chunky" => functions["list-chunky"][LANGUAGE] - } - } - @qs.teach_ddoc(@ddoc) - end - - describe "example list" do - it "should run normal" do - @qs.ddoc_run(@ddoc, - ["lists","simple"], - [{"foo"=>"bar"}, {"q" => "ok"}] - ).should == ["start", ["first chunk", "ok"], {"headers"=>{}}] - @qs.run(["list_row", {"key"=>"baz"}]).should == ["chunks", ["baz"]] - @qs.run(["list_row", {"key"=>"bam"}]).should == ["chunks", ["bam"]] - @qs.run(["list_row", {"key"=>"foom"}]).should == ["chunks", ["foom"]] - @qs.run(["list_row", {"key"=>"fooz"}]).should == ["chunks", ["fooz"]] - @qs.run(["list_row", {"key"=>"foox"}]).should == ["chunks", ["foox"]] - @qs.run(["list_end"]).should == ["end" , ["early"]] - end - end - - describe "headers" do - it "should do headers proper" do - @qs.ddoc_run(@ddoc, ["lists","headers"], - [{"total_rows"=>1000}, {"q" => "ok"}] - ).should == ["start", ["first chunk", 'second "chunk"'], - {"headers"=>{"Content-Type"=>"text/plain"}}] - @qs.rrun(["list_end"]) - @qs.jsgets.should == ["end", ["tail"]] - end - end - - describe "with rows" do - it "should list em" do - @qs.ddoc_run(@ddoc, ["lists","rows"], - [{"foo"=>"bar"}, {"q" => "ok"}]). - should == ["start", ["first chunk", "ok"], {"headers"=>{}}] - @qs.rrun(["list_row", {"key"=>"baz"}]) - @qs.get_chunks.should == ["baz"] - @qs.rrun(["list_row", {"key"=>"bam"}]) - @qs.get_chunks.should == ["bam"] - @qs.rrun(["list_end"]) - @qs.jsgets.should == ["end", ["tail"]] - end - it "should work with zero rows" do - @qs.ddoc_run(@ddoc, ["lists","rows"], - [{"foo"=>"bar"}, {"q" => "ok"}]). - should == ["start", ["first chunk", "ok"], {"headers"=>{}}] - @qs.rrun(["list_end"]) - @qs.jsgets.should == ["end", ["tail"]] - end - end - - describe "should buffer multiple chunks sent for a single row." do - it "should should buffer em" do - @qs.ddoc_run(@ddoc, ["lists","buffer-chunks"], - [{"foo"=>"bar"}, {"q" => "ok"}]). - should == ["start", ["bacon"], {"headers"=>{}}] - @qs.rrun(["list_row", {"key"=>"baz"}]) - @qs.get_chunks.should == ["baz", "eggs"] - @qs.rrun(["list_row", {"key"=>"bam"}]) - @qs.get_chunks.should == ["bam", "eggs"] - @qs.rrun(["list_end"]) - @qs.jsgets.should == ["end", ["tail"]] - end - end - it "should end after 2" do - @qs.ddoc_run(@ddoc, ["lists","chunky"], - [{"foo"=>"bar"}, {"q" => "ok"}]). - should == ["start", ["first chunk", "ok"], {"headers"=>{}}] - - @qs.run(["list_row", {"key"=>"baz"}]). - should == ["chunks", ["baz"]] - - @qs.run(["list_row", {"key"=>"bam"}]). - should == ["chunks", ["bam"]] - - @qs.run(["list_row", {"key"=>"foom"}]). - should == ["end", ["foom", "early tail"]] - # here's where js has to discard quit properly - @qs.run(["reset"]). 
- should == true - end - end - end - - - -def should_have_exited qs - begin - qs.run(["reset"]) - "raise before this (except Erlang)".should == true - rescue RuntimeError => e - e.message.should == "no response" - rescue Errno::EPIPE - true.should == true - end -end - -describe "query server that exits" do - before(:each) do - @qs = QueryServerRunner.run - @ddoc = { - "_id" => "foo", - "lists" => { - "capped" => functions["list-capped"][LANGUAGE], - "raw" => functions["list-raw"][LANGUAGE] - }, - "shows" => { - "fatal" => functions["fatal"][LANGUAGE] - } - } - @qs.teach_ddoc(@ddoc) - end - after(:each) do - @qs.close - end - - describe "only goes to 2 list" do - it "should exit if erlang sends too many rows" do - @qs.ddoc_run(@ddoc, ["lists","capped"], - [{"foo"=>"bar"}, {"q" => "ok"}]). - should == ["start", ["bacon"], {"headers"=>{}}] - @qs.run(["list_row", {"key"=>"baz"}]).should == ["chunks", ["baz"]] - @qs.run(["list_row", {"key"=>"foom"}]).should == ["chunks", ["foom"]] - @qs.run(["list_row", {"key"=>"fooz"}]).should == ["end", ["fooz", "early"]] - e = @qs.run(["list_row", {"key"=>"foox"}]) - e[0].should == "error" - e[1].should == "unknown_command" - should_have_exited @qs - end - end - - describe "raw list" do - it "should exit if it gets a non-row in the middle" do - @qs.ddoc_run(@ddoc, ["lists","raw"], - [{"foo"=>"bar"}, {"q" => "ok"}]). - should == ["start", ["first chunk", "ok"], {"headers"=>{}}] - e = @qs.run(["reset"]) - e[0].should == "error" - e[1].should == "list_error" - should_have_exited @qs - end - end - - describe "fatal error" do - it "should exit" do - @qs.ddoc_run(@ddoc, ["shows","fatal"], - [{"foo"=>"bar"}, {"q" => "ok"}]). - should == ["error", "error_key", "testing"] - should_have_exited @qs - end - end -end - -describe "thank you for using the tests" do - it "for more info run with QS_TRACE=true or see query_server_spec.rb file header" do - end -end \ No newline at end of file diff --git a/test/view_server/run_native_process.es b/test/view_server/run_native_process.es deleted file mode 100755 index fcf16d75..00000000 --- a/test/view_server/run_native_process.es +++ /dev/null @@ -1,59 +0,0 @@ -#! /usr/bin/env escript - -% Licensed under the Apache License, Version 2.0 (the "License"); you may not -% use this file except in compliance with the License. You may obtain a copy of -% the License at -% -% http://www.apache.org/licenses/LICENSE-2.0 -% -% Unless required by applicable law or agreed to in writing, software -% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -% License for the specific language governing permissions and limitations under -% the License. - -read() -> - case io:get_line('') of - eof -> stop; - Data -> couch_util:json_decode(Data) - end. - -send(Data) when is_binary(Data) -> - send(binary_to_list(Data)); -send(Data) when is_list(Data) -> - io:format(Data ++ "\n", []). - -write(Data) -> - % log("~p", [Data]), - case (catch couch_util:json_encode(Data)) of - % when testing, this is what prints your errors - {json_encode, Error} -> write({[{<<"error">>, Error}]}); - Json -> send(Json) - end. - -% log(Mesg) -> -% log(Mesg, []). -% log(Mesg, Params) -> -% io:format(standard_error, Mesg, Params). -% jlog(Mesg) -> -% write([<<"log">>, list_to_binary(io_lib:format("~p",[Mesg]))]). 
- -loop(Pid) -> - case read() of - stop -> ok; - Json -> - case (catch couch_native_process:prompt(Pid, Json)) of - {error, Reason} -> - ok = write([error, Reason, Reason]); - Resp -> - ok = write(Resp), - loop(Pid) - end - end. - -main([]) -> - code:add_pathz("src/couchdb"), - code:add_pathz("src/mochiweb"), - {ok, Pid} = couch_native_process:start_link(), - loop(Pid). - -- cgit v1.2.3
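
Both run_native_process.es above and the Ruby spec drive the view server over the same line protocol: one JSON command per line on stdin, one JSON reply per line on stdout. An illustrative exchange, using commands the spec sends (the map_doc reply assumes a single previously added map function):

    ["reset"]                                             -> true
    ["add_fun", "function(doc){ emit(\"baz\", doc.a) }"]  -> true
    ["map_doc", {"a": "b"}]                               -> [[["baz", "b"]]]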