Posted to commits@couchdb.apache.org by be...@apache.org on 2014/01/09 01:38:46 UTC

[01/12] move test -> src/test

Updated Branches:
  refs/heads/1994-merge-rcouch 1e685c225 -> 5b9e825d8


http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/test/view_server/run_native_process.es
----------------------------------------------------------------------
diff --git a/test/view_server/run_native_process.es b/test/view_server/run_native_process.es
deleted file mode 100755
index fcf16d7..0000000
--- a/test/view_server/run_native_process.es
+++ /dev/null
@@ -1,59 +0,0 @@
-#! /usr/bin/env escript
-
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
-read() ->
-    case io:get_line('') of
-        eof -> stop;
-        Data -> couch_util:json_decode(Data)
-    end.
-
-send(Data) when is_binary(Data) ->
-    send(binary_to_list(Data));
-send(Data) when is_list(Data) ->
-    io:format(Data ++ "\n", []).
-
-write(Data) ->
-    % log("~p", [Data]),
-    case (catch couch_util:json_encode(Data)) of
-        % when testing, this is what prints your errors
-        {json_encode, Error} -> write({[{<<"error">>, Error}]});
-        Json -> send(Json)
-    end.
-
-% log(Mesg) ->
-%    log(Mesg, []).
-% log(Mesg, Params) ->
-%    io:format(standard_error, Mesg, Params).
-% jlog(Mesg) ->
-%     write([<<"log">>, list_to_binary(io_lib:format("~p",[Mesg]))]).
-
-loop(Pid) ->
-    case read() of
-        stop -> ok;
-        Json ->
-            case (catch couch_native_process:prompt(Pid, Json)) of
-                {error, Reason} ->
-                    ok = write([error, Reason, Reason]);
-                Resp ->
-                    ok = write(Resp),
-                    loop(Pid)
-            end
-    end.
-
-main([]) ->
-    code:add_pathz("src/couchdb"),
-    code:add_pathz("src/mochiweb"),
-    {ok, Pid} = couch_native_process:start_link(),
-    loop(Pid).
-
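
(This script is only relocated, not removed; the diffstat below re-adds it as
src/test/view_server/run_native_process.es.) Since it speaks the line-based
query-server protocol, a quick smoke test from the top of a built tree might
look like the sketch below; the expected reply assumes the protocol's usual
behaviour, where a "reset" command answers true.

    # Hypothetical one-shot session with the relocated script
    echo '["reset"]' | ./src/test/view_server/run_native_process.es
    # expected output: true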


[11/12] git commit: updated refs/heads/1994-merge-rcouch to 5b9e825

Posted by be...@apache.org.
move test -> src/test


Project: http://git-wip-us.apache.org/repos/asf/couchdb/repo
Commit: http://git-wip-us.apache.org/repos/asf/couchdb/commit/ec7ee43f
Tree: http://git-wip-us.apache.org/repos/asf/couchdb/tree/ec7ee43f
Diff: http://git-wip-us.apache.org/repos/asf/couchdb/diff/ec7ee43f

Branch: refs/heads/1994-merge-rcouch
Commit: ec7ee43f1731e1b48dd02bc55efe2f33b72452fe
Parents: 1e685c2
Author: Benoit Chesneau <be...@apache.org>
Authored: Thu Jan 9 01:32:33 2014 +0100
Committer: Benoit Chesneau <be...@apache.org>
Committed: Thu Jan 9 01:32:33 2014 +0100

----------------------------------------------------------------------
 src/test/Makefile.am                            |  15 +
 src/test/bench/Makefile.am                      |  22 +
 src/test/bench/bench_marks.js                   | 103 +++
 src/test/bench/benchbulk.sh                     |  69 ++
 src/test/bench/run.tpl                          |  28 +
 src/test/etap/001-load.t                        |  68 ++
 src/test/etap/002-icu-driver.t                  |  33 +
 src/test/etap/010-file-basics.t                 | 113 +++
 src/test/etap/011-file-headers.t                | 152 ++++
 src/test/etap/020-btree-basics.t                | 265 ++++++
 src/test/etap/021-btree-reductions.t            | 237 ++++++
 src/test/etap/030-doc-from-json.t               | 236 ++++++
 src/test/etap/031-doc-to-json.t                 | 197 +++++
 src/test/etap/040-util.t                        |  80 ++
 src/test/etap/041-uuid-gen-id.ini               |  20 +
 src/test/etap/041-uuid-gen-seq.ini              |  19 +
 src/test/etap/041-uuid-gen-utc.ini              |  19 +
 src/test/etap/041-uuid-gen.t                    | 147 ++++
 src/test/etap/042-work-queue.t                  | 500 +++++++++++
 src/test/etap/050-stream.t                      |  87 ++
 src/test/etap/060-kt-merging.t                  | 176 ++++
 src/test/etap/061-kt-missing-leaves.t           |  65 ++
 src/test/etap/062-kt-remove-leaves.t            |  69 ++
 src/test/etap/063-kt-get-leaves.t               |  98 +++
 src/test/etap/064-kt-counting.t                 |  46 ++
 src/test/etap/065-kt-stemming.t                 |  42 +
 src/test/etap/070-couch-db.t                    |  73 ++
 src/test/etap/072-cleanup.t                     | 126 +++
 src/test/etap/073-changes.t                     | 558 +++++++++++++
 src/test/etap/074-doc-update-conflicts.t        | 218 +++++
 src/test/etap/075-auth-cache.t                  | 276 +++++++
 src/test/etap/076-file-compression.t            | 186 +++++
 .../etap/077-couch-db-fast-db-delete-create.t   |  61 ++
 src/test/etap/080-config-get-set.t              | 128 +++
 src/test/etap/081-config-override.1.ini         |  22 +
 src/test/etap/081-config-override.2.ini         |  22 +
 src/test/etap/081-config-override.t             | 212 +++++
 src/test/etap/082-config-register.t             |  94 +++
 src/test/etap/083-config-no-files.t             |  53 ++
 src/test/etap/090-task-status.t                 | 279 +++++++
 src/test/etap/100-ref-counter.t                 | 114 +++
 src/test/etap/120-stats-collect.t               | 150 ++++
 src/test/etap/121-stats-aggregates.cfg          |  19 +
 src/test/etap/121-stats-aggregates.ini          |  20 +
 src/test/etap/121-stats-aggregates.t            | 171 ++++
 src/test/etap/130-attachments-md5.t             | 248 ++++++
 src/test/etap/140-attachment-comp.t             | 728 ++++++++++++++++
 src/test/etap/150-invalid-view-seq.t            | 183 ++++
 src/test/etap/160-vhosts.t                      | 371 +++++++++
 src/test/etap/170-os-daemons.es                 |  26 +
 src/test/etap/170-os-daemons.t                  | 114 +++
 src/test/etap/171-os-daemons-config.es          |  85 ++
 src/test/etap/171-os-daemons-config.t           |  74 ++
 src/test/etap/172-os-daemon-errors.1.sh         |  17 +
 src/test/etap/172-os-daemon-errors.2.sh         |  15 +
 src/test/etap/172-os-daemon-errors.3.sh         |  15 +
 src/test/etap/172-os-daemon-errors.4.sh         |  15 +
 src/test/etap/172-os-daemon-errors.t            | 126 +++
 src/test/etap/173-os-daemon-cfg-register.t      | 116 +++
 src/test/etap/180-http-proxy.ini                |  20 +
 src/test/etap/180-http-proxy.t                  | 376 +++++++++
 src/test/etap/190-json-stream-parse.t           | 184 +++++
 src/test/etap/200-view-group-no-db-leaks.t      | 307 +++++++
 src/test/etap/201-view-group-shutdown.t         | 293 +++++++
 src/test/etap/210-os-proc-pool.t                | 163 ++++
 src/test/etap/220-compaction-daemon.t           | 225 +++++
 src/test/etap/230-pbkfd2.t                      |  38 +
 src/test/etap/231-cors.t                        | 433 ++++++++++
 src/test/etap/250-upgrade-legacy-view-files.t   | 168 ++++
 src/test/etap/Makefile.am                       | 108 +++
 .../3b835456c235b1827e012e25666152f3.view       | Bin 0 -> 4192 bytes
 src/test/etap/fixtures/test.couch               | Bin 0 -> 16482 bytes
 src/test/etap/run.tpl                           |  32 +
 src/test/etap/test_cfg_register.c               |  31 +
 src/test/etap/test_util.erl.in                  |  94 +++
 src/test/etap/test_web.erl                      |  99 +++
 src/test/javascript/Makefile.am                 |  27 +
 src/test/javascript/cli_runner.js               |  47 ++
 src/test/javascript/couch_http.js               |  73 ++
 src/test/javascript/run.tpl                     | 138 ++++
 src/test/javascript/test_setup.js               |  89 ++
 src/test/random_port.ini                        |  19 +
 src/test/view_server/Makefile.am                |  15 +
 src/test/view_server/query_server_spec.rb       | 824 +++++++++++++++++++
 src/test/view_server/run_native_process.es      |  59 ++
 test/Makefile.am                                |  15 -
 test/bench/Makefile.am                          |  22 -
 test/bench/bench_marks.js                       | 103 ---
 test/bench/benchbulk.sh                         |  69 --
 test/bench/run.tpl                              |  28 -
 test/etap/001-load.t                            |  68 --
 test/etap/002-icu-driver.t                      |  33 -
 test/etap/010-file-basics.t                     | 113 ---
 test/etap/011-file-headers.t                    | 152 ----
 test/etap/020-btree-basics.t                    | 265 ------
 test/etap/021-btree-reductions.t                | 237 ------
 test/etap/030-doc-from-json.t                   | 236 ------
 test/etap/031-doc-to-json.t                     | 197 -----
 test/etap/040-util.t                            |  80 --
 test/etap/041-uuid-gen-id.ini                   |  20 -
 test/etap/041-uuid-gen-seq.ini                  |  19 -
 test/etap/041-uuid-gen-utc.ini                  |  19 -
 test/etap/041-uuid-gen.t                        | 147 ----
 test/etap/042-work-queue.t                      | 500 -----------
 test/etap/050-stream.t                          |  87 --
 test/etap/060-kt-merging.t                      | 176 ----
 test/etap/061-kt-missing-leaves.t               |  65 --
 test/etap/062-kt-remove-leaves.t                |  69 --
 test/etap/063-kt-get-leaves.t                   |  98 ---
 test/etap/064-kt-counting.t                     |  46 --
 test/etap/065-kt-stemming.t                     |  42 -
 test/etap/070-couch-db.t                        |  73 --
 test/etap/072-cleanup.t                         | 126 ---
 test/etap/073-changes.t                         | 558 -------------
 test/etap/074-doc-update-conflicts.t            | 218 -----
 test/etap/075-auth-cache.t                      | 276 -------
 test/etap/076-file-compression.t                | 186 -----
 test/etap/077-couch-db-fast-db-delete-create.t  |  61 --
 test/etap/080-config-get-set.t                  | 128 ---
 test/etap/081-config-override.1.ini             |  22 -
 test/etap/081-config-override.2.ini             |  22 -
 test/etap/081-config-override.t                 | 212 -----
 test/etap/082-config-register.t                 |  94 ---
 test/etap/083-config-no-files.t                 |  53 --
 test/etap/090-task-status.t                     | 279 -------
 test/etap/100-ref-counter.t                     | 114 ---
 test/etap/120-stats-collect.t                   | 150 ----
 test/etap/121-stats-aggregates.cfg              |  19 -
 test/etap/121-stats-aggregates.ini              |  20 -
 test/etap/121-stats-aggregates.t                | 171 ----
 test/etap/130-attachments-md5.t                 | 248 ------
 test/etap/140-attachment-comp.t                 | 728 ----------------
 test/etap/150-invalid-view-seq.t                | 183 ----
 test/etap/160-vhosts.t                          | 371 ---------
 test/etap/170-os-daemons.es                     |  26 -
 test/etap/170-os-daemons.t                      | 114 ---
 test/etap/171-os-daemons-config.es              |  85 --
 test/etap/171-os-daemons-config.t               |  74 --
 test/etap/172-os-daemon-errors.1.sh             |  17 -
 test/etap/172-os-daemon-errors.2.sh             |  15 -
 test/etap/172-os-daemon-errors.3.sh             |  15 -
 test/etap/172-os-daemon-errors.4.sh             |  15 -
 test/etap/172-os-daemon-errors.t                | 126 ---
 test/etap/173-os-daemon-cfg-register.t          | 116 ---
 test/etap/180-http-proxy.ini                    |  20 -
 test/etap/180-http-proxy.t                      | 376 ---------
 test/etap/190-json-stream-parse.t               | 184 -----
 test/etap/200-view-group-no-db-leaks.t          | 307 -------
 test/etap/201-view-group-shutdown.t             | 293 -------
 test/etap/210-os-proc-pool.t                    | 163 ----
 test/etap/220-compaction-daemon.t               | 225 -----
 test/etap/230-pbkfd2.t                          |  38 -
 test/etap/231-cors.t                            | 433 ----------
 test/etap/250-upgrade-legacy-view-files.t       | 168 ----
 test/etap/Makefile.am                           | 108 ---
 .../3b835456c235b1827e012e25666152f3.view       | Bin 4192 -> 0 bytes
 test/etap/fixtures/test.couch                   | Bin 16482 -> 0 bytes
 test/etap/run.tpl                               |  32 -
 test/etap/test_cfg_register.c                   |  31 -
 test/etap/test_util.erl.in                      |  94 ---
 test/etap/test_web.erl                          |  99 ---
 test/javascript/Makefile.am                     |  27 -
 test/javascript/cli_runner.js                   |  47 --
 test/javascript/couch_http.js                   |  73 --
 test/javascript/run.tpl                         | 138 ----
 test/javascript/test_setup.js                   |  89 --
 test/random_port.ini                            |  19 -
 test/view_server/Makefile.am                    |  15 -
 test/view_server/query_server_spec.rb           | 824 -------------------
 test/view_server/run_native_process.es          |  59 --
 170 files changed, 11683 insertions(+), 11683 deletions(-)
----------------------------------------------------------------------
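
The stat block above shows a pure relocation: 170 files, with every deletion
under test/ matched by an insertion under src/test/ (11683 lines each way).
Git's rename detection therefore keeps per-file history intact; for example,
a sketch against a local checkout of this branch:

    # Follow a relocated test file back across the move
    git log --follow --oneline -- src/test/etap/001-load.t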


http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/src/test/Makefile.am
----------------------------------------------------------------------
diff --git a/src/test/Makefile.am b/src/test/Makefile.am
new file mode 100644
index 0000000..7c70a5a
--- /dev/null
+++ b/src/test/Makefile.am
@@ -0,0 +1,15 @@
+## Licensed under the Apache License, Version 2.0 (the "License"); you may not
+## use this file except in compliance with the License. You may obtain a copy of
+## the License at
+##
+##   http://www.apache.org/licenses/LICENSE-2.0
+##
+## Unless required by applicable law or agreed to in writing, software
+## distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+## WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+## License for the specific language governing permissions and limitations under
+## the License.
+
+SUBDIRS = bench etap javascript view_server
+EXTRA_DIST = random_port.ini
+

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/src/test/bench/Makefile.am
----------------------------------------------------------------------
diff --git a/src/test/bench/Makefile.am b/src/test/bench/Makefile.am
new file mode 100644
index 0000000..ce39c4b
--- /dev/null
+++ b/src/test/bench/Makefile.am
@@ -0,0 +1,22 @@
+## Licensed under the Apache License, Version 2.0 (the "License"); you may not
+## use this file except in compliance with the License. You may obtain a copy of
+## the License at
+##
+##   http://www.apache.org/licenses/LICENSE-2.0
+##
+## Unless required by applicable law or agreed to in writing, software
+## distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+## WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+## License for the specific language governing permissions and limitations under
+## the License.
+
+EXTRA_DIST = benchbulk.sh bench_marks.js run.tpl
+
+noinst_SCRIPTS = run
+CLEANFILES = run
+
+run: run.tpl
+	sed -e "s|%abs_top_srcdir%|$(abs_top_srcdir)|" \
+		-e "s|%abs_top_builddir%|$(abs_top_builddir)|" \
+	< $< > $@
+	chmod +x $@
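
The run rule above just instantiates run.tpl with the absolute source and
build directories. A hand-rolled equivalent outside automake might look like
this sketch (the paths are placeholders, not part of the commit):

    # Manual stand-in for the automake rule; /path/to/src and /path/to/build
    # are hypothetical values of abs_top_srcdir and abs_top_builddir.
    sed -e "s|%abs_top_srcdir%|/path/to/src|" \
        -e "s|%abs_top_builddir%|/path/to/build|" \
        < run.tpl > run
    chmod +x run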

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/src/test/bench/bench_marks.js
----------------------------------------------------------------------
diff --git a/src/test/bench/bench_marks.js b/src/test/bench/bench_marks.js
new file mode 100644
index 0000000..4025adb
--- /dev/null
+++ b/src/test/bench/bench_marks.js
@@ -0,0 +1,103 @@
+// Licensed under the Apache License, Version 2.0 (the "License"); you may not
+// use this file except in compliance with the License. You may obtain a copy of
+// the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations under
+// the License.
+
+var NUM_DOCS = 2000;
+var NUM_BATCHES = 20;
+
+var init = function() {
+  var db = new CouchDB("bench_mark_db", {"X-Couch-Full-Commit": "false"});
+  db.deleteDb();
+  db.createDb();
+  return db;
+};
+
+var timeit = function(func) {
+  var startTime = (new Date()).getTime();
+  func();
+  return ((new Date()).getTime() - startTime) / 1000;
+};
+
+var report = function(name, rate) {
+  rate = Math.round(parseFloat(rate) * 100) / 100;
+  console.log("" + name + ": " + rate + " docs/second");
+};
+
+var makeDocs = function(n) {
+  docs = [];
+  for (var i=0; i < n; i++) {
+    docs.push({"foo":"bar"});
+  };
+  return docs;
+};
+
+var couchTests = {};
+
+couchTests.single_doc_insert = function() {
+  var db = init();
+  var len = timeit(function() {
+    for(var i = 0; i < NUM_DOCS; i++) {
+      db.save({"foo": "bar"});
+    }
+  });
+  report("Single doc inserts", NUM_DOCS/len);
+};
+
+couchTests.batch_ok_doc_insert = function() {
+  var db = init();
+  var len = timeit(function() {
+    for(var i = 0; i < NUM_DOCS; i++) {
+      db.save({"foo":"bar"}, {"batch":"ok"});
+    }
+  });
+  report("Single doc inserts with batch=ok", NUM_DOCS/len);
+};
+
+couchTests.bulk_doc_100 = function() {
+  var db = init();
+  var len = timeit(function() {
+    for(var i = 0; i < NUM_BATCHES; i++) {
+      db.bulkSave(makeDocs(100));
+    }
+  });
+  report("Bulk docs - 100", (NUM_BATCHES*100)/len);
+};
+      
+couchTests.bulk_doc_1000 = function() {
+  var db = init();
+  var len = timeit(function() {
+    for(var i = 0; i < NUM_BATCHES; i++) {
+      db.bulkSave(makeDocs(1000));
+    }
+  });
+  report("Bulk docs - 1000", (NUM_BATCHES*1000)/len);
+};
+
+
+couchTests.bulk_doc_5000 = function() {
+  var db = init();
+  var len = timeit(function() {
+    for(var i = 0; i < NUM_BATCHES; i++) {
+      db.bulkSave(makeDocs(5000));
+    }
+  });
+  report("Bulk docs - 5000", (NUM_BATCHES*5000)/len);
+};
+
+couchTests.bulk_doc_10000 = function() {
+  var db = init();
+  var len = timeit(function() {
+    for(var i = 0; i < NUM_BATCHES; i++) {
+      db.bulkSave(makeDocs(10000));
+    }
+  });
+  report("Bulk docs - 10000", (NUM_BATCHES*10000)/len);
+};

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/src/test/bench/benchbulk.sh
----------------------------------------------------------------------
diff --git a/src/test/bench/benchbulk.sh b/src/test/bench/benchbulk.sh
new file mode 100755
index 0000000..55c72e4
--- /dev/null
+++ b/src/test/bench/benchbulk.sh
@@ -0,0 +1,69 @@
+#!/bin/sh -e
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
+#
+
+# usage: time benchbulk.sh
+# it takes about 30 seconds to run on my old MacBook with bulksize 1000
+
+BULKSIZE=100
+DOCSIZE=10
+INSERTS=10
+ROUNDS=10
+DBURL="http://127.0.0.1:5984/benchbulk"
+POSTURL="$DBURL/_bulk_docs"
+
+function make_bulk_docs() {
+  ROW=0
+  SIZE=$(($1-1))
+  START=$2
+  BODYSIZE=$3  
+  
+  BODY=$(printf "%0${BODYSIZE}d")
+
+  echo '{"docs":['
+  while [ $ROW -lt $SIZE ]; do
+    printf '{"_id":"%020d", "body":"'$BODY'"},' $(($ROW + $START))
+    let ROW=ROW+1
+  done
+  printf '{"_id":"%020d", "body":"'$BODY'"}' $(($ROW + $START))
+  echo ']}'
+}
+
+echo "Making $INSERTS bulk inserts of $BULKSIZE docs each"
+
+echo "Attempt to delete db at $DBURL"
+curl -X DELETE $DBURL -w\\n
+
+echo "Attempt to create db at $DBURL"
+curl -X PUT $DBURL -w\\n
+
+echo "Running $ROUNDS rounds of $INSERTS concurrent inserts to $POSTURL"
+RUN=0
+while [ $RUN -lt $ROUNDS ]; do
+
+  POSTS=0
+  while [ $POSTS -lt $INSERTS ]; do
+    STARTKEY=$[ POSTS * BULKSIZE + RUN * BULKSIZE * INSERTS ]
+    echo "startkey $STARTKEY bulksize $BULKSIZE"
+    DOCS=$(make_bulk_docs $BULKSIZE $STARTKEY $DOCSIZE)
+    # echo $DOCS
+    echo $DOCS | curl -T - -H Content-Type:application/json -X POST $POSTURL -w%{http_code}\ %{time_total}\ sec\\n >/dev/null 2>&1 &
+    let POSTS=POSTS+1
+  done
+
+  echo "waiting"
+  wait
+  let RUN=RUN+1
+done
+
+curl $DBURL -w\\n

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/src/test/bench/run.tpl
----------------------------------------------------------------------
diff --git a/src/test/bench/run.tpl b/src/test/bench/run.tpl
new file mode 100755
index 0000000..9307863
--- /dev/null
+++ b/src/test/bench/run.tpl
@@ -0,0 +1,28 @@
+#!/bin/sh -e
+
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
+
+SRC_DIR=%abs_top_srcdir%
+SCRIPT_DIR=$SRC_DIR/share/www/script
+JS_TEST_DIR=$SRC_DIR/test/javascript
+JS_BENCH_DIR=$SRC_DIR/test/bench
+
+COUCHJS=%abs_top_builddir%/src/couchdb/priv/couchjs
+
+cat $SCRIPT_DIR/json2.js \
+    $SCRIPT_DIR/couch.js \
+    $JS_TEST_DIR/couch_http.js \
+    $JS_BENCH_DIR/bench_marks.js \
+    $JS_TEST_DIR/cli_runner.js \
+    | $COUCHJS -
+
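
Once generated, the run script is nothing more than this pipeline: it
concatenates the browser-side CouchDB client, the HTTP shim, the benchmark
definitions and the CLI runner, and streams them into couchjs. A usage
sketch, assuming a configured build tree where automake has produced
src/test/bench/Makefile:

    # Build the runner from its template, then execute the benchmarks
    make -C src/test/bench run
    ./src/test/bench/run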

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/src/test/etap/001-load.t
----------------------------------------------------------------------
diff --git a/src/test/etap/001-load.t b/src/test/etap/001-load.t
new file mode 100755
index 0000000..5ce0d93
--- /dev/null
+++ b/src/test/etap/001-load.t
@@ -0,0 +1,68 @@
+#!/usr/bin/env escript
+%% -*- erlang -*-
+
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+%   http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+% Test that we can load each module.
+
+main(_) ->
+    test_util:init_code_path(),
+    Modules = [
+        couch_auth_cache,
+        couch_btree,
+        couch_changes,
+        couch_compress,
+        couch_config,
+        couch_config_writer,
+        couch_db,
+        couch_db_update_notifier,
+        couch_db_update_notifier_sup,
+        couch_db_updater,
+        couch_doc,
+        % Fails unless couch_config gen_server is started.
+        % couch_ejson_compare,
+        couch_event_sup,
+        couch_external_manager,
+        couch_external_server,
+        couch_file,
+        couch_httpd,
+        couch_httpd_db,
+        couch_httpd_external,
+        couch_httpd_misc_handlers,
+        couch_httpd_rewrite,
+        couch_httpd_stats_handlers,
+        couch_key_tree,
+        couch_log,
+        couch_os_process,
+        couch_query_servers,
+        couch_ref_counter,
+        couch_server,
+        couch_server_sup,
+        couch_stats_aggregator,
+        couch_stats_collector,
+        couch_stream,
+        couch_task_status,
+        couch_util,
+        couch_work_queue,
+        json_stream_parse
+    ],
+
+    etap:plan(length(Modules)),
+    lists:foreach(
+        fun(Module) ->
+            etap:loaded_ok(
+                Module,
+                lists:concat(["Loaded: ", Module])
+            )
+        end, Modules),
+    etap:end_tests().

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/src/test/etap/002-icu-driver.t
----------------------------------------------------------------------
diff --git a/src/test/etap/002-icu-driver.t b/src/test/etap/002-icu-driver.t
new file mode 100755
index 0000000..e233533
--- /dev/null
+++ b/src/test/etap/002-icu-driver.t
@@ -0,0 +1,33 @@
+#!/usr/bin/env escript
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+%   http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+main(_) ->
+    test_util:init_code_path(),
+    couch_config:start_link(test_util:config_files()),
+    etap:plan(3),
+    etap:is(
+        element(1, couch_drv:start_link()),
+        ok,
+        "Started couch_icu_driver."
+    ),
+    etap:is(
+        couch_util:collate(<<"foo">>, <<"bar">>),
+        1,
+        "Can collate stuff"
+    ),
+    etap:is(
+        couch_util:collate(<<"A">>, <<"aa">>),
+        -1,
+        "Collate's non-ascii style."
+    ),
+    etap:end_tests().

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/src/test/etap/010-file-basics.t
----------------------------------------------------------------------
diff --git a/src/test/etap/010-file-basics.t b/src/test/etap/010-file-basics.t
new file mode 100755
index 0000000..fb1b29e
--- /dev/null
+++ b/src/test/etap/010-file-basics.t
@@ -0,0 +1,113 @@
+#!/usr/bin/env escript
+%% -*- erlang -*-
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+%   http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+-define(etap_match(Got, Expected, Desc),
+        etap:fun_is(fun(XXXXXX) ->
+            case XXXXXX of Expected -> true; _ -> false end
+        end, Got, Desc)).
+
+filename() -> test_util:build_file("test/etap/temp.010").
+
+main(_) ->
+    test_util:init_code_path(),
+    etap:plan(19),
+    case (catch test()) of
+        ok ->
+            etap:end_tests();
+        Other ->
+            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
+            etap:bail()
+    end,
+    ok.
+
+test() ->
+    etap:is({error, enoent}, couch_file:open("not a real file"),
+        "Opening a non-existant file should return an enoent error."),
+
+    etap:fun_is(
+        fun({ok, _}) -> true; (_) -> false end,
+        couch_file:open(filename() ++ ".1", [create, invalid_option]),
+        "Invalid flags to open are ignored."
+    ),
+
+    {ok, Fd} = couch_file:open(filename() ++ ".0", [create, overwrite]),
+    etap:ok(is_pid(Fd),
+        "Returned file descriptor is a Pid"),
+
+    etap:is({ok, 0}, couch_file:bytes(Fd),
+        "Newly created files have 0 bytes."),
+
+    ?etap_match(couch_file:append_term(Fd, foo), {ok, 0, _},
+        "Appending a term returns the previous end of file position."),
+
+    {ok, Size} = couch_file:bytes(Fd),
+    etap:is_greater(Size, 0,
+        "Writing a term increased the file size."),
+
+    ?etap_match(couch_file:append_binary(Fd, <<"fancy!">>), {ok, Size, _},
+        "Appending a binary returns the current file size."),
+
+    etap:is({ok, foo}, couch_file:pread_term(Fd, 0),
+        "Reading the first term returns what we wrote: foo"),
+
+    etap:is({ok, <<"fancy!">>}, couch_file:pread_binary(Fd, Size),
+        "Reading back the binary returns what we wrote: <<\"fancy\">>."),
+
+    etap:is({ok, couch_compress:compress(foo, snappy)},
+        couch_file:pread_binary(Fd, 0),
+        "Reading a binary at a term position returns the term as binary."
+    ),
+
+    {ok, BinPos, _} = couch_file:append_binary(Fd, <<131,100,0,3,102,111,111>>),
+    etap:is({ok, foo}, couch_file:pread_term(Fd, BinPos),
+        "Reading a term from a written binary term representation succeeds."),
+        
+    BigBin = list_to_binary(lists:duplicate(100000, 0)),
+    {ok, BigBinPos, _} = couch_file:append_binary(Fd, BigBin),
+    etap:is({ok, BigBin}, couch_file:pread_binary(Fd, BigBinPos),
+        "Reading a large term from a written representation succeeds."),
+    
+    ok = couch_file:write_header(Fd, hello),
+    etap:is({ok, hello}, couch_file:read_header(Fd),
+        "Reading a header succeeds."),
+        
+    {ok, BigBinPos2, _} = couch_file:append_binary(Fd, BigBin),
+    etap:is({ok, BigBin}, couch_file:pread_binary(Fd, BigBinPos2),
+        "Reading a large term from a written representation succeeds 2."),
+
+    % append_binary == append_iolist?
+    % Possible bug in pread_iolist or iolist() -> append_binary
+    {ok, IOLPos, _} = couch_file:append_binary(Fd, ["foo", $m, <<"bam">>]),
+    {ok, IoList} = couch_file:pread_iolist(Fd, IOLPos),
+    etap:is(<<"foombam">>, iolist_to_binary(IoList),
+        "Reading an results in a binary form of the written iolist()"),
+
+    % XXX: How does one test fsync?
+    etap:is(ok, couch_file:sync(Fd),
+        "Syncing does not cause an error."),
+
+    etap:is(ok, couch_file:truncate(Fd, Size),
+        "Truncating a file succeeds."),
+
+    %etap:is(eof, (catch couch_file:pread_binary(Fd, Size)),
+    %    "Reading data that was truncated fails.")
+    etap:skip(fun() -> ok end,
+        "No idea how to test reading beyond EOF"),
+
+    etap:is({ok, foo}, couch_file:pread_term(Fd, 0),
+        "Truncating does not affect data located before the truncation mark."),
+
+    etap:is(ok, couch_file:close(Fd),
+        "Files close properly."),
+    ok.

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/src/test/etap/011-file-headers.t
----------------------------------------------------------------------
diff --git a/src/test/etap/011-file-headers.t b/src/test/etap/011-file-headers.t
new file mode 100755
index 0000000..a26b032
--- /dev/null
+++ b/src/test/etap/011-file-headers.t
@@ -0,0 +1,152 @@
+#!/usr/bin/env escript
+%% -*- erlang -*-
+%%! -pa ./src/couchdb -sasl errlog_type error -boot start_sasl -noshell
+
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+%   http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+filename() -> test_util:build_file("test/etap/temp.011").
+sizeblock() -> 4096. % Need to keep this in sync with couch_file.erl
+
+main(_) ->
+    test_util:init_code_path(),
+    {S1, S2, S3} = now(),
+    random:seed(S1, S2, S3),
+
+    etap:plan(18),
+    case (catch test()) of
+        ok ->
+            etap:end_tests();
+        Other ->
+            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
+            etap:bail()
+    end,
+    ok.
+
+test() ->
+    {ok, Fd} = couch_file:open(filename(), [create,overwrite]),
+
+    etap:is({ok, 0}, couch_file:bytes(Fd),
+        "File should be initialized to contain zero bytes."),
+
+    etap:is(ok, couch_file:write_header(Fd, {<<"some_data">>, 32}),
+        "Writing a header succeeds."),
+
+    {ok, Size1} = couch_file:bytes(Fd),
+    etap:is_greater(Size1, 0,
+        "Writing a header allocates space in the file."),
+
+    etap:is({ok, {<<"some_data">>, 32}}, couch_file:read_header(Fd),
+        "Reading the header returns what we wrote."),
+
+    etap:is(ok, couch_file:write_header(Fd, [foo, <<"more">>]),
+        "Writing a second header succeeds."),
+
+    {ok, Size2} = couch_file:bytes(Fd),
+    etap:is_greater(Size2, Size1,
+        "Writing a second header allocates more space."),
+
+    etap:is({ok, [foo, <<"more">>]}, couch_file:read_header(Fd),
+        "Reading the second header does not return the first header."),
+
+    % Delete the second header.
+    ok = couch_file:truncate(Fd, Size1),
+
+    etap:is({ok, {<<"some_data">>, 32}}, couch_file:read_header(Fd),
+        "Reading the header after a truncation returns a previous header."),
+
+    couch_file:write_header(Fd, [foo, <<"more">>]),
+    etap:is({ok, Size2}, couch_file:bytes(Fd),
+        "Rewriting the same second header returns the same second size."),
+
+    couch_file:write_header(Fd, erlang:make_tuple(5000, <<"CouchDB">>)),
+    etap:is(
+        couch_file:read_header(Fd),
+        {ok, erlang:make_tuple(5000, <<"CouchDB">>)},
+        "Headers larger than the block size can be saved (COUCHDB-1319)"
+    ),
+
+    ok = couch_file:close(Fd),
+
+    % Now for the fun stuff. Try corrupting the second header and see
+    % if we recover properly.
+
+    % Destroy the 0x1 byte that marks a header
+    check_header_recovery(fun(CouchFd, RawFd, Expect, HeaderPos) ->
+        etap:isnt(Expect, couch_file:read_header(CouchFd),
+            "Should return a different header before corruption."),
+        file:pwrite(RawFd, HeaderPos, <<0>>),
+        etap:is(Expect, couch_file:read_header(CouchFd),
+            "Corrupting the byte marker should read the previous header.")
+    end),
+
+    % Corrupt the size.
+    check_header_recovery(fun(CouchFd, RawFd, Expect, HeaderPos) ->
+        etap:isnt(Expect, couch_file:read_header(CouchFd),
+            "Should return a different header before corruption."),
+        % +1 for 0x1 byte marker
+        file:pwrite(RawFd, HeaderPos+1, <<10/integer>>),
+        etap:is(Expect, couch_file:read_header(CouchFd),
+            "Corrupting the size should read the previous header.")
+    end),
+
+    % Corrupt the MD5 signature
+    check_header_recovery(fun(CouchFd, RawFd, Expect, HeaderPos) ->
+        etap:isnt(Expect, couch_file:read_header(CouchFd),
+            "Should return a different header before corruption."),
+        % +5 = +1 for 0x1 byte and +4 for term size.
+        file:pwrite(RawFd, HeaderPos+5, <<"F01034F88D320B22">>),
+        etap:is(Expect, couch_file:read_header(CouchFd),
+            "Corrupting the MD5 signature should read the previous header.")
+    end),
+
+    % Corrupt the data
+    check_header_recovery(fun(CouchFd, RawFd, Expect, HeaderPos) ->
+        etap:isnt(Expect, couch_file:read_header(CouchFd),
+            "Should return a different header before corruption."),
+        % +21 = +1 for 0x1 byte, +4 for term size and +16 for MD5 sig
+        file:pwrite(RawFd, HeaderPos+21, <<"some data goes here!">>),
+        etap:is(Expect, couch_file:read_header(CouchFd),
+            "Corrupting the header data should read the previous header.")
+    end),
+
+    ok.
+
+check_header_recovery(CheckFun) ->
+    {ok, Fd} = couch_file:open(filename(), [create,overwrite]),
+    {ok, RawFd} = file:open(filename(), [read, write, raw, binary]),
+
+    {ok, _} = write_random_data(Fd),
+    ExpectHeader = {some_atom, <<"a binary">>, 756},
+    ok = couch_file:write_header(Fd, ExpectHeader),
+
+    {ok, HeaderPos} = write_random_data(Fd),
+    ok = couch_file:write_header(Fd, {2342, <<"corruption! greed!">>}),
+
+    CheckFun(Fd, RawFd, {ok, ExpectHeader}, HeaderPos),
+
+    ok = file:close(RawFd),
+    ok = couch_file:close(Fd),
+    ok.
+
+write_random_data(Fd) ->
+    write_random_data(Fd, 100 + random:uniform(1000)).
+
+write_random_data(Fd, 0) ->
+    {ok, Bytes} = couch_file:bytes(Fd),
+    {ok, (1 + Bytes div sizeblock()) * sizeblock()};
+write_random_data(Fd, N) ->
+    Choices = [foo, bar, <<"bizzingle">>, "bank", ["rough", stuff]],
+    Term = lists:nth(random:uniform(4) + 1, Choices),
+    {ok, _, _} = couch_file:append_term(Fd, Term),
+    write_random_data(Fd, N-1).
+

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/src/test/etap/020-btree-basics.t
----------------------------------------------------------------------
diff --git a/src/test/etap/020-btree-basics.t b/src/test/etap/020-btree-basics.t
new file mode 100755
index 0000000..b0fb2d2
--- /dev/null
+++ b/src/test/etap/020-btree-basics.t
@@ -0,0 +1,265 @@
+#!/usr/bin/env escript
+%% -*- erlang -*-
+%%! -pa ./src/couchdb -sasl errlog_type error -boot start_sasl -noshell
+
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+%   http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+filename() -> test_util:build_file("test/etap/temp.020").
+rows() -> 250.
+
+-record(btree, {
+    fd,
+    root,
+    extract_kv,
+    assemble_kv,
+    less,
+    reduce,
+    compression
+}).
+
+main(_) ->
+    test_util:init_code_path(),
+    etap:plan(75),
+    case (catch test()) of
+        ok ->
+            etap:end_tests();
+        Other ->
+            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
+            etap:bail()
+    end,
+    ok.
+
+%% @todo Determine if this number should be greater to see if the btree was
+%% broken into multiple nodes. AKA "How do we appropriately detect if multiple
+%% nodes were created."
+test()->
+    Sorted = [{Seq, random:uniform()} || Seq <- lists:seq(1, rows())],
+    etap:ok(test_kvs(Sorted), "Testing sorted keys"),
+    etap:ok(test_kvs(lists:reverse(Sorted)), "Testing reversed sorted keys"),
+    etap:ok(test_kvs(shuffle(Sorted)), "Testing shuffled keys."),
+    ok.
+
+test_kvs(KeyValues) ->
+    ReduceFun = fun
+        (reduce, KVs) ->
+            length(KVs);
+        (rereduce, Reds) ->
+            lists:sum(Reds)
+    end,
+
+    Keys = [K || {K, _} <- KeyValues],
+
+    {ok, Fd} = couch_file:open(filename(), [create,overwrite]),
+    {ok, Btree} = couch_btree:open(nil, Fd, [{compression, none}]),
+    etap:ok(is_record(Btree, btree), "Created btree is really a btree record"),
+    etap:is(Btree#btree.fd, Fd, "Btree#btree.fd is set correctly."),
+    etap:is(Btree#btree.root, nil, "Btree#btree.root is set correctly."),
+    etap:is(0, couch_btree:size(Btree), "Empty btrees have a 0 size."),
+
+    Btree1 = couch_btree:set_options(Btree, [{reduce, ReduceFun}]),
+    etap:is(Btree1#btree.reduce, ReduceFun, "Reduce function was set"),
+    {ok, _, EmptyRes} = couch_btree:foldl(Btree1, fun(_, X) -> {ok, X+1} end, 0),
+    etap:is(EmptyRes, 0, "Folding over an empty btree"),
+
+    {ok, Btree2} = couch_btree:add_remove(Btree1, KeyValues, []),
+    etap:ok(test_btree(Btree2, KeyValues),
+        "Adding all keys at once returns a complete btree."),
+
+    etap:is((couch_btree:size(Btree2) > 0), true,
+            "Non empty btrees have a size > 0."),
+    etap:is((couch_btree:size(Btree2) =< couch_file:bytes(Fd)), true,
+            "Btree size is <= file size."),
+
+    etap:fun_is(
+        fun
+            ({ok, {kp_node, _}}) -> true;
+            (_) -> false
+        end,
+        couch_file:pread_term(Fd, element(1, Btree2#btree.root)),
+        "Btree root pointer is a kp_node."
+    ),
+
+    {ok, Btree3} = couch_btree:add_remove(Btree2, [], Keys),
+    etap:ok(test_btree(Btree3, []),
+        "Removing all keys at once returns an empty btree."),
+
+    etap:is(0, couch_btree:size(Btree3),
+            "After removing all keys btree size is 0."),
+
+    {Btree4, _} = lists:foldl(fun(KV, {BtAcc, PrevSize}) ->
+        {ok, BtAcc2} = couch_btree:add_remove(BtAcc, [KV], []),
+        case couch_btree:size(BtAcc2) > PrevSize of
+        true ->
+            ok;
+        false ->
+            etap:bail("After inserting a value, btree size did not increase.")
+        end,
+        {BtAcc2, couch_btree:size(BtAcc2)}
+    end, {Btree3, couch_btree:size(Btree3)}, KeyValues),
+
+    etap:ok(test_btree(Btree4, KeyValues),
+        "Adding all keys one at a time returns a complete btree."),
+    etap:is((couch_btree:size(Btree4) > 0), true,
+            "Non empty btrees have a size > 0."),
+
+    {Btree5, _} = lists:foldl(fun({K, _}, {BtAcc, PrevSize}) ->
+        {ok, BtAcc2} = couch_btree:add_remove(BtAcc, [], [K]),
+        case couch_btree:size(BtAcc2) < PrevSize of
+        true ->
+            ok;
+        false ->
+            etap:bail("After removing a key, btree size did not decrease.")
+        end,
+        {BtAcc2, couch_btree:size(BtAcc2)}
+    end, {Btree4, couch_btree:size(Btree4)}, KeyValues),
+    etap:ok(test_btree(Btree5, []),
+        "Removing all keys one at a time returns an empty btree."),
+    etap:is(0, couch_btree:size(Btree5),
+            "After removing all keys, one by one, btree size is 0."),
+
+    KeyValuesRev = lists:reverse(KeyValues),
+    {Btree6, _} = lists:foldl(fun(KV, {BtAcc, PrevSize}) ->
+        {ok, BtAcc2} = couch_btree:add_remove(BtAcc, [KV], []),
+        case couch_btree:size(BtAcc2) > PrevSize of
+        true ->
+            ok;
+        false ->
+            etap:is(false, true,
+                   "After inserting a value, btree size did not increase.")
+        end,
+        {BtAcc2, couch_btree:size(BtAcc2)}
+    end, {Btree5, couch_btree:size(Btree5)}, KeyValuesRev),
+    etap:ok(test_btree(Btree6, KeyValues),
+        "Adding all keys in reverse order returns a complete btree."),
+
+    {_, Rem2Keys0, Rem2Keys1} = lists:foldl(fun(X, {Count, Left, Right}) ->
+        case Count rem 2 == 0 of
+            true-> {Count+1, [X | Left], Right};
+            false -> {Count+1, Left, [X | Right]}
+        end
+    end, {0, [], []}, KeyValues),
+
+    etap:ok(test_add_remove(Btree6, Rem2Keys0, Rem2Keys1),
+        "Add/Remove every other key."),
+
+    etap:ok(test_add_remove(Btree6, Rem2Keys1, Rem2Keys0),
+        "Add/Remove opposite every other key."),
+
+    Size1 = couch_btree:size(Btree6),
+    {ok, Btree7} = couch_btree:add_remove(Btree6, [], [K||{K,_}<-Rem2Keys1]),
+    Size2 = couch_btree:size(Btree7),
+    etap:is((Size2 < Size1), true, "Btree size decreased"),
+    {ok, Btree8} = couch_btree:add_remove(Btree7, [], [K||{K,_}<-Rem2Keys0]),
+    Size3 = couch_btree:size(Btree8),
+    etap:is((Size3 < Size2), true, "Btree size decreased"),
+    etap:is(Size3, 0, "Empty btree has size 0."),
+    etap:ok(test_btree(Btree8, []),
+        "Removing both halves of every other key returns an empty btree."),
+
+    %% Third chunk (close out)
+    etap:is(couch_file:close(Fd), ok, "closing out"),
+    true.
+
+test_btree(Btree, KeyValues) ->
+    ok = test_key_access(Btree, KeyValues),
+    ok = test_lookup_access(Btree, KeyValues),
+    ok = test_final_reductions(Btree, KeyValues),
+    ok = test_traversal_callbacks(Btree, KeyValues),
+    true.
+
+test_add_remove(Btree, OutKeyValues, RemainingKeyValues) ->
+    Btree2 = lists:foldl(fun({K, _}, BtAcc) ->
+        {ok, BtAcc2} = couch_btree:add_remove(BtAcc, [], [K]),
+        BtAcc2
+    end, Btree, OutKeyValues),
+    true = test_btree(Btree2, RemainingKeyValues),
+
+    Btree3 = lists:foldl(fun(KV, BtAcc) ->
+        {ok, BtAcc2} = couch_btree:add_remove(BtAcc, [KV], []),
+        BtAcc2
+    end, Btree2, OutKeyValues),
+    true = test_btree(Btree3, OutKeyValues ++ RemainingKeyValues).
+
+test_key_access(Btree, List) ->
+    FoldFun = fun(Element, {[HAcc|TAcc], Count}) ->
+        case Element == HAcc of
+            true -> {ok, {TAcc, Count + 1}};
+            _ -> {ok, {TAcc, Count + 1}}
+        end
+    end,
+    Length = length(List),
+    Sorted = lists:sort(List),
+    {ok, _, {[], Length}} = couch_btree:foldl(Btree, FoldFun, {Sorted, 0}),
+    {ok, _, {[], Length}} = couch_btree:fold(Btree, FoldFun, {Sorted, 0}, [{dir, rev}]),
+    ok.
+
+test_lookup_access(Btree, KeyValues) ->
+    FoldFun = fun({Key, Value}, {Key, Value}) -> {stop, true} end,
+    lists:foreach(fun({Key, Value}) ->
+        [{ok, {Key, Value}}] = couch_btree:lookup(Btree, [Key]),
+        {ok, _, true} = couch_btree:foldl(Btree, FoldFun, {Key, Value}, [{start_key, Key}])
+    end, KeyValues).
+
+test_final_reductions(Btree, KeyValues) ->
+    KVLen = length(KeyValues),
+    FoldLFun = fun(_X, LeadingReds, Acc) ->
+        CountToStart = KVLen div 3 + Acc,
+        CountToStart = couch_btree:final_reduce(Btree, LeadingReds),
+        {ok, Acc+1}
+    end,
+    FoldRFun = fun(_X, LeadingReds, Acc) ->
+        CountToEnd = KVLen - KVLen div 3 + Acc,
+        CountToEnd = couch_btree:final_reduce(Btree, LeadingReds),
+        {ok, Acc+1}
+    end,
+    {LStartKey, _} = case KVLen of
+        0 -> {nil, nil};
+        _ -> lists:nth(KVLen div 3 + 1, lists:sort(KeyValues))
+    end,
+    {RStartKey, _} = case KVLen of
+        0 -> {nil, nil};
+        _ -> lists:nth(KVLen div 3, lists:sort(KeyValues))
+    end,
+    {ok, _, FoldLRed} = couch_btree:foldl(Btree, FoldLFun, 0, [{start_key, LStartKey}]),
+    {ok, _, FoldRRed} = couch_btree:fold(Btree, FoldRFun, 0, [{dir, rev}, {start_key, RStartKey}]),
+    KVLen = FoldLRed + FoldRRed,
+    ok.
+
+test_traversal_callbacks(Btree, _KeyValues) ->
+    FoldFun =
+    fun
+        (visit, _GroupedKey, _Unreduced, Acc) ->
+            {ok, Acc andalso false};
+        (traverse, _LK, _Red, Acc) ->
+            {skip, Acc andalso true}
+    end,
+    % With 250 items the root is a kp. Always skipping should reduce to true.
+    {ok, _, true} = couch_btree:fold(Btree, FoldFun, true, [{dir, fwd}]),
+    ok.
+
+shuffle(List) ->
+   randomize(round(math:log(length(List)) + 0.5), List).
+
+randomize(1, List) ->
+   randomize(List);
+randomize(T, List) ->
+    lists:foldl(fun(_E, Acc) ->
+        randomize(Acc)
+    end, randomize(List), lists:seq(1, (T - 1))).
+
+randomize(List) ->
+    D = lists:map(fun(A) ->
+        {random:uniform(), A}
+    end, List),
+    {_, D1} = lists:unzip(lists:keysort(1, D)),
+    D1.

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/src/test/etap/021-btree-reductions.t
----------------------------------------------------------------------
diff --git a/src/test/etap/021-btree-reductions.t b/src/test/etap/021-btree-reductions.t
new file mode 100755
index 0000000..e80ac2d
--- /dev/null
+++ b/src/test/etap/021-btree-reductions.t
@@ -0,0 +1,237 @@
+#!/usr/bin/env escript
+%% -*- erlang -*-
+%%! -pa ./src/couchdb -sasl errlog_type error -boot start_sasl -noshell
+
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+%   http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+filename() -> "./test/etap/temp.021".
+rows() -> 1000.
+
+main(_) ->
+    test_util:init_code_path(),
+    etap:plan(20),
+    case (catch test()) of
+        ok ->
+            etap:end_tests();
+        Other ->
+            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
+            etap:bail()
+    end,
+    ok.
+
+test()->
+    ReduceFun = fun
+        (reduce, KVs) -> length(KVs);
+        (rereduce, Reds) -> lists:sum(Reds)
+    end,
+
+    {ok, Fd} = couch_file:open(filename(), [create,overwrite]),
+    {ok, Btree} = couch_btree:open(nil, Fd, [{reduce, ReduceFun}]),
+
+    % Create a list, of {"even", Value} or {"odd", Value} pairs.
+    {_, EvenOddKVs} = lists:foldl(fun(Idx, {Key, Acc}) ->
+        case Key of
+            "even" -> {"odd", [{{Key, Idx}, 1} | Acc]};
+            _ -> {"even", [{{Key, Idx}, 1} | Acc]}
+        end
+    end, {"odd", []}, lists:seq(1, rows())),
+
+    {ok, Btree2} = couch_btree:add_remove(Btree, EvenOddKVs, []),
+
+    GroupFun = fun({K1, _}, {K2, _}) -> K1 == K2 end,
+    FoldFun = fun(GroupedKey, Unreduced, Acc) ->
+        {ok, [{GroupedKey, couch_btree:final_reduce(Btree2, Unreduced)} | Acc]}
+    end,
+
+    {SK1, EK1} = {{"even", -1}, {"even", foo}},
+    {SK2, EK2} = {{"odd", -1}, {"odd", foo}},
+
+    etap:fun_is(
+        fun
+            ({ok, [{{"odd", _}, 500}, {{"even", _}, 500}]}) ->
+                true;
+            (_) ->
+                false
+        end,
+        couch_btree:fold_reduce(Btree2, FoldFun, [], [{key_group_fun, GroupFun}]),
+        "Reduction works with no specified direction, startkey, or endkey."
+    ),
+
+    etap:fun_is(
+        fun
+            ({ok, [{{"odd", _}, 500}, {{"even", _}, 500}]}) ->
+                true;
+            (_) ->
+                false
+        end,
+        couch_btree:fold_reduce(Btree2, FoldFun, [], [{key_group_fun, GroupFun}, {dir, fwd}]),
+        "Reducing forward works with no startkey or endkey."
+    ),
+
+    etap:fun_is(
+        fun
+            ({ok, [{{"even", _}, 500}, {{"odd", _}, 500}]}) ->
+                true;
+            (_) ->
+                false
+        end,
+        couch_btree:fold_reduce(Btree2, FoldFun, [], [{key_group_fun, GroupFun}, {dir, rev}]),
+        "Reducing backwards works with no startkey or endkey."
+    ),
+
+    etap:fun_is(
+        fun
+            ({ok, [{{"odd", _}, 500}, {{"even", _}, 500}]}) ->
+                true;
+            (_) ->
+                false
+        end,
+        couch_btree:fold_reduce(Btree2, FoldFun, [], [{dir, fwd}, {key_group_fun, GroupFun}, {start_key, SK1}, {end_key, EK2}]),
+        "Reducing works over the entire range with startkey and endkey set."
+    ),
+
+    etap:fun_is(
+        fun
+            ({ok, [{{"even", _}, 500}]}) -> true;
+            (_) -> false
+        end,
+        couch_btree:fold_reduce(Btree2, FoldFun, [], [{dir, fwd}, {key_group_fun, GroupFun}, {start_key, SK1}, {end_key, EK1}]),
+        "Reducing forward over first half works with a startkey and endkey."
+    ),
+
+    etap:fun_is(
+        fun
+            ({ok, [{{"odd", _}, 500}]}) -> true;
+            (_) -> false
+        end,
+        couch_btree:fold_reduce(Btree2, FoldFun, [], [{dir, fwd}, {key_group_fun, GroupFun}, {start_key, SK2}, {end_key, EK2}]),
+        "Reducing forward over second half works with second startkey and endkey"
+    ),
+
+    etap:fun_is(
+        fun
+            ({ok, [{{"odd", _}, 500}]}) -> true;
+            (_) -> false
+        end,
+        couch_btree:fold_reduce(Btree2, FoldFun, [], [{dir, rev}, {key_group_fun, GroupFun}, {start_key, EK2}, {end_key, SK2}]),
+        "Reducing in reverse works after swapping the startkey and endkey."
+    ),
+
+    etap:fun_is(
+        fun
+            ({ok, [{{"even", _}, 500}, {{"odd", _}, 500}]}) ->
+                true;
+            (_) ->
+                false
+        end,
+        couch_btree:fold_reduce(Btree2, FoldFun, [], [{dir, rev}, {key_group_fun, GroupFun}, {start_key, EK2}, {end_key, SK1}]),
+        "Reducing in reverse results in reversed accumulator."
+    ),
+
+    etap:is(
+        couch_btree:fold_reduce(Btree2, FoldFun, [], [
+            {dir, fwd}, {key_group_fun, GroupFun},
+            {start_key, {"even", 0}}, {end_key, {"odd", rows() + 1}}
+        ]),
+        {ok, [{{"odd", 1}, 500}, {{"even", 2}, 500}]},
+        "Right fold reduce value for whole range with inclusive end key"),
+
+    etap:is(
+        couch_btree:fold_reduce(Btree2, FoldFun, [], [
+            {dir, fwd}, {key_group_fun, GroupFun},
+            {start_key, {"even", 0}}, {end_key_gt, {"odd", 999}}
+        ]),
+        {ok, [{{"odd", 1}, 499}, {{"even", 2}, 500}]},
+        "Right fold reduce value for whole range without inclusive end key"),
+
+    etap:is(
+        couch_btree:fold_reduce(Btree2, FoldFun, [], [
+            {dir, rev}, {key_group_fun, GroupFun},
+            {start_key, {"odd", 999}}, {end_key, {"even", 2}}
+        ]),
+        {ok, [{{"even", 1000}, 500}, {{"odd", 999}, 500}]},
+        "Right fold reduce value for whole reversed range with inclusive end key"),
+
+    etap:is(
+        couch_btree:fold_reduce(Btree2, FoldFun, [], [
+            {dir, rev}, {key_group_fun, GroupFun},
+            {start_key, {"odd", 999}}, {end_key_gt, {"even", 2}}
+        ]),
+        {ok, [{{"even", 1000}, 499}, {{"odd", 999}, 500}]},
+        "Right fold reduce value for whole reversed range without inclusive end key"),
+
+    etap:is(
+        couch_btree:fold_reduce(Btree2, FoldFun, [], [
+            {dir, fwd}, {key_group_fun, GroupFun},
+            {start_key, {"even", 0}}, {end_key, {"odd", 499}}
+        ]),
+        {ok, [{{"odd", 1}, 250}, {{"even", 2}, 500}]},
+        "Right fold reduce value for first half with inclusive end key"),
+
+    etap:is(
+        couch_btree:fold_reduce(Btree2, FoldFun, [], [
+            {dir, fwd}, {key_group_fun, GroupFun},
+            {start_key, {"even", 0}}, {end_key_gt, {"odd", 499}}
+        ]),
+        {ok, [{{"odd", 1}, 249}, {{"even", 2}, 500}]},
+        "Right fold reduce value for first half without inclusive end key"),
+
+    etap:is(
+        couch_btree:fold_reduce(Btree2, FoldFun, [], [
+            {dir, rev}, {key_group_fun, GroupFun},
+            {start_key, {"odd", 999}}, {end_key, {"even", 500}}
+        ]),
+        {ok, [{{"even", 1000}, 251}, {{"odd", 999}, 500}]},
+        "Right fold reduce value for first half reversed with inclusive end key"),
+
+    etap:is(
+        couch_btree:fold_reduce(Btree2, FoldFun, [], [
+            {dir, rev}, {key_group_fun, GroupFun},
+            {start_key, {"odd", 999}}, {end_key_gt, {"even", 500}}
+        ]),
+        {ok, [{{"even", 1000}, 250}, {{"odd", 999}, 500}]},
+        "Right fold reduce value for first half reversed without inclusive end key"),
+
+    etap:is(
+        couch_btree:fold_reduce(Btree2, FoldFun, [], [
+            {dir, fwd}, {key_group_fun, GroupFun},
+            {start_key, {"even", 500}}, {end_key, {"odd", 999}}
+        ]),
+        {ok, [{{"odd", 1}, 500}, {{"even", 500}, 251}]},
+        "Right fold reduce value for second half with inclusive end key"),
+
+    etap:is(
+        couch_btree:fold_reduce(Btree2, FoldFun, [], [
+            {dir, fwd}, {key_group_fun, GroupFun},
+            {start_key, {"even", 500}}, {end_key_gt, {"odd", 999}}
+        ]),
+        {ok, [{{"odd", 1}, 499}, {{"even", 500}, 251}]},
+        "Right fold reduce value for second half without inclusive end key"),
+
+    etap:is(
+        couch_btree:fold_reduce(Btree2, FoldFun, [], [
+            {dir, rev}, {key_group_fun, GroupFun},
+            {start_key, {"odd", 501}}, {end_key, {"even", 2}}
+        ]),
+        {ok, [{{"even", 1000}, 500}, {{"odd", 501}, 251}]},
+        "Right fold reduce value for second half reversed with inclusive end key"),
+
+    etap:is(
+        couch_btree:fold_reduce(Btree2, FoldFun, [], [
+            {dir, rev}, {key_group_fun, GroupFun},
+            {start_key, {"odd", 501}}, {end_key_gt, {"even", 2}}
+        ]),
+        {ok, [{{"even", 1000}, 499}, {{"odd", 501}, 251}]},
+        "Right fold reduce value for second half reversed without inclusive end key"),
+
+    couch_file:close(Fd).

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/src/test/etap/030-doc-from-json.t
----------------------------------------------------------------------
diff --git a/src/test/etap/030-doc-from-json.t b/src/test/etap/030-doc-from-json.t
new file mode 100755
index 0000000..b0c393e
--- /dev/null
+++ b/src/test/etap/030-doc-from-json.t
@@ -0,0 +1,236 @@
+#!/usr/bin/env escript
+%% -*- erlang -*-
+%%! -pa ./src/couchdb -pa ./src/mochiweb -sasl errlog_type false -noshell
+
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+%   http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+%% XXX: Figure out how to -include("couch_db.hrl")
+-record(doc, {id= <<"">>, revs={0, []}, body={[]},
+            atts=[], deleted=false, meta=[]}).
+-record(att, {name, type, att_len, disk_len, md5= <<>>, revpos=0, data,
+            encoding=identity}).
+
+main(_) ->
+    test_util:init_code_path(),
+    etap:plan(26),
+    case (catch test()) of
+        ok ->
+            etap:end_tests();
+        Other ->
+            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
+            etap:bail()
+    end,
+    ok.
+
+test() ->
+    couch_config:start_link(test_util:config_files()),
+    couch_config:set("attachments", "compression_level", "0", false),
+    ok = test_from_json_success(),
+    ok = test_from_json_errors(),
+    ok.
+
+test_from_json_success() ->
+    Cases = [
+        {
+            {[]},
+            #doc{},
+            "Return an empty document for an empty JSON object."
+        },
+        {
+            {[{<<"_id">>, <<"zing!">>}]},
+            #doc{id= <<"zing!">>},
+            "Parses document ids."
+        },
+        {
+            {[{<<"_id">>, <<"_design/foo">>}]},
+            #doc{id= <<"_design/foo">>},
+            "_design/document ids."
+        },
+        {
+            {[{<<"_id">>, <<"_local/bam">>}]},
+            #doc{id= <<"_local/bam">>},
+            "_local/document ids."
+        },
+        {
+            {[{<<"_rev">>, <<"4-230234">>}]},
+            #doc{revs={4, [<<"230234">>]}},
+            "_rev stored in revs."
+        },
+        {
+            {[{<<"soap">>, 35}]},
+            #doc{body={[{<<"soap">>, 35}]}},
+            "Non underscore prefixed fields stored in body."
+        },
+        {
+            {[{<<"_attachments">>, {[
+                {<<"my_attachment.fu">>, {[
+                    {<<"stub">>, true},
+                    {<<"content_type">>, <<"application/awesome">>},
+                    {<<"length">>, 45}
+                ]}},
+                {<<"noahs_private_key.gpg">>, {[
+                    {<<"data">>, <<"SSBoYXZlIGEgcGV0IGZpc2gh">>},
+                    {<<"content_type">>, <<"application/pgp-signature">>}
+                ]}}
+            ]}}]},
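+            % the inline "data" values above are base64; e.g. <<"SSBoYXZlIGEgcGV0IGZpc2gh">>
+            % decodes to <<"I have a pet fish!">>, which is what lands in the #att record below.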
+            #doc{atts=[
+                #att{
+                    name = <<"my_attachment.fu">>,
+                    data = stub,
+                    type = <<"application/awesome">>,
+                    att_len = 45,
+                    disk_len = 45,
+                    revpos = nil
+                },
+                #att{
+                    name = <<"noahs_private_key.gpg">>,
+                    data = <<"I have a pet fish!">>,
+                    type = <<"application/pgp-signature">>,
+                    att_len = 18,
+                    disk_len = 18,
+                    revpos = 0
+                }
+            ]},
+            "Attachments are parsed correctly."
+        },
+        {
+            {[{<<"_deleted">>, true}]},
+            #doc{deleted=true},
+            "_deleted controls the deleted field."
+        },
+        {
+            {[{<<"_deleted">>, false}]},
+            #doc{},
+            "{\"_deleted\": false} is ok."
+        },
+        {
+            {[
+                {<<"_revisions">>, {[
+                    {<<"start">>, 4},
+                    {<<"ids">>, [<<"foo1">>, <<"phi3">>, <<"omega">>]}
+                ]}},
+                {<<"_rev">>, <<"6-something">>}
+            ]},
+            #doc{revs={4, [<<"foo1">>, <<"phi3">>, <<"omega">>]}},
+            "_revisions attribute are preferred to _rev."
+        },
+        {
+            {[{<<"_revs_info">>, dropping}]},
+            #doc{},
+            "Drops _revs_info."
+        },
+        {
+            {[{<<"_local_seq">>, dropping}]},
+            #doc{},
+            "Drops _local_seq."
+        },
+        {
+            {[{<<"_conflicts">>, dropping}]},
+            #doc{},
+            "Drops _conflicts."
+        },
+        {
+            {[{<<"_deleted_conflicts">>, dropping}]},
+            #doc{},
+            "Drops _deleted_conflicts."
+        }
+    ],
+
+    lists:foreach(fun({EJson, Expect, Mesg}) ->
+        etap:is(couch_doc:from_json_obj(EJson), Expect, Mesg)
+    end, Cases),
+    ok.
+
+test_from_json_errors() ->
+    Cases = [
+        {
+            [],
+            {bad_request, "Document must be a JSON object"},
+            "arrays are invalid"
+        },
+        {
+            4,
+            {bad_request, "Document must be a JSON object"},
+            "integers are invalid"
+        },
+        {
+            true,
+            {bad_request, "Document must be a JSON object"},
+            "literals are invalid"
+        },
+        {
+            {[{<<"_id">>, {[{<<"foo">>, 5}]}}]},
+            {bad_request, <<"Document id must be a string">>},
+            "Document id must be a string."
+        },
+        {
+            {[{<<"_id">>, <<"_random">>}]},
+            {bad_request,
+                <<"Only reserved document ids may start with underscore.">>},
+            "Disallow arbitrary underscore prefixed docids."
+        },
+        {
+            {[{<<"_rev">>, 5}]},
+            {bad_request, <<"Invalid rev format">>},
+            "_rev must be a string"
+        },
+        {
+            {[{<<"_rev">>, "foobar"}]},
+            {bad_request, <<"Invalid rev format">>},
+            "_rev must be %d-%s"
+        },
+        {
+            {[{<<"_rev">>, "foo-bar"}]},
+            "Error if _rev's integer expection is broken."
+        },
+        {
+            {[{<<"_revisions">>, {[{<<"start">>, true}]}}]},
+            {doc_validation, "_revisions.start isn't an integer."},
+            "_revisions.start must be an integer."
+        },
+        {
+            {[{<<"_revisions">>, {[
+                {<<"start">>, 0},
+                {<<"ids">>, 5}
+            ]}}]},
+            {doc_validation, "_revisions.ids isn't a array."},
+            "_revions.ids must be a list."
+        },
+        {
+            {[{<<"_revisions">>, {[
+                {<<"start">>, 0},
+                {<<"ids">>, [5]}
+            ]}}]},
+            {doc_validation, "RevId isn't a string"},
+            "Revision ids must be strings."
+        },
+        {
+            {[{<<"_something">>, 5}]},
+            {doc_validation, <<"Bad special document member: _something">>},
+            "Underscore prefix fields are reserved."
+        }
+    ],
+
+    lists:foreach(fun
+        ({EJson, Expect, Mesg}) ->
+            Error = (catch couch_doc:from_json_obj(EJson)),
+            etap:is(Error, Expect, Mesg);
+        ({EJson, Mesg}) ->
+            try
+                couch_doc:from_json_obj(EJson),
+                etap:ok(false, "Conversion failed to raise an exception.")
+            catch
+                _:_ -> etap:ok(true, Mesg)
+            end
+    end, Cases),
+    ok.

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/src/test/etap/031-doc-to-json.t
----------------------------------------------------------------------
diff --git a/src/test/etap/031-doc-to-json.t b/src/test/etap/031-doc-to-json.t
new file mode 100755
index 0000000..ce950f9
--- /dev/null
+++ b/src/test/etap/031-doc-to-json.t
@@ -0,0 +1,197 @@
+#!/usr/bin/env escript
+%% -*- erlang -*-
+%%! -pa ./src/couchdb -pa ./src/mochiweb -sasl errlog_type false -noshell
+
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+%   http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+%% XXX: Figure out how to -include("couch_db.hrl")
+-record(doc, {id= <<"">>, revs={0, []}, body={[]},
+            atts=[], deleted=false, meta=[]}).
+-record(att, {name, type, att_len, disk_len, md5= <<>>, revpos=0, data,
+            encoding=identity}).
+
+main(_) ->
+    test_util:init_code_path(),
+    etap:plan(12),
+    case (catch test()) of
+        ok ->
+            etap:end_tests();
+        Other ->
+            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
+            etap:bail()
+    end,
+    ok.
+
+test() ->
+    couch_config:start_link(test_util:config_files()),
+    couch_config:set("attachments", "compression_level", "0", false),
+    ok = test_to_json_success(),
+    ok.
+
+test_to_json_success() ->
+    Cases = [
+        {
+            #doc{},
+            {[{<<"_id">>, <<"">>}]},
+            "Empty docs are {\"_id\": \"\"}"
+        },
+        {
+            #doc{id= <<"foo">>},
+            {[{<<"_id">>, <<"foo">>}]},
+            "_id is added."
+        },
+        {
+            #doc{revs={5, ["foo"]}},
+            {[{<<"_id">>, <<>>}, {<<"_rev">>, <<"5-foo">>}]},
+            "_rev is added."
+        },
+        {
+            [revs],
+            #doc{revs={5, [<<"first">>, <<"second">>]}},
+            {[
+                {<<"_id">>, <<>>},
+                {<<"_rev">>, <<"5-first">>},
+                {<<"_revisions">>, {[
+                    {<<"start">>, 5},
+                    {<<"ids">>, [<<"first">>, <<"second">>]}
+                ]}}
+            ]},
+            "_revisions include with revs option"
+        },
+        {
+            #doc{body={[{<<"foo">>, <<"bar">>}]}},
+            {[{<<"_id">>, <<>>}, {<<"foo">>, <<"bar">>}]},
+            "Arbitrary fields are added."
+        },
+        {
+            #doc{deleted=true, body={[{<<"foo">>, <<"bar">>}]}},
+            {[{<<"_id">>, <<>>}, {<<"foo">>, <<"bar">>}, {<<"_deleted">>, true}]},
+            "Deleted docs no longer drop body members."
+        },
+        {
+            #doc{meta=[
+                {revs_info, 4, [{<<"fin">>, deleted}, {<<"zim">>, missing}]}
+            ]},
+            {[
+                {<<"_id">>, <<>>},
+                {<<"_revs_info">>, [
+                    {[{<<"rev">>, <<"4-fin">>}, {<<"status">>, <<"deleted">>}]},
+                    {[{<<"rev">>, <<"3-zim">>}, {<<"status">>, <<"missing">>}]}
+                ]}
+            ]},
+            "_revs_info field is added correctly."
+        },
+        {
+            #doc{meta=[{local_seq, 5}]},
+            {[{<<"_id">>, <<>>}, {<<"_local_seq">>, 5}]},
+            "_local_seq is added as an integer."
+        },
+        {
+            #doc{meta=[{conflicts, [{3, <<"yep">>}, {1, <<"snow">>}]}]},
+            {[
+                {<<"_id">>, <<>>},
+                {<<"_conflicts">>, [<<"3-yep">>, <<"1-snow">>]}
+            ]},
+            "_conflicts is added as an array of strings."
+        },
+        {
+            #doc{meta=[{deleted_conflicts, [{10923, <<"big_cowboy_hat">>}]}]},
+            {[
+                {<<"_id">>, <<>>},
+                {<<"_deleted_conflicts">>, [<<"10923-big_cowboy_hat">>]}
+            ]},
+            "_deleted_conflicsts is added as an array of strings."
+        },
+        {
+            #doc{atts=[
+                #att{
+                    name = <<"big.xml">>, 
+                    type = <<"xml/sucks">>, 
+                    data = fun() -> ok end,
+                    revpos = 1,
+                    att_len = 400,
+                    disk_len = 400
+                },
+                #att{
+                    name = <<"fast.json">>, 
+                    type = <<"json/ftw">>, 
+                    data = <<"{\"so\": \"there!\"}">>,
+                    revpos = 1,
+                    att_len = 16,
+                    disk_len = 16
+                }
+            ]},
+            {[
+                {<<"_id">>, <<>>},
+                {<<"_attachments">>, {[
+                    {<<"big.xml">>, {[
+                        {<<"content_type">>, <<"xml/sucks">>},
+                        {<<"revpos">>, 1},
+                        {<<"length">>, 400},
+                        {<<"stub">>, true}
+                    ]}},
+                    {<<"fast.json">>, {[
+                        {<<"content_type">>, <<"json/ftw">>},
+                        {<<"revpos">>, 1},
+                        {<<"length">>, 16},
+                        {<<"stub">>, true}
+                    ]}}
+                ]}}
+            ]},
+            "Attachments attached as stubs only include a length."
+        },
+        {
+            [attachments],
+            #doc{atts=[
+                #att{
+                    name = <<"stuff.txt">>,
+                    type = <<"text/plain">>,
+                    data = fun() -> <<"diet pepsi">> end,
+                    revpos = 1,
+                    att_len = 10,
+                    disk_len = 10
+                },
+                #att{
+                    name = <<"food.now">>,
+                    type = <<"application/food">>,
+                    revpos = 1,
+                    data = <<"sammich">>
+                }
+            ]},
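+            % with the [attachments] option the bodies are inlined base64-encoded:
+            % <<"diet pepsi">> -> <<"ZGlldCBwZXBzaQ==">>, <<"sammich">> -> <<"c2FtbWljaA==">>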
+            {[
+                {<<"_id">>, <<>>},
+                {<<"_attachments">>, {[
+                    {<<"stuff.txt">>, {[
+                        {<<"content_type">>, <<"text/plain">>},
+                        {<<"revpos">>, 1},
+                        {<<"data">>, <<"ZGlldCBwZXBzaQ==">>}
+                    ]}},
+                    {<<"food.now">>, {[
+                        {<<"content_type">>, <<"application/food">>},
+                        {<<"revpos">>, 1},
+                        {<<"data">>, <<"c2FtbWljaA==">>}
+                    ]}}
+                ]}}
+            ]},
+            "Attachments included inline with attachments option."
+        }
+    ],
+
+    lists:foreach(fun
+        ({Doc, EJson, Mesg}) ->
+            etap:is(couch_doc:to_json_obj(Doc, []), EJson, Mesg);
+        ({Options, Doc, EJson, Mesg}) ->
+            etap:is(couch_doc:to_json_obj(Doc, Options), EJson, Mesg)
+    end, Cases),
+    ok.
+

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/src/test/etap/040-util.t
----------------------------------------------------------------------
diff --git a/src/test/etap/040-util.t b/src/test/etap/040-util.t
new file mode 100755
index 0000000..d57a32e
--- /dev/null
+++ b/src/test/etap/040-util.t
@@ -0,0 +1,80 @@
+#!/usr/bin/env escript
+%% -*- erlang -*-
+
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+%   http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+main(_) ->
+    test_util:init_code_path(),
+    application:start(crypto),
+
+    etap:plan(14),
+    case (catch test()) of
+        ok ->
+            etap:end_tests();
+        Other ->
+            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
+            etap:bail(Other)
+    end,
+    ok.
+
+test() ->
+    % to_existing_atom
+    etap:is(true, couch_util:to_existing_atom(true), "An atom is an atom."),
+    etap:is(foo, couch_util:to_existing_atom(<<"foo">>),
+        "A binary foo is the atom foo."),
+    etap:is(foobarbaz, couch_util:to_existing_atom("foobarbaz"),
+        "A list of atoms is one munged atom."),
+
+    % implode
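+    % ("&" is [38], so the separator's character codes are spliced between elements)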
+    etap:is([1, 38, 2, 38, 3], couch_util:implode([1,2,3],"&"),
+        "use & as separator in list."),
+
+    % trim
+    Strings = [" foo", "foo ", "\tfoo", " foo ", "foo\t", "foo\n", "\nfoo"],
+    etap:ok(lists:all(fun(S) -> couch_util:trim(S) == "foo" end, Strings),
+        "everything here trimmed should be foo."),
+
+    % abs_pathname
+    {ok, Cwd} = file:get_cwd(),
+    etap:is(Cwd ++ "/foo", couch_util:abs_pathname("./foo"),
+        "foo is in this directory."),
+
+    % should_flush
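+    % should_flush/0 reports whether the current process is using enough memory
+    % to warrant flushing; per the assertions below, checking it is also expected
+    % to trigger a garbage collection.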
+    etap:ok(not couch_util:should_flush(),
+        "Not using enough memory to flush."),
+    AcquireMem = fun() ->
+        _IntsToAGazillion = lists:seq(1, 200000),
+        _LotsOfData = lists:map(
+            fun(Int) -> {Int, <<"foobar">>} end,
+        lists:seq(1, 500000)),
+        etap:ok(couch_util:should_flush(),
+            "Allocation 200K tuples puts us above the memory threshold.")
+    end,
+    AcquireMem(),
+
+    etap:ok(not couch_util:should_flush(),
+        "Checking to flush invokes GC."),
+
+    % verify
+    etap:is(true, couch_util:verify("It4Vooya", "It4Vooya"),
+         "String comparison."),
+    etap:is(false, couch_util:verify("It4VooyaX", "It4Vooya"),
+         "String comparison (unequal lengths)."),
+    etap:is(true, couch_util:verify(<<"ahBase3r">>, <<"ahBase3r">>),
+        "Binary comparison."),
+    etap:is(false, couch_util:verify(<<"ahBase3rX">>, <<"ahBase3r">>),
+        "Binary comparison (unequal lengths)."),
+    etap:is(false, couch_util:verify(nil, <<"ahBase3r">>),
+        "Binary comparison with atom."),
+
+    ok.

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/src/test/etap/041-uuid-gen-id.ini
----------------------------------------------------------------------
diff --git a/src/test/etap/041-uuid-gen-id.ini b/src/test/etap/041-uuid-gen-id.ini
new file mode 100644
index 0000000..6886efd
--- /dev/null
+++ b/src/test/etap/041-uuid-gen-id.ini
@@ -0,0 +1,20 @@
+; Licensed to the Apache Software Foundation (ASF) under one
+; or more contributor license agreements.  See the NOTICE file
+; distributed with this work for additional information
+; regarding copyright ownership.  The ASF licenses this file
+; to you under the Apache License, Version 2.0 (the
+; "License"); you may not use this file except in compliance
+; with the License.  You may obtain a copy of the License at
+;
+;   http://www.apache.org/licenses/LICENSE-2.0
+;
+; Unless required by applicable law or agreed to in writing,
+; software distributed under the License is distributed on an
+; "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+; KIND, either express or implied.  See the License for the
+; specific language governing permissions and limitations
+; under the License.
+
+[uuids]
+algorithm = utc_id
+utc_id_suffix = bozo

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/src/test/etap/041-uuid-gen-seq.ini
----------------------------------------------------------------------
diff --git a/src/test/etap/041-uuid-gen-seq.ini b/src/test/etap/041-uuid-gen-seq.ini
new file mode 100644
index 0000000..94cebc6
--- /dev/null
+++ b/src/test/etap/041-uuid-gen-seq.ini
@@ -0,0 +1,19 @@
+; Licensed to the Apache Software Foundation (ASF) under one
+; or more contributor license agreements.  See the NOTICE file
+; distributed with this work for additional information
+; regarding copyright ownership.  The ASF licenses this file
+; to you under the Apache License, Version 2.0 (the
+; "License"); you may not use this file except in compliance
+; with the License.  You may obtain a copy of the License at
+; 
+;   http://www.apache.org/licenses/LICENSE-2.0
+;
+; Unless required by applicable law or agreed to in writing,
+; software distributed under the License is distributed on an
+; "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+; KIND, either express or implied.  See the License for the
+; specific language governing permissions and limitations
+; under the License.
+
+[uuids]
+algorithm = sequential

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/src/test/etap/041-uuid-gen-utc.ini
----------------------------------------------------------------------
diff --git a/src/test/etap/041-uuid-gen-utc.ini b/src/test/etap/041-uuid-gen-utc.ini
new file mode 100644
index 0000000..c2b8383
--- /dev/null
+++ b/src/test/etap/041-uuid-gen-utc.ini
@@ -0,0 +1,19 @@
+; Licensed to the Apache Software Foundation (ASF) under one
+; or more contributor license agreements.  See the NOTICE file
+; distributed with this work for additional information
+; regarding copyright ownership.  The ASF licenses this file
+; to you under the Apache License, Version 2.0 (the
+; "License"); you may not use this file except in compliance
+; with the License.  You may obtain a copy of the License at
+; 
+;   http://www.apache.org/licenses/LICENSE-2.0
+;
+; Unless required by applicable law or agreed to in writing,
+; software distributed under the License is distributed on an
+; "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+; KIND, either express or implied.  See the License for the
+; specific language governing permissions and limitations
+; under the License.
+
+[uuids]
+algorithm = utc_random

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/src/test/etap/041-uuid-gen.t
----------------------------------------------------------------------
diff --git a/src/test/etap/041-uuid-gen.t b/src/test/etap/041-uuid-gen.t
new file mode 100755
index 0000000..7234969
--- /dev/null
+++ b/src/test/etap/041-uuid-gen.t
@@ -0,0 +1,147 @@
+#!/usr/bin/env escript
+%% -*- erlang -*-
+
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+%   http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+default_config() ->
+    test_util:build_file("etc/couchdb/default_dev.ini").
+
+seq_alg_config() ->
+    test_util:source_file("test/etap/041-uuid-gen-seq.ini").
+
+utc_alg_config() ->
+    test_util:source_file("test/etap/041-uuid-gen-utc.ini").
+
+utc_id_alg_config() ->
+    test_util:source_file("test/etap/041-uuid-gen-id.ini").
+
+% Run tests and wait for the gen_servers to shutdown
+run_test(IniFiles, Test) ->
+    {ok, Pid} = couch_config:start_link(IniFiles),
+    erlang:monitor(process, Pid),
+    couch_uuids:start(),
+    Test(),
+    couch_uuids:stop(),
+    couch_config:stop(),
+    receive
+        {'DOWN', _, _, Pid, _} -> ok;
+        _Other -> etap:diag("OTHER: ~p~n", [_Other])
+    after
+        1000 -> throw({timeout_error, config_stop})
+    end.
+
+main(_) ->
+    test_util:init_code_path(),
+    application:start(crypto),
+    etap:plan(9),
+
+    case (catch test()) of
+        ok ->
+            etap:end_tests();
+        Other ->
+            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
+            etap:bail(Other)
+    end,
+    ok.
+
+test() ->
+
+    TestUnique = fun() ->
+        etap:is(
+            test_unique(10000, couch_uuids:new()),
+            true,
+            "Can generate 10K unique IDs"
+        )
+    end,
+    run_test([default_config()], TestUnique),
+    run_test([default_config(), seq_alg_config()], TestUnique),
+    run_test([default_config(), utc_alg_config()], TestUnique),
+    run_test([default_config(), utc_id_alg_config()], TestUnique),
+
+    TestMonotonic = fun () ->
+        etap:is(
+            couch_uuids:new() < couch_uuids:new(),
+            true,
+            "should produce monotonically increasing ids"
+        )
+    end,
+    run_test([default_config(), seq_alg_config()], TestMonotonic),
+    run_test([default_config(), utc_alg_config()], TestMonotonic),
+    run_test([default_config(), utc_id_alg_config()], TestMonotonic),
+
+    % Pretty sure that the average of a uniform distribution is the
+    % midpoint of the range. Thus, to exceed a threshold, we need
+    % approximately Total / (Range/2 + RangeMin) samples.
+    %
+    % In our case this works out to be 8194. (0xFFF000 / 0x7FF)
+    % These tests just fudge the limits for a good generator at 25%
+    % in either direction. Technically it should be possible to generate
+    % bounds that will show if your random number generator is not
+    % sufficiently random but I hated statistics in school.
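+    %
+    % For reference: 16#FFF000 = 16773120, 16#7FF = 2047, and
+    % 16773120 div 2047 = 8194, hence the 5000..11000 window asserted below.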
+    TestRollOver = fun() ->
+        UUID = binary_to_list(couch_uuids:new()),
+        Prefix = element(1, lists:split(26, UUID)),
+        N = gen_until_pref_change(Prefix, 0),
+        etap:diag("N is: ~p~n", [N]),
+        etap:is(
+            N >= 5000 andalso N =< 11000,
+            true,
+            "should roll over every so often."
+        )
+    end,
+    run_test([default_config(), seq_alg_config()], TestRollOver),
+
+    TestSuffix = fun() ->
+        UUID = binary_to_list(couch_uuids:new()),
+        Suffix = get_suffix(UUID),
+        etap:is(
+            test_same_suffix(100, Suffix),
+            true,
+            "utc_id ids should have the same suffix."
+        )
+    end,
+    run_test([default_config(), utc_id_alg_config()], TestSuffix).
+
+test_unique(0, _) ->
+    true;
+test_unique(N, UUID) ->
+    case couch_uuids:new() of
+        UUID ->
+            etap:diag("N: ~p~n", [N]),
+            false;
+        Else -> test_unique(N-1, Else)
+    end.
+
+get_prefix(UUID) ->
+    element(1, lists:split(26, binary_to_list(UUID))).
+
+gen_until_pref_change(_, Count) when Count > 8251 ->
+    Count;
+gen_until_pref_change(Prefix, N) ->
+    case get_prefix(couch_uuids:new()) of
+        Prefix -> gen_until_pref_change(Prefix, N+1);
+        _ -> N
+    end.
+
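+% utc_id ids start with a time-based prefix; everything after character 14 is the
+% configured utc_id_suffix ("bozo" in 041-uuid-gen-id.ini), which is what
+% test_same_suffix/2 checks.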
+get_suffix(UUID) when is_binary(UUID)->
+    get_suffix(binary_to_list(UUID));
+get_suffix(UUID) ->
+    element(2, lists:split(14, UUID)).
+
+test_same_suffix(0, _) ->
+    true;
+test_same_suffix(N, Suffix) ->
+    case get_suffix(couch_uuids:new()) of
+        Suffix -> test_same_suffix(N-1, Suffix);
+        _ -> false
+    end.


[05/12] move test -> src/test

Posted by be...@apache.org.
http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/test/etap/042-work-queue.t
----------------------------------------------------------------------
diff --git a/test/etap/042-work-queue.t b/test/etap/042-work-queue.t
deleted file mode 100755
index 8594a6f..0000000
--- a/test/etap/042-work-queue.t
+++ /dev/null
@@ -1,500 +0,0 @@
-#!/usr/bin/env escript
-%% -*- erlang -*-
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
-main(_) ->
-    test_util:init_code_path(),
-
-    etap:plan(155),
-    case (catch test()) of
-        ok ->
-            etap:end_tests();
-        Other ->
-            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
-            etap:bail(Other)
-    end,
-    ok.
-
-
-test() ->
-    ok = crypto:start(),
-    test_single_consumer_max_item_count(),
-    test_single_consumer_max_size(),
-    test_single_consumer_max_item_count_and_size(),
-    test_multiple_consumers(),
-    ok.
-
-
-test_single_consumer_max_item_count() ->
-    etap:diag("Spawning a queue with 3 max items, 1 producer and 1 consumer"),
-
-    {ok, Q} = couch_work_queue:new([{max_items, 3}]),
-    Producer = spawn_producer(Q),
-    Consumer = spawn_consumer(Q),
-
-    etap:is(couch_work_queue:item_count(Q), 0, "Queue item count is 0"),
-
-    consume(Consumer, 1),
-    etap:is(ping(Consumer), timeout,
-        "Consumer blocked when attempting to dequeue 1 item from empty queue"),
-
-    Item1 = produce(Producer, 10),
-    etap:is(ping(Producer), ok, "Producer not blocked"),
-
-    etap:is(ping(Consumer), ok, "Consumer unblocked"),
-    etap:is(last_consumer_items(Consumer), {ok, [Item1]},
-        "Consumer received the right item"),
-
-    Item2 = produce(Producer, 20),
-    etap:is(ping(Producer), ok, "Producer not blocked with non full queue"),
-    etap:is(couch_work_queue:item_count(Q), 1, "Queue item count is 1"),
-
-    Item3 = produce(Producer, 15),
-    etap:is(ping(Producer), ok, "Producer not blocked with non full queue"),
-    etap:is(couch_work_queue:item_count(Q), 2, "Queue item count is 2"),
-
-    Item4 = produce(Producer, 3),
-    etap:is(couch_work_queue:item_count(Q), 3, "Queue item count is 3"),
-    etap:is(ping(Producer), timeout, "Producer blocked with full queue"),
-
-    consume(Consumer, 2),
-    etap:is(ping(Consumer), ok,
-        "Consumer not blocked when attempting to dequeue 2 items from queue"),
-    etap:is(last_consumer_items(Consumer), {ok, [Item2, Item3]},
-        "Consumer received the right items"),
-    etap:is(couch_work_queue:item_count(Q), 1, "Queue item count is 1"),
-
-    consume(Consumer, 2),
-    etap:is(ping(Consumer), ok,
-        "Consumer not blocked when attempting to dequeue 2 items from queue"),
-    etap:is(last_consumer_items(Consumer), {ok, [Item4]},
-        "Consumer received the right item"),
-    etap:is(couch_work_queue:item_count(Q), 0, "Queue item count is 0"),
-
-    consume(Consumer, 100),
-    etap:is(ping(Consumer), timeout,
-        "Consumer blocked when attempting to dequeue 100 items from empty queue"),
-    etap:is(couch_work_queue:item_count(Q), 0, "Queue item count is 0"),
-
-    Item5 = produce(Producer, 11),
-    etap:is(ping(Producer), ok, "Producer not blocked with empty queue"),
-    etap:is(couch_work_queue:item_count(Q), 0, "Queue item count is 0"),
-
-    Item6 = produce(Producer, 19),
-    etap:is(ping(Producer), ok, "Producer not blocked with non full queue"),
-    etap:is(couch_work_queue:item_count(Q), 1, "Queue item count is 1"),
-
-    Item7 = produce(Producer, 2),
-    etap:is(ping(Producer), ok, "Producer not blocked with non full queue"),
-    etap:is(couch_work_queue:item_count(Q), 2, "Queue item count is 2"),
-
-    Item8 = produce(Producer, 33),
-    etap:is(ping(Producer), timeout, "Producer blocked with full queue"),
-    etap:is(couch_work_queue:item_count(Q), 3, "Queue item count is 3"),
-
-    etap:is(ping(Consumer), ok, "Consumer unblocked"),
-    etap:is(last_consumer_items(Consumer), {ok, [Item5]},
-        "Consumer received the first queued item"),
-    etap:is(couch_work_queue:item_count(Q), 3, "Queue item count is 3"),
-
-    consume(Consumer, all),
-    etap:is(ping(Consumer), ok,
-        "Consumer not blocked when attempting to dequeue all items from queue"),
-    etap:is(last_consumer_items(Consumer), {ok, [Item6, Item7, Item8]},
-        "Consumer received all queued items"),
-
-    etap:is(couch_work_queue:item_count(Q), 0, "Queue item count is 0"),
-
-    etap:is(close_queue(Q), ok, "Closed queue"),
-    consume(Consumer, 1),
-    etap:is(last_consumer_items(Consumer), closed, "Consumer got closed queue"),
-    etap:is(couch_work_queue:item_count(Q), closed, "Queue closed"),
-    etap:is(couch_work_queue:size(Q), closed, "Queue closed"),
-
-    stop(Producer, "producer"),
-    stop(Consumer, "consumer").
-
-
-
-test_single_consumer_max_size() ->
-    etap:diag("Spawning a queue with max size of 160 bytes, "
-        "1 producer and 1 consumer"),
-
-    {ok, Q} = couch_work_queue:new([{max_size, 160}]),
-    Producer = spawn_producer(Q),
-    Consumer = spawn_consumer(Q),
-
-    etap:is(couch_work_queue:item_count(Q), 0, "Queue item count is 0"),
-    etap:is(couch_work_queue:size(Q), 0, "Queue size is 0 bytes"),
-
-    consume(Consumer, 1),
-    etap:is(ping(Consumer), timeout,
-        "Consumer blocked when attempting to dequeue 1 item from empty queue"),
-
-    Item1 = produce(Producer, 50),
-    etap:is(ping(Producer), ok, "Producer not blocked"),
-
-    etap:is(ping(Consumer), ok, "Consumer unblocked"),
-    etap:is(last_consumer_items(Consumer), {ok, [Item1]},
-        "Consumer received the right item"),
-
-    etap:is(couch_work_queue:item_count(Q), 0, "Queue item count is 0"),
-    etap:is(couch_work_queue:size(Q), 0, "Queue size is 0 bytes"),
-
-    Item2 = produce(Producer, 50),
-    etap:is(ping(Producer), ok, "Producer not blocked"),
-    etap:is(couch_work_queue:item_count(Q), 1, "Queue item count is 1"),
-    etap:is(couch_work_queue:size(Q), 50, "Queue size is 50 bytes"),
-
-    Item3 = produce(Producer, 50),
-    etap:is(ping(Producer), ok, "Producer not blocked"),
-    etap:is(couch_work_queue:item_count(Q), 2, "Queue item count is 2"),
-    etap:is(couch_work_queue:size(Q), 100, "Queue size is 100 bytes"),
-
-    Item4 = produce(Producer, 61),
-    etap:is(ping(Producer), timeout, "Producer blocked"),
-    etap:is(couch_work_queue:item_count(Q), 3, "Queue item count is 3"),
-    etap:is(couch_work_queue:size(Q), 161, "Queue size is 161 bytes"),
-
-    consume(Consumer, 1),
-    etap:is(ping(Consumer), ok,
-        "Consumer not blocked when attempting to dequeue 1 item from full queue"),
-    etap:is(last_consumer_items(Consumer), {ok, [Item2]},
-        "Consumer received the right item"),
-    etap:is(couch_work_queue:item_count(Q), 2, "Queue item count is 2"),
-    etap:is(couch_work_queue:size(Q), 111, "Queue size is 111 bytes"),
-
-    Item5 = produce(Producer, 20),
-    etap:is(ping(Producer), ok, "Producer not blocked"),
-    etap:is(couch_work_queue:item_count(Q), 3, "Queue item count is 3"),
-    etap:is(couch_work_queue:size(Q), 131, "Queue size is 131 bytes"),
-
-    Item6 = produce(Producer, 40),
-    etap:is(ping(Producer), timeout, "Producer blocked"),
-    etap:is(couch_work_queue:item_count(Q), 4, "Queue item count is 4"),
-    etap:is(couch_work_queue:size(Q), 171, "Queue size is 171 bytes"),
-
-    etap:is(close_queue(Q), timeout,
-        "Timeout when trying to close non-empty queue"),
-
-    consume(Consumer, 2),
-    etap:is(ping(Consumer), ok,
-        "Consumer not blocked when attempting to dequeue 2 items from full queue"),
-    etap:is(last_consumer_items(Consumer), {ok, [Item3, Item4]},
-        "Consumer received the right items"),
-    etap:is(couch_work_queue:item_count(Q), 2, "Queue item count is 2"),
-    etap:is(couch_work_queue:size(Q), 60, "Queue size is 60 bytes"),
-
-    etap:is(close_queue(Q), timeout,
-        "Timeout when trying to close non-empty queue"),
-
-    consume(Consumer, all),
-    etap:is(ping(Consumer), ok,
-        "Consumer not blocked when attempting to dequeue all items from queue"),
-    etap:is(last_consumer_items(Consumer), {ok, [Item5, Item6]},
-        "Consumer received the right items"),
-
-    etap:is(couch_work_queue:item_count(Q), closed, "Queue closed"),
-    etap:is(couch_work_queue:size(Q), closed, "Queue closed"),
-
-    consume(Consumer, all),
-    etap:is(last_consumer_items(Consumer), closed, "Consumer got closed queue"),
-
-    stop(Producer, "producer"),
-    stop(Consumer, "consumer").
-
-
-test_single_consumer_max_item_count_and_size() ->
-    etap:diag("Spawning a queue with 3 max items, max size of 200 bytes, "
-        "1 producer and 1 consumer"),
-
-    {ok, Q} = couch_work_queue:new([{max_items, 3}, {max_size, 200}]),
-    Producer = spawn_producer(Q),
-    Consumer = spawn_consumer(Q),
-
-    etap:is(couch_work_queue:item_count(Q), 0, "Queue item count is 0"),
-    etap:is(couch_work_queue:size(Q), 0, "Queue size is 0 bytes"),
-
-    Item1 = produce(Producer, 100),
-    etap:is(ping(Producer), ok, "Producer not blocked"),
-    etap:is(couch_work_queue:item_count(Q), 1, "Queue item count is 1"),
-    etap:is(couch_work_queue:size(Q), 100, "Queue size is 100 bytes"),
-
-    Item2 = produce(Producer, 110),
-    etap:is(ping(Producer), timeout,
-        "Producer blocked when queue size >= max_size"),
-    etap:is(couch_work_queue:item_count(Q), 2, "Queue item count is 2"),
-    etap:is(couch_work_queue:size(Q), 210, "Queue size is 210 bytes"),
-
-    consume(Consumer, all),
-    etap:is(ping(Consumer), ok,
-        "Consumer not blocked when attempting to dequeue all items from queue"),
-    etap:is(last_consumer_items(Consumer), {ok, [Item1, Item2]},
-        "Consumer received the right items"),
-    etap:is(couch_work_queue:item_count(Q), 0, "Queue item count is 0"),
-    etap:is(couch_work_queue:size(Q), 0, "Queue size is 0 bytes"),
-
-    etap:is(ping(Producer), ok, "Producer not blocked anymore"),
-
-    Item3 = produce(Producer, 10),
-    etap:is(ping(Producer), ok, "Producer not blocked"),
-    etap:is(couch_work_queue:item_count(Q), 1, "Queue item count is 1"),
-    etap:is(couch_work_queue:size(Q), 10, "Queue size is 10 bytes"),
-
-    Item4 = produce(Producer, 4),
-    etap:is(ping(Producer), ok, "Producer not blocked"),
-    etap:is(couch_work_queue:item_count(Q), 2, "Queue item count is 2"),
-    etap:is(couch_work_queue:size(Q), 14, "Queue size is 14 bytes"),
-
-    Item5 = produce(Producer, 2),
-    etap:is(ping(Producer), timeout,
-        "Producer blocked when queue item count = max_items"),
-    etap:is(couch_work_queue:item_count(Q), 3, "Queue item count is 3"),
-    etap:is(couch_work_queue:size(Q), 16, "Queue size is 16 bytes"),
-
-    consume(Consumer, 1),
-    etap:is(ping(Consumer), ok,
-        "Consumer not blocked when attempting to dequeue 1 item from queue"),
-    etap:is(last_consumer_items(Consumer), {ok, [Item3]},
-       "Consumer received 1 item"),
-    etap:is(couch_work_queue:item_count(Q), 2, "Queue item count is 2"),
-    etap:is(couch_work_queue:size(Q), 6, "Queue size is 6 bytes"),
-
-    etap:is(close_queue(Q), timeout,
-        "Timeout when trying to close non-empty queue"),
-
-    consume(Consumer, 1),
-    etap:is(ping(Consumer), ok,
-        "Consumer not blocked when attempting to dequeue 1 item from queue"),
-    etap:is(last_consumer_items(Consumer), {ok, [Item4]},
-       "Consumer received 1 item"),
-    etap:is(couch_work_queue:item_count(Q), 1, "Queue item count is 1"),
-    etap:is(couch_work_queue:size(Q), 2, "Queue size is 2 bytes"),
-
-    Item6 = produce(Producer, 50),
-    etap:is(ping(Producer), ok,
-        "Producer not blocked when queue is not full and already received"
-        " a close request"),
-    etap:is(couch_work_queue:item_count(Q), 2, "Queue item count is 2"),
-    etap:is(couch_work_queue:size(Q), 52, "Queue size is 52 bytes"),
-
-    consume(Consumer, all),
-    etap:is(ping(Consumer), ok,
-        "Consumer not blocked when attempting to dequeue all items from queue"),
-    etap:is(last_consumer_items(Consumer), {ok, [Item5, Item6]},
-       "Consumer received all queued items"),
-
-    etap:is(couch_work_queue:item_count(Q), closed, "Queue closed"),
-    etap:is(couch_work_queue:size(Q), closed, "Queue closed"),
-
-    consume(Consumer, 1),
-    etap:is(last_consumer_items(Consumer), closed, "Consumer got closed queue"),
-
-    stop(Producer, "producer"),
-    stop(Consumer, "consumer").
-
-
-test_multiple_consumers() ->
-    etap:diag("Spawning a queue with 3 max items, max size of 200 bytes, "
-        "1 producer and 3 consumers"),
-
-    {ok, Q} = couch_work_queue:new(
-        [{max_items, 3}, {max_size, 200}, {multi_workers, true}]),
-    Producer = spawn_producer(Q),
-    Consumer1 = spawn_consumer(Q),
-    Consumer2 = spawn_consumer(Q),
-    Consumer3 = spawn_consumer(Q),
-
-    etap:is(couch_work_queue:item_count(Q), 0, "Queue item count is 0"),
-    etap:is(couch_work_queue:size(Q), 0, "Queue size is 0 bytes"),
-
-    consume(Consumer1, 1),
-    etap:is(ping(Consumer1), timeout,
-        "Consumer 1 blocked when attempting to dequeue 1 item from empty queue"),
-    consume(Consumer2, 2),
-    etap:is(ping(Consumer2), timeout,
-        "Consumer 2 blocked when attempting to dequeue 2 items from empty queue"),
-    consume(Consumer3, 1),
-    etap:is(ping(Consumer3), timeout,
-        "Consumer 3 blocked when attempting to dequeue 1 item from empty queue"),
-
-    Item1 = produce(Producer, 50),
-    etap:is(ping(Producer), ok, "Producer not blocked"),
-    etap:is(couch_work_queue:item_count(Q), 0, "Queue item count is 0"),
-    etap:is(couch_work_queue:size(Q), 0, "Queue size is 0 bytes"),
-
-    Item2 = produce(Producer, 50),
-    etap:is(ping(Producer), ok, "Producer not blocked"),
-    etap:is(couch_work_queue:item_count(Q), 0, "Queue item count is 0"),
-    etap:is(couch_work_queue:size(Q), 0, "Queue size is 0 bytes"),
-
-    Item3 = produce(Producer, 50),
-    etap:is(ping(Producer), ok, "Producer not blocked"),
-    etap:is(couch_work_queue:item_count(Q), 0, "Queue item count is 0"),
-    etap:is(couch_work_queue:size(Q), 0, "Queue size is 0 bytes"),
-
-    etap:is(ping(Consumer1), ok, "Consumer 1 unblocked"),
-    etap:is(last_consumer_items(Consumer1), {ok, [Item1]},
-       "Consumer 1 received 1 item"),
-    etap:is(couch_work_queue:item_count(Q), 0, "Queue item count is 0"),
-    etap:is(couch_work_queue:size(Q), 0, "Queue size is 0 bytes"),
-
-    etap:is(ping(Consumer2), ok, "Consumer 2 unblocked"),
-    etap:is(last_consumer_items(Consumer2), {ok, [Item2]},
-       "Consumer 2 received 1 item"),
-    etap:is(couch_work_queue:item_count(Q), 0, "Queue item count is 0"),
-    etap:is(couch_work_queue:size(Q), 0, "Queue size is 0 bytes"),
-
-    etap:is(ping(Consumer3), ok, "Consumer 3 unblocked"),
-    etap:is(last_consumer_items(Consumer3), {ok, [Item3]},
-       "Consumer 3 received 1 item"),
-    etap:is(couch_work_queue:item_count(Q), 0, "Queue item count is 0"),
-    etap:is(couch_work_queue:size(Q), 0, "Queue size is 0 bytes"),
-
-    consume(Consumer1, 1),
-    etap:is(ping(Consumer1), timeout,
-        "Consumer 1 blocked when attempting to dequeue 1 item from empty queue"),
-    consume(Consumer2, 2),
-    etap:is(ping(Consumer2), timeout,
-        "Consumer 2 blocked when attempting to dequeue 1 item from empty queue"),
-    consume(Consumer3, 1),
-    etap:is(ping(Consumer3), timeout,
-        "Consumer 3 blocked when attempting to dequeue 1 item from empty queue"),
-
-    Item4 = produce(Producer, 50),
-    etap:is(ping(Producer), ok, "Producer not blocked"),
-    etap:is(couch_work_queue:item_count(Q), 0, "Queue item count is 0"),
-    etap:is(couch_work_queue:size(Q), 0, "Queue size is 0 bytes"),
-
-    etap:is(close_queue(Q), ok, "Closed queue"),
-
-    etap:is(ping(Consumer1), ok, "Consumer 1 unblocked"),
-    etap:is(last_consumer_items(Consumer1), {ok, [Item4]},
-       "Consumer 1 received 1 item"),
-
-    etap:is(couch_work_queue:item_count(Q), closed, "Queue closed"),
-    etap:is(couch_work_queue:size(Q), closed, "Queue closed"),
-
-    etap:is(ping(Consumer2), ok, "Consumer 2 unblocked"),
-    etap:is(last_consumer_items(Consumer2), closed,
-        "Consumer 2 received 'closed' atom"),
-
-    etap:is(ping(Consumer3), ok, "Consumer 3 unblocked"),
-    etap:is(last_consumer_items(Consumer3), closed,
-        "Consumer 3 received 'closed' atom"),
-
-    stop(Producer, "producer"),
-    stop(Consumer1, "consumer 1"),
-    stop(Consumer2, "consumer 2"),
-    stop(Consumer3, "consumer 3").
-
-
-close_queue(Q) ->
-    ok = couch_work_queue:close(Q),
-    MonRef = erlang:monitor(process, Q),
-    receive
-    {'DOWN', MonRef, process, Q, _Reason} ->
-         etap:diag("Queue closed")
-    after 3000 ->
-         erlang:demonitor(MonRef),
-         timeout
-    end.
-
-
-spawn_consumer(Q) ->
-    Parent = self(),
-    spawn(fun() -> consumer_loop(Parent, Q, nil) end).
-
-
-consumer_loop(Parent, Q, PrevItem) ->
-    receive
-    {stop, Ref} ->
-        Parent ! {ok, Ref};
-    {ping, Ref} ->
-        Parent ! {pong, Ref},
-        consumer_loop(Parent, Q, PrevItem);
-    {last_item, Ref} ->
-        Parent ! {item, Ref, PrevItem},
-        consumer_loop(Parent, Q, PrevItem);
-    {consume, N} ->
-        Result = couch_work_queue:dequeue(Q, N),
-        consumer_loop(Parent, Q, Result)
-    end.
-
-
-spawn_producer(Q) ->
-    Parent = self(),
-    spawn(fun() -> producer_loop(Parent, Q) end).
-
-
-producer_loop(Parent, Q) ->
-    receive
-    {stop, Ref} ->
-        Parent ! {ok, Ref};
-    {ping, Ref} ->
-        Parent ! {pong, Ref},
-        producer_loop(Parent, Q);
-    {produce, Ref, Size} ->
-        Item = crypto:rand_bytes(Size),
-        Parent ! {item, Ref, Item},
-        ok = couch_work_queue:queue(Q, Item),
-        producer_loop(Parent, Q)
-    end.
-
-
-consume(Consumer, N) ->
-    Consumer ! {consume, N}.
-
-
-last_consumer_items(Consumer) ->
-    Ref = make_ref(),
-    Consumer ! {last_item, Ref},
-    receive
-    {item, Ref, Items} ->
-        Items
-    after 3000 ->
-        timeout
-    end.
-
-
-produce(Producer, Size) ->
-    Ref = make_ref(),
-    Producer ! {produce, Ref, Size},
-    receive
-    {item, Ref, Item} ->
-        Item
-    after 3000 ->
-        etap:bail("Timeout asking producer to produce an item")
-    end.
-
-
-ping(Pid) ->
-    Ref = make_ref(),
-    Pid ! {ping, Ref},
-    receive
-    {pong, Ref} ->
-        ok
-    after 3000 ->
-        timeout
-    end.
-
-
-stop(Pid, Name) ->
-    Ref = make_ref(),
-    Pid ! {stop, Ref},
-    receive
-    {ok, Ref} ->
-        etap:diag("Stopped " ++ Name)
-    after 3000 ->
-        etap:bail("Timeout stopping " ++ Name)
-    end.

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/test/etap/050-stream.t
----------------------------------------------------------------------
diff --git a/test/etap/050-stream.t b/test/etap/050-stream.t
deleted file mode 100755
index 0251f00..0000000
--- a/test/etap/050-stream.t
+++ /dev/null
@@ -1,87 +0,0 @@
-#!/usr/bin/env escript
-%% -*- erlang -*-
-
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
-main(_) ->
-    test_util:init_code_path(),
-    etap:plan(13),
-    case (catch test()) of
-        ok ->
-            etap:end_tests();
-        Other ->
-            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
-            etap:bail(Other)
-    end,
-    ok.
-
-read_all(Fd, PosList) ->
-    Data = couch_stream:foldl(Fd, PosList, fun(Bin, Acc) -> [Bin, Acc] end, []),
-    iolist_to_binary(Data).
-
-test() ->
-    {ok, Fd} = couch_file:open("test/etap/temp.050", [create,overwrite]),
-    {ok, Stream} = couch_stream:open(Fd),
-
-    etap:is(ok, couch_stream:write(Stream, <<"food">>),
-        "Writing to streams works."),
-
-    etap:is(ok, couch_stream:write(Stream, <<"foob">>),
-        "Consecutive writing to streams works."),
-
-    etap:is(ok, couch_stream:write(Stream, <<>>),
-        "Writing an empty binary does nothing."),
-
-    {Ptrs, Length, _, _, _} = couch_stream:close(Stream),
-    etap:is(Ptrs, [{0, 8}], "Close returns the file pointers."),
-    etap:is(Length, 8, "Close also returns the number of bytes written."),
-    etap:is(<<"foodfoob">>, read_all(Fd, Ptrs), "Returned pointers are valid."),
-
-    % Remember where we expect the pointer to be.
-    {ok, ExpPtr} = couch_file:bytes(Fd),
-    {ok, Stream2} = couch_stream:open(Fd),
-    OneBits = <<1:(8*10)>>,
-    etap:is(ok, couch_stream:write(Stream2, OneBits),
-        "Successfully wrote 79 zero bits and 1 one bit."),
-
-    ZeroBits = <<0:(8*10)>>,
-    etap:is(ok, couch_stream:write(Stream2, ZeroBits),
-        "Successfully wrote 80 0 bits."),
-
-    {Ptrs2, Length2, _, _, _} = couch_stream:close(Stream2),
-    etap:is(Ptrs2, [{ExpPtr, 20}], "Closing stream returns the file pointers."),
-    etap:is(Length2, 20, "Length written is 160 bytes."),
-
-    AllBits = iolist_to_binary([OneBits,ZeroBits]),
-    etap:is(AllBits, read_all(Fd, Ptrs2), "Returned pointers are valid."),
-
-    % Stream more than the 4K chunk size.
-    {ok, ExpPtr2} = couch_file:bytes(Fd),
-    {ok, Stream3} = couch_stream:open(Fd, [{buffer_size, 4096}]),
-    lists:foldl(fun(_, Acc) ->
-        Data = <<"a1b2c">>,
-        couch_stream:write(Stream3, Data),
-        [Data | Acc]
-    end, [], lists:seq(1, 1024)),
-    {Ptrs3, Length3, _, _, _} = couch_stream:close(Stream3),
-
-    % 4095 = 5 * (4096 div 5), i.e. the last write before exceeding the threshold
-    % + 5 puts us over the threshold
-    % + 4 bytes for the term_to_binary adding a length header
-    % + 1 byte every 4K for tail append headers
-    SecondPtr = ExpPtr2 + 4095 + 5 + 4 + 1,
-    etap:is(Ptrs3, [{ExpPtr2, 4100}, {SecondPtr, 1020}], "Pointers every 4K bytes."),
-    etap:is(Length3, 5120, "Wrote the expected 5K bytes."),
-
-    couch_file:close(Fd),
-    ok.

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/test/etap/060-kt-merging.t
----------------------------------------------------------------------
diff --git a/test/etap/060-kt-merging.t b/test/etap/060-kt-merging.t
deleted file mode 100755
index efbdbf6..0000000
--- a/test/etap/060-kt-merging.t
+++ /dev/null
@@ -1,176 +0,0 @@
-#!/usr/bin/env escript
-%% -*- erlang -*-
-
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
-main(_) ->
-    test_util:init_code_path(),
-    etap:plan(16),
-    case (catch test()) of
-        ok ->
-            etap:end_tests();
-        Other ->
-            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
-            etap:bail(Other)
-    end,
-    ok.
-
-test() ->
-    One = {1, {"1","foo",[]}},
-
-    etap:is(
-        {[One], no_conflicts},
-        couch_key_tree:merge([], One, 10),
-        "The empty tree is the identity for merge."
-    ),
-    etap:is(
-        {[One], no_conflicts},
-        couch_key_tree:merge([One], One, 10),
-        "Merging is reflexive."
-    ),
-
-    TwoSibs = [{1, {"1","foo",[]}},
-               {1, {"2","foo",[]}}],
-
-    etap:is(
-        {TwoSibs, no_conflicts},
-        couch_key_tree:merge(TwoSibs, One, 10),
-        "Merging a prefix of a tree with the tree yields the tree."
-    ),
-
-    Three = {1, {"3","foo",[]}},
-    ThreeSibs = [{1, {"1","foo",[]}},
-                 {1, {"2","foo",[]}},
-                 {1, {"3","foo",[]}}],
-
-    etap:is(
-        {ThreeSibs, conflicts},
-        couch_key_tree:merge(TwoSibs, Three, 10),
-        "Merging a third unrelated branch leads to a conflict."
-    ),
-
-
-    TwoChild = {1, {"1","foo", [{"1a", "bar", [{"1aa", "bar", []}]}]}},
-
-    etap:is(
-        {[TwoChild], no_conflicts},
-        couch_key_tree:merge([TwoChild], TwoChild, 10),
-        "Merging two children is still reflexive."
-    ),
-
-    TwoChildSibs = {1, {"1","foo", [{"1a", "bar", []},
-                                     {"1b", "bar", []}]}},
-    etap:is(
-        {[TwoChildSibs], no_conflicts},
-        couch_key_tree:merge([TwoChildSibs], TwoChildSibs, 10),
-        "Merging a tree to itself is itself."),
-
-    TwoChildPlusSibs =
-        {1, {"1","foo", [{"1a", "bar", [{"1aa", "bar", []}]},
-                         {"1b", "bar", []}]}},
-
-    etap:is(
-        {[TwoChildPlusSibs], no_conflicts},
-        couch_key_tree:merge([TwoChild], TwoChildSibs, 10),
-        "Merging tree of uneven length at node 2."),
-
-    Stemmed1b = {2, {"1a", "bar", []}},
-    etap:is(
-        {[TwoChildSibs], no_conflicts},
-        couch_key_tree:merge([TwoChildSibs], Stemmed1b, 10),
-        "Merging a tree with a stem."
-    ),
-
-    TwoChildSibs2 = {1, {"1","foo", [{"1a", "bar", []},
-                                     {"1b", "bar", [{"1bb", "boo", []}]}]}},
-    Stemmed1bb = {3, {"1bb", "boo", []}},
-    etap:is(
-        {[TwoChildSibs2], no_conflicts},
-        couch_key_tree:merge([TwoChildSibs2], Stemmed1bb, 10),
-        "Merging a stem at a deeper level."
-    ),
-
-    StemmedTwoChildSibs2 = [{2,{"1a", "bar", []}},
-                            {2,{"1b", "bar", [{"1bb", "boo", []}]}}],
-
-    etap:is(
-        {StemmedTwoChildSibs2, no_conflicts},
-        couch_key_tree:merge(StemmedTwoChildSibs2, Stemmed1bb, 10),
-        "Merging a stem at a deeper level against paths at deeper levels."
-    ),
-
-    Stemmed1aa = {3, {"1aa", "bar", []}},
-    etap:is(
-        {[TwoChild], no_conflicts},
-        couch_key_tree:merge([TwoChild], Stemmed1aa, 10),
-        "Merging a single tree with a deeper stem."
-    ),
-
-    Stemmed1a = {2, {"1a", "bar", [{"1aa", "bar", []}]}},
-    etap:is(
-        {[TwoChild], no_conflicts},
-        couch_key_tree:merge([TwoChild], Stemmed1a, 10),
-        "Merging a larger stem."
-    ),
-
-    etap:is(
-        {[Stemmed1a], no_conflicts},
-        couch_key_tree:merge([Stemmed1a], Stemmed1aa, 10),
-        "More merging."
-    ),
-
-    OneChild = {1, {"1","foo",[{"1a", "bar", []}]}},
-    Expect1 = [OneChild, Stemmed1aa],
-    etap:is(
-        {Expect1, conflicts},
-        couch_key_tree:merge([OneChild], Stemmed1aa, 10),
-        "Merging should create conflicts."
-    ),
-
-    etap:is(
-        {[TwoChild], no_conflicts},
-        couch_key_tree:merge(Expect1, TwoChild, 10),
-        "Merge should have no conflicts."
-    ),
-
-    %% this test is based on couch-902-test-case2.py
-    %% foo has conflicts from replication at depth two
-    %% foo3 is the current value
-    Foo = {1, {"foo",
-               "val1",
-               [{"foo2","val2",[]},
-                {"foo3", "val3", []}
-               ]}},
-    %% foo now has an attachment added, which leads to foo4 and val4
-    %% off foo3
-    Bar = {1, {"foo",
-               [],
-               [{"foo3",
-                 [],
-                 [{"foo4","val4",[]}
-                  ]}]}},
-    %% this is what the merge returns
-    %% note that it ignores the conflicting branch as there's no match
-    FooBar = {1, {"foo",
-               "val1",
-               [{"foo2","val2",[]},
-                {"foo3", "val3", [{"foo4","val4",[]}]}
-               ]}},
-
-    etap:is(
-      {[FooBar], no_conflicts},
-      couch_key_tree:merge([Foo],Bar,10),
-      "Merging trees with conflicts ought to behave."
-    ),
-
-    ok.

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/test/etap/061-kt-missing-leaves.t
----------------------------------------------------------------------
diff --git a/test/etap/061-kt-missing-leaves.t b/test/etap/061-kt-missing-leaves.t
deleted file mode 100755
index d60b4db..0000000
--- a/test/etap/061-kt-missing-leaves.t
+++ /dev/null
@@ -1,65 +0,0 @@
-#!/usr/bin/env escript
-%% -*- erlang -*-
-
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
-main(_) ->
-    test_util:init_code_path(),
-    etap:plan(4),
-    case (catch test()) of
-        ok ->
-            etap:end_tests();
-        Other ->
-            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
-            etap:bail(Other)
-    end,
-    ok.
-
-test() ->
-    TwoChildSibs = [{0, {"1","foo", [{"1a", "bar", []}, {"1b", "bar", []}]}}],
-    Stemmed1 = [{1, {"1a", "bar", [{"1aa", "bar", []}]}}],
-    Stemmed2 = [{2, {"1aa", "bar", []}}],
-
-    etap:is(
-        [],
-        couch_key_tree:find_missing(TwoChildSibs, [{0,"1"}, {1,"1a"}]),
-        "Look for missing keys."
-    ),
-
-    etap:is(
-        [{0, "10"}, {100, "x"}],
-        couch_key_tree:find_missing(
-            TwoChildSibs,
-            [{0,"1"}, {0, "10"}, {1,"1a"}, {100, "x"}]
-        ),
-        "Look for missing keys."
-    ),
-
-    etap:is(
-        [{0, "1"}, {100, "x"}],
-        couch_key_tree:find_missing(
-            Stemmed1,
-            [{0,"1"}, {1,"1a"}, {100, "x"}]
-        ),
-        "Look for missing keys."
-    ),
-    etap:is(
-        [{0, "1"}, {1,"1a"}, {100, "x"}],
-        couch_key_tree:find_missing(
-            Stemmed2,
-            [{0,"1"}, {1,"1a"}, {100, "x"}]
-        ),
-        "Look for missing keys."
-    ),
-
-    ok.
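
find_missing/2 takes a revision tree and a list of {Pos, Key} pairs and hands back only the pairs the tree does not contain, which is exactly what the four assertions above check. A small standalone sketch under the same code-path assumption:

    #!/usr/bin/env escript
    %% Sketch only: assumes test_util:init_code_path() works as in the test.
    main(_) ->
        test_util:init_code_path(),
        TwoChildSibs = [{0, {"1","foo", [{"1a", "bar", []}, {"1b", "bar", []}]}}],
        %% "10" and "x" are not in the tree, so only those pairs come back.
        [{0, "10"}, {100, "x"}] =
            couch_key_tree:find_missing(
                TwoChildSibs,
                [{0,"1"}, {0,"10"}, {1,"1a"}, {100,"x"}]),
        io:format("find_missing: ok~n", []).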

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/test/etap/062-kt-remove-leaves.t
----------------------------------------------------------------------
diff --git a/test/etap/062-kt-remove-leaves.t b/test/etap/062-kt-remove-leaves.t
deleted file mode 100755
index 745a00b..0000000
--- a/test/etap/062-kt-remove-leaves.t
+++ /dev/null
@@ -1,69 +0,0 @@
-#!/usr/bin/env escript
-%% -*- erlang -*-
-
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
-main(_) ->
-    test_util:init_code_path(),
-    etap:plan(6),
-    case (catch test()) of
-        ok ->
-            etap:end_tests();
-        Other ->
-            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
-            etap:bail(Other)
-    end,
-    ok.
-
-test() ->
-    OneChild = [{0, {"1","foo",[{"1a", "bar", []}]}}],
-    TwoChildSibs = [{0, {"1","foo", [{"1a", "bar", []}, {"1b", "bar", []}]}}],
-    Stemmed = [{1, {"1a", "bar", [{"1aa", "bar", []}]}}],
-
-    etap:is(
-        {TwoChildSibs, []},
-        couch_key_tree:remove_leafs(TwoChildSibs, []),
-        "Removing no leaves has no effect on the tree."
-    ),
-
-    etap:is(
-        {TwoChildSibs, []},
-        couch_key_tree:remove_leafs(TwoChildSibs, [{0, "1"}]),
-        "Removing a non-existant branch has no effect."
-    ),
-
-    etap:is(
-        {OneChild, [{1, "1b"}]},
-        couch_key_tree:remove_leafs(TwoChildSibs, [{1, "1b"}]),
-        "Removing a leaf removes the leaf."
-    ),
-
-    etap:is(
-        {[], [{1, "1b"},{1, "1a"}]},
-        couch_key_tree:remove_leafs(TwoChildSibs, [{1, "1a"}, {1, "1b"}]),
-        "Removing all leaves returns an empty tree."
-    ),
-
-    etap:is(
-        {Stemmed, []},
-        couch_key_tree:remove_leafs(Stemmed, [{1, "1a"}]),
-        "Removing a non-existant node has no effect."
-    ),
-
-    etap:is(
-        {[], [{2, "1aa"}]},
-        couch_key_tree:remove_leafs(Stemmed, [{2, "1aa"}]),
-        "Removing the last leaf returns an empty tree."
-    ),
-
-    ok.
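
remove_leafs/2 returns a pair, the pruned tree and the list of leaves that were actually removed, so callers can tell a no-op from a real deletion. A standalone sketch with the same fixtures:

    #!/usr/bin/env escript
    %% Sketch only: same code-path assumption as the test above.
    main(_) ->
        test_util:init_code_path(),
        TwoChildSibs = [{0, {"1","foo", [{"1a", "bar", []}, {"1b", "bar", []}]}}],
        OneChild = [{0, {"1","foo", [{"1a", "bar", []}]}}],
        %% Dropping leaf "1b" keeps the "1a" branch and reports the removal.
        {OneChild, [{1, "1b"}]} =
            couch_key_tree:remove_leafs(TwoChildSibs, [{1, "1b"}]),
        io:format("remove_leafs: ok~n", []).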

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/test/etap/063-kt-get-leaves.t
----------------------------------------------------------------------
diff --git a/test/etap/063-kt-get-leaves.t b/test/etap/063-kt-get-leaves.t
deleted file mode 100755
index 6d4e800..0000000
--- a/test/etap/063-kt-get-leaves.t
+++ /dev/null
@@ -1,98 +0,0 @@
-#!/usr/bin/env escript
-%% -*- erlang -*-
-
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
-main(_) ->
-    test_util:init_code_path(),
-    etap:plan(11),
-    case (catch test()) of
-        ok ->
-            etap:end_tests();
-        Other ->
-            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
-            etap:bail(Other)
-    end,
-    ok.
-
-test() ->
-    TwoChildSibs = [{0, {"1","foo", [{"1a", "bar", []}, {"1b", "bar", []}]}}],
-    Stemmed = [{1, {"1a", "bar", [{"1aa", "bar", []}]}}],
-
-    etap:is(
-        {[{"foo", {0, ["1"]}}],[]},
-        couch_key_tree:get(TwoChildSibs, [{0, "1"}]),
-        "extract a subtree."
-    ),
-
-    etap:is(
-        {[{"bar", {1, ["1a", "1"]}}],[]},
-        couch_key_tree:get(TwoChildSibs, [{1, "1a"}]),
-        "extract a subtree."
-    ),
-
-    etap:is(
-        {[],[{0,"x"}]},
-        couch_key_tree:get_key_leafs(TwoChildSibs, [{0, "x"}]),
-        "gather up the leaves."
-    ),
-
-    etap:is(
-        {[{"bar", {1, ["1a","1"]}}],[]},
-        couch_key_tree:get_key_leafs(TwoChildSibs, [{1, "1a"}]),
-        "gather up the leaves."
-    ),
-
-    etap:is(
-        {[{"bar", {1, ["1a","1"]}},{"bar",{1, ["1b","1"]}}],[]},
-        couch_key_tree:get_key_leafs(TwoChildSibs, [{0, "1"}]),
-        "gather up the leaves."
-    ),
-
-    etap:is(
-        {[{0,[{"1", "foo"}]}],[]},
-        couch_key_tree:get_full_key_paths(TwoChildSibs, [{0, "1"}]),
-        "retrieve full key paths."
-    ),
-
-    etap:is(
-        {[{1,[{"1a", "bar"},{"1", "foo"}]}],[]},
-        couch_key_tree:get_full_key_paths(TwoChildSibs, [{1, "1a"}]),
-        "retrieve full key paths."
-    ),
-
-    etap:is(
-        [{2, [{"1aa", "bar"},{"1a", "bar"}]}],
-        couch_key_tree:get_all_leafs_full(Stemmed),
-        "retrieve all leaves."
-    ),
-
-    etap:is(
-        [{1, [{"1a", "bar"},{"1", "foo"}]}, {1, [{"1b", "bar"},{"1", "foo"}]}],
-        couch_key_tree:get_all_leafs_full(TwoChildSibs),
-        "retrieve all the leaves."
-    ),
-
-    etap:is(
-        [{"bar", {2, ["1aa","1a"]}}],
-        couch_key_tree:get_all_leafs(Stemmed),
-        "retrieve all leaves."
-    ),
-
-    etap:is(
-        [{"bar", {1, ["1a", "1"]}}, {"bar", {1, ["1b","1"]}}],
-        couch_key_tree:get_all_leafs(TwoChildSibs),
-        "retrieve all the leaves."
-    ),
-
-    ok.
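
The lookup functions split into targeted and exhaustive variants: get/2 and get_key_leafs/2 return {Found, Missing} for specific {Pos, Key} pairs, while get_all_leafs/1 and get_all_leafs_full/1 walk the whole tree. A sketch of the two most common calls, under the same assumptions as the test:

    #!/usr/bin/env escript
    %% Sketch only: same code-path assumption as the test above.
    main(_) ->
        test_util:init_code_path(),
        TwoChildSibs = [{0, {"1","foo", [{"1a", "bar", []}, {"1b", "bar", []}]}}],
        {[{"bar", {1, ["1a", "1"]}}], []} =
            couch_key_tree:get(TwoChildSibs, [{1, "1a"}]),
        [{"bar", {1, ["1a", "1"]}}, {"bar", {1, ["1b", "1"]}}] =
            couch_key_tree:get_all_leafs(TwoChildSibs),
        io:format("get/get_all_leafs: ok~n", []).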

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/test/etap/064-kt-counting.t
----------------------------------------------------------------------
diff --git a/test/etap/064-kt-counting.t b/test/etap/064-kt-counting.t
deleted file mode 100755
index f182d28..0000000
--- a/test/etap/064-kt-counting.t
+++ /dev/null
@@ -1,46 +0,0 @@
-#!/usr/bin/env escript
-%% -*- erlang -*-
-
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
-main(_) ->
-    test_util:init_code_path(),
-    etap:plan(4),
-    case (catch test()) of
-        ok ->
-            etap:end_tests();
-        Other ->
-            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
-            etap:bail(Other)
-    end,
-    ok.
-
-test() ->
-    EmptyTree = [],
-    One = [{0, {"1","foo",[]}}],
-    TwoChildSibs = [{0, {"1","foo", [{"1a", "bar", []}, {"1b", "bar", []}]}}],
-    Stemmed = [{2, {"1bb", "boo", []}}],
-
-    etap:is(0, couch_key_tree:count_leafs(EmptyTree),
-        "Empty trees have no leaves."),
-
-    etap:is(1, couch_key_tree:count_leafs(One),
-        "Single node trees have a single leaf."),
-
-    etap:is(2, couch_key_tree:count_leafs(TwoChildSibs),
-        "Two children siblings counted as two leaves."),
-
-    etap:is(1, couch_key_tree:count_leafs(Stemmed),
-        "Stemming does not affect leaf counting."),
-
-    ok.

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/test/etap/065-kt-stemming.t
----------------------------------------------------------------------
diff --git a/test/etap/065-kt-stemming.t b/test/etap/065-kt-stemming.t
deleted file mode 100755
index 6e781c1..0000000
--- a/test/etap/065-kt-stemming.t
+++ /dev/null
@@ -1,42 +0,0 @@
-#!/usr/bin/env escript
-%% -*- erlang -*-
-
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
-main(_) ->
-    test_util:init_code_path(),
-    etap:plan(3),
-    case (catch test()) of
-        ok ->
-            etap:end_tests();
-        Other ->
-            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
-            etap:bail(Other)
-    end,
-    ok.
-
-test() ->
-    TwoChild = [{0, {"1","foo", [{"1a", "bar", [{"1aa", "bar", []}]}]}}],
-    Stemmed1 = [{1, {"1a", "bar", [{"1aa", "bar", []}]}}],
-    Stemmed2 = [{2, {"1aa", "bar", []}}],
-
-    etap:is(TwoChild, couch_key_tree:stem(TwoChild, 3),
-        "Stemming more levels than what exists does nothing."),
-
-    etap:is(Stemmed1, couch_key_tree:stem(TwoChild, 2),
-        "Stemming with a depth of two returns the deepest two nodes."),
-
-    etap:is(Stemmed2, couch_key_tree:stem(TwoChild, 1),
-        "Stemming to a depth of one returns the deepest node."),
-
-    ok.
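
Stemming keeps only the deepest N nodes of each branch and raises the start position to compensate, which is why the stemmed fixtures above begin at positions 1 and 2 rather than 0. A standalone sketch:

    #!/usr/bin/env escript
    %% Sketch only: same code-path assumption as the test above.
    main(_) ->
        test_util:init_code_path(),
        TwoChild = [{0, {"1","foo", [{"1a", "bar", [{"1aa", "bar", []}]}]}}],
        %% Keeping two levels drops the root "1" and starts the tree at pos 1.
        [{1, {"1a", "bar", [{"1aa", "bar", []}]}}] =
            couch_key_tree:stem(TwoChild, 2),
        io:format("stem: ok~n", []).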

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/test/etap/070-couch-db.t
----------------------------------------------------------------------
diff --git a/test/etap/070-couch-db.t b/test/etap/070-couch-db.t
deleted file mode 100755
index 787d6c6..0000000
--- a/test/etap/070-couch-db.t
+++ /dev/null
@@ -1,73 +0,0 @@
-#!/usr/bin/env escript
-%% -*- erlang -*-
-
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
-main(_) ->
-    test_util:init_code_path(),
-
-    etap:plan(4),
-    case (catch test()) of
-        ok ->
-            etap:end_tests();
-        Other ->
-            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
-            etap:bail(Other)
-    end,
-    ok.
-
-test() ->
-
-    couch_server_sup:start_link(test_util:config_files()),
-
-    couch_db:create(<<"etap-test-db">>, []),
-    {ok, AllDbs} = couch_server:all_databases(),
-    etap:ok(lists:member(<<"etap-test-db">>, AllDbs), "Database was created."),
-
-    couch_server:delete(<<"etap-test-db">>, []),
-    {ok, AllDbs2} = couch_server:all_databases(),
-    etap:ok(not lists:member(<<"etap-test-db">>, AllDbs2),
-        "Database was deleted."),
-
-    gen_server:call(couch_server, {set_max_dbs_open, 3}),
-    MkDbName = fun(Int) -> list_to_binary("lru-" ++ integer_to_list(Int)) end,
-
-    lists:foreach(fun(Int) ->
-        {ok, TestDbs} = couch_server:all_databases(),
-        ok = case lists:member(MkDbName(Int), TestDbs) of
-            true -> couch_server:delete(MkDbName(Int), []);
-            _ -> ok
-        end,
-        {ok, Db} = couch_db:create(MkDbName(Int), []),
-        ok = couch_db:close(Db)
-    end, lists:seq(1, 6)),
-
-    {ok, AllDbs3} = couch_server:all_databases(),
-    NumCreated = lists:foldl(fun(Int, Acc) ->
-        true = lists:member(MkDbName(Int), AllDbs3),
-        Acc+1
-    end, 0, lists:seq(1, 6)),
-    etap:is(6, NumCreated, "Created all databases."),
-
-    lists:foreach(fun(Int) ->
-        ok = couch_server:delete(MkDbName(Int), [])
-    end, lists:seq(1, 6)),
-
-    {ok, AllDbs4} = couch_server:all_databases(),
-    NumDeleted = lists:foldl(fun(Int, Acc) ->
-        false = lists:member(MkDbName(Int), AllDbs4),
-        Acc+1
-    end, 0, lists:seq(1, 6)),
-    etap:is(6, NumDeleted, "Deleted all databases."),
-
-    ok.
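
Stripped of the etap bookkeeping, the create/list/delete cycle the test exercises is a handful of calls against couch_server and couch_db. A sketch, assuming a built tree so that couch_server_sup can boot from test_util:config_files():

    #!/usr/bin/env escript
    %% Sketch only: boots a throwaway server the same way the test does.
    main(_) ->
        test_util:init_code_path(),
        couch_server_sup:start_link(test_util:config_files()),
        {ok, Db} = couch_db:create(<<"etap-test-db">>, []),
        ok = couch_db:close(Db),
        {ok, AllDbs} = couch_server:all_databases(),
        true = lists:member(<<"etap-test-db">>, AllDbs),
        ok = couch_server:delete(<<"etap-test-db">>, []),
        io:format("db lifecycle: ok~n", []).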

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/test/etap/072-cleanup.t
----------------------------------------------------------------------
diff --git a/test/etap/072-cleanup.t b/test/etap/072-cleanup.t
deleted file mode 100755
index 9cbcdfa..0000000
--- a/test/etap/072-cleanup.t
+++ /dev/null
@@ -1,126 +0,0 @@
-#!/usr/bin/env escript
-%% -*- erlang -*-
-
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
--define(TEST_DB, <<"etap-test-db">>).
-
--record(user_ctx, {
-    name = null,
-    roles = [],
-    handler
-}).
-
--define(ADMIN_USER, #user_ctx{roles=[<<"_admin">>]}).
-
-main(_) ->
-    test_util:init_code_path(),
-
-    etap:plan(7),
-    try test() of
-        ok ->
-            etap:end_tests()
-    catch
-        Other ->
-            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
-            timer:sleep(1000),
-            etap:bail(Other)
-    end,
-    ok.
-
-test() ->
-
-    {ok, _} = couch_server_sup:start_link(test_util:config_files()),
-    couch_server:delete(?TEST_DB, []),
-    timer:sleep(1000),
-
-    couch_db:create(?TEST_DB, []),
-
-    {ok, AllDbs} = couch_server:all_databases(),
-    etap:ok(lists:member(?TEST_DB, AllDbs), "Database was created."),
-
-    FooRev = create_design_doc(<<"_design/foo">>, <<"bar">>),
-    query_view("foo", "bar"),
-
-    BoozRev = create_design_doc(<<"_design/booz">>, <<"baz">>),
-    query_view("booz", "baz"),
-
-    {ok, _Db} = couch_db:open(?TEST_DB, [{user_ctx, ?ADMIN_USER}]),
-    view_cleanup(),
-    etap:is(count_index_files(), 2,
-        "Two index files before any deletions."),
-
-    delete_design_doc(<<"_design/foo">>, FooRev),
-    view_cleanup(),
-    etap:is(count_index_files(), 1,
-        "One index file after first deletion and cleanup."),
-
-    delete_design_doc(<<"_design/booz">>, BoozRev),
-    view_cleanup(),
-    etap:is(count_index_files(), 0,
-        "No index files after second deletion and cleanup."),
-
-    couch_server:delete(?TEST_DB, []),
-    {ok, AllDbs2} = couch_server:all_databases(),
-    etap:ok(not lists:member(?TEST_DB, AllDbs2),
-        "Database was deleted."),
-    ok.
-
-create_design_doc(DDName, ViewName) ->
-    {ok, Db} = couch_db:open(?TEST_DB, [{user_ctx, ?ADMIN_USER}]),
-    DDoc = couch_doc:from_json_obj({[
-        {<<"_id">>, DDName},
-        {<<"language">>, <<"javascript">>},
-        {<<"views">>, {[
-            {ViewName, {[
-                {<<"map">>, <<"function(doc) { emit(doc.value, 1); }">>}
-            ]}}
-        ]}}
-    ]}),
-    {ok, Rev} = couch_db:update_doc(Db, DDoc, []),
-    couch_db:ensure_full_commit(Db),
-    couch_db:close(Db),
-    Rev.
-
-delete_design_doc(DDName, Rev) ->
-    {ok, Db} = couch_db:open(?TEST_DB, [{user_ctx, ?ADMIN_USER}]),
-    DDoc = couch_doc:from_json_obj({[
-        {<<"_id">>, DDName},
-        {<<"_rev">>, couch_doc:rev_to_str(Rev)},
-        {<<"_deleted">>, true}
-    ]}),
-    {ok, _} = couch_db:update_doc(Db, DDoc, [Rev]),
-    couch_db:close(Db).
-
-db_url() ->
-    Addr = couch_config:get("httpd", "bind_address", "127.0.0.1"),
-    Port = integer_to_list(mochiweb_socket_server:get(couch_httpd, port)),
-    "http://" ++ Addr ++ ":" ++ Port ++ "/" ++
-        binary_to_list(?TEST_DB).
-
-query_view(DDoc, View) ->
-    {ok, Code, _Headers, _Body} = test_util:request(
-        db_url() ++ "/_design/" ++ DDoc ++ "/_view/" ++ View, [], get),
-    etap:is(Code, 200, "Built view index for " ++ DDoc ++ "."),
-    ok.
-
-view_cleanup() ->
-    {ok, Db} = couch_db:open(?TEST_DB, [{user_ctx, ?ADMIN_USER}]),
-    couch_mrview:cleanup(Db),
-    couch_db:close(Db).
-
-count_index_files() ->
-    % call server to fetch the index files
-    RootDir = couch_config:get("couchdb", "view_index_dir"),
-    length(filelib:wildcard(RootDir ++ "/." ++
-        binary_to_list(?TEST_DB) ++ "_design"++"/mrview/*")).
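
Index files live under the couchdb/view_index_dir directory named after the database, as count_index_files/0 above shows, and couch_mrview:cleanup/1 removes the ones whose design documents are gone. A small helper-style sketch that combines both ideas; the function name is made up for illustration:

    %% Sketch only: DbName is a binary database name and AdminCtx a #user_ctx{}
    %% with the _admin role, built as in the test above.
    cleanup_and_count(DbName, AdminCtx) ->
        {ok, Db} = couch_db:open(DbName, [{user_ctx, AdminCtx}]),
        couch_mrview:cleanup(Db),
        couch_db:close(Db),
        RootDir = couch_config:get("couchdb", "view_index_dir"),
        Pattern = RootDir ++ "/." ++ binary_to_list(DbName) ++ "_design/mrview/*",
        length(filelib:wildcard(Pattern)).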

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/test/etap/073-changes.t
----------------------------------------------------------------------
diff --git a/test/etap/073-changes.t b/test/etap/073-changes.t
deleted file mode 100755
index d632c2f..0000000
--- a/test/etap/073-changes.t
+++ /dev/null
@@ -1,558 +0,0 @@
-#!/usr/bin/env escript
-%% -*- erlang -*-
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
-% Verify the changes feed: filtering by document IDs (normal, descending,
-% with since, and continuous feeds), filtering on design documents only,
-% and heartbeats on a filtered continuous feed.
-
--record(user_ctx, {
-    name = null,
-    roles = [],
-    handler
-}).
-
--record(changes_args, {
-    feed = "normal",
-    dir = fwd,
-    since = 0,
-    limit = 1000000000000000,
-    style = main_only,
-    heartbeat,
-    timeout,
-    filter = "",
-    filter_fun,
-    filter_args = [],
-    include_docs = false,
-    doc_options = [],
-    conflicts = false,
-    db_open_options = []
-}).
-
--record(row, {
-    id,
-    seq,
-    deleted = false
-}).
-
-
-test_db_name() -> <<"couch_test_changes">>.
-
-
-main(_) ->
-    test_util:init_code_path(),
-
-    etap:plan(43),
-    case (catch test()) of
-        ok ->
-            etap:end_tests();
-        Other ->
-            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
-            etap:bail(Other)
-    end,
-    ok.
-
-
-test() ->
-    couch_server_sup:start_link(test_util:config_files()),
-
-    test_by_doc_ids(),
-    test_by_doc_ids_with_since(),
-    test_by_doc_ids_continuous(),
-    test_design_docs_only(),
-    test_heartbeat(),
-
-    couch_server_sup:stop(),
-    ok.
-
-
-test_by_doc_ids() ->
-    {ok, Db} = create_db(test_db_name()),
-
-    {ok, _Rev1} = save_doc(Db, {[{<<"_id">>, <<"doc1">>}]}),
-    {ok, _Rev2} = save_doc(Db, {[{<<"_id">>, <<"doc2">>}]}),
-    {ok, Rev3} = save_doc(Db, {[{<<"_id">>, <<"doc3">>}]}),
-    {ok, _Rev4} = save_doc(Db, {[{<<"_id">>, <<"doc4">>}]}),
-    {ok, _Rev5} = save_doc(Db, {[{<<"_id">>, <<"doc5">>}]}),
-    {ok, _Rev3_2} = save_doc(Db, {[{<<"_id">>, <<"doc3">>}, {<<"_rev">>, Rev3}]}),
-    {ok, _Rev6} = save_doc(Db, {[{<<"_id">>, <<"doc6">>}]}),
-    {ok, _Rev7} = save_doc(Db, {[{<<"_id">>, <<"doc7">>}]}),
-    {ok, _Rev8} = save_doc(Db, {[{<<"_id">>, <<"doc8">>}]}),
-
-    etap:diag("Folding changes in ascending order with _doc_ids filter"),
-    ChangesArgs = #changes_args{
-        filter = "_doc_ids"
-    },
-    DocIds = [<<"doc3">>, <<"doc4">>, <<"doc9999">>],
-    Req = {json_req, {[{<<"doc_ids">>, DocIds}]}},
-    Consumer = spawn_consumer(test_db_name(), ChangesArgs, Req),
-
-    {Rows, LastSeq} = wait_finished(Consumer),
-    {ok, Db2} = couch_db:open_int(test_db_name(), []),
-    UpSeq = couch_db:get_update_seq(Db2),
-    couch_db:close(Db2),
-    etap:is(length(Rows), 2, "Received 2 changes rows"),
-    etap:is(LastSeq, UpSeq, "LastSeq is same as database update seq number"),
-    [#row{seq = Seq1, id = Id1}, #row{seq = Seq2, id = Id2}] = Rows,
-    etap:is(Id1, <<"doc4">>, "First row is for doc doc4"),
-    etap:is(Seq1, 4, "First row has seq 4"),
-    etap:is(Id2, <<"doc3">>, "Second row is for doc doc3"),
-    etap:is(Seq2, 6, "Second row has seq 6"),
-
-    stop(Consumer),
-    etap:diag("Folding changes in descending order with _doc_ids filter"),
-    ChangesArgs2 = #changes_args{
-        filter = "_doc_ids",
-        dir = rev
-    },
-    Consumer2 = spawn_consumer(test_db_name(), ChangesArgs2, Req),
-
-    {Rows2, LastSeq2} = wait_finished(Consumer2),
-    etap:is(length(Rows2), 2, "Received 2 changes rows"),
-    etap:is(LastSeq2, 4, "LastSeq is 4"),
-    [#row{seq = Seq1_2, id = Id1_2}, #row{seq = Seq2_2, id = Id2_2}] = Rows2,
-    etap:is(Id1_2, <<"doc3">>, "First row is for doc doc3"),
-    etap:is(Seq1_2, 6, "First row has seq 6"),
-    etap:is(Id2_2, <<"doc4">>, "Second row is for doc doc4"),
-    etap:is(Seq2_2, 4, "Second row has seq 4"),
-
-    stop(Consumer2),
-    delete_db(Db).
-
-
-test_by_doc_ids_with_since() ->
-    {ok, Db} = create_db(test_db_name()),
-
-    {ok, _Rev1} = save_doc(Db, {[{<<"_id">>, <<"doc1">>}]}),
-    {ok, _Rev2} = save_doc(Db, {[{<<"_id">>, <<"doc2">>}]}),
-    {ok, Rev3} = save_doc(Db, {[{<<"_id">>, <<"doc3">>}]}),
-    {ok, _Rev4} = save_doc(Db, {[{<<"_id">>, <<"doc4">>}]}),
-    {ok, _Rev5} = save_doc(Db, {[{<<"_id">>, <<"doc5">>}]}),
-    {ok, Rev3_2} = save_doc(Db, {[{<<"_id">>, <<"doc3">>}, {<<"_rev">>, Rev3}]}),
-    {ok, _Rev6} = save_doc(Db, {[{<<"_id">>, <<"doc6">>}]}),
-    {ok, _Rev7} = save_doc(Db, {[{<<"_id">>, <<"doc7">>}]}),
-    {ok, _Rev8} = save_doc(Db, {[{<<"_id">>, <<"doc8">>}]}),
-
-    ChangesArgs = #changes_args{
-        filter = "_doc_ids",
-        since = 5
-    },
-    DocIds = [<<"doc3">>, <<"doc4">>, <<"doc9999">>],
-    Req = {json_req, {[{<<"doc_ids">>, DocIds}]}},
-    Consumer = spawn_consumer(test_db_name(), ChangesArgs, Req),
-
-    {Rows, LastSeq} = wait_finished(Consumer),
-    {ok, Db2} = couch_db:open_int(test_db_name(), []),
-    UpSeq = couch_db:get_update_seq(Db2),
-    couch_db:close(Db2),
-    etap:is(LastSeq, UpSeq, "LastSeq is same as database update seq number"),
-    etap:is(length(Rows), 1, "Received 1 changes rows"),
-    [#row{seq = Seq1, id = Id1}] = Rows,
-    etap:is(Id1, <<"doc3">>, "First row is for doc doc3"),
-    etap:is(Seq1, 6, "First row has seq 6"),
-
-    stop(Consumer),
-
-    ChangesArgs2 = #changes_args{
-        filter = "_doc_ids",
-        since = 6
-    },
-    Consumer2 = spawn_consumer(test_db_name(), ChangesArgs2, Req),
-
-    {Rows2, LastSeq2} = wait_finished(Consumer2),
-    {ok, Db3} = couch_db:open_int(test_db_name(), []),
-    UpSeq2 = couch_db:get_update_seq(Db3),
-    couch_db:close(Db3),
-    etap:is(LastSeq2, UpSeq2, "LastSeq is same as database update seq number"),
-    etap:is(length(Rows2), 0, "Received 0 change rows"),
-
-    stop(Consumer2),
-
-    {ok, _Rev3_3} = save_doc(
-        Db,
-        {[{<<"_id">>, <<"doc3">>}, {<<"_deleted">>, true}, {<<"_rev">>, Rev3_2}]}),
-
-    ChangesArgs3 = #changes_args{
-        filter = "_doc_ids",
-        since = 9
-    },
-    Consumer3 = spawn_consumer(test_db_name(), ChangesArgs3, Req),
-
-    {Rows3, LastSeq3} = wait_finished(Consumer3),
-    {ok, Db4} = couch_db:open_int(test_db_name(), []),
-    UpSeq3 = couch_db:get_update_seq(Db4),
-    couch_db:close(Db4),
-    etap:is(LastSeq3, UpSeq3, "LastSeq is same as database update seq number"),
-    etap:is(length(Rows3), 1, "Received 1 changes rows"),
-    etap:is(
-        [#row{seq = LastSeq3, id = <<"doc3">>, deleted = true}],
-        Rows3,
-        "Received row with doc3 deleted"),
-
-    stop(Consumer3),
-
-    delete_db(Db).
-
-
-test_by_doc_ids_continuous() ->
-    {ok, Db} = create_db(test_db_name()),
-
-    {ok, _Rev1} = save_doc(Db, {[{<<"_id">>, <<"doc1">>}]}),
-    {ok, _Rev2} = save_doc(Db, {[{<<"_id">>, <<"doc2">>}]}),
-    {ok, Rev3} = save_doc(Db, {[{<<"_id">>, <<"doc3">>}]}),
-    {ok, Rev4} = save_doc(Db, {[{<<"_id">>, <<"doc4">>}]}),
-    {ok, _Rev5} = save_doc(Db, {[{<<"_id">>, <<"doc5">>}]}),
-    {ok, Rev3_2} = save_doc(Db, {[{<<"_id">>, <<"doc3">>}, {<<"_rev">>, Rev3}]}),
-    {ok, _Rev6} = save_doc(Db, {[{<<"_id">>, <<"doc6">>}]}),
-    {ok, _Rev7} = save_doc(Db, {[{<<"_id">>, <<"doc7">>}]}),
-    {ok, _Rev8} = save_doc(Db, {[{<<"_id">>, <<"doc8">>}]}),
-
-    ChangesArgs = #changes_args{
-        filter = "_doc_ids",
-        feed = "continuous"
-    },
-    DocIds = [<<"doc3">>, <<"doc4">>, <<"doc9999">>],
-    Req = {json_req, {[{<<"doc_ids">>, DocIds}]}},
-    Consumer = spawn_consumer(test_db_name(), ChangesArgs, Req),
-
-    pause(Consumer),
-    Rows = get_rows(Consumer),
-
-    etap:is(length(Rows), 2, "Received 2 changes rows"),
-    [#row{seq = Seq1, id = Id1}, #row{seq = Seq2, id = Id2}] = Rows,
-    etap:is(Id1, <<"doc4">>, "First row is for doc doc4"),
-    etap:is(Seq1, 4, "First row has seq 4"),
-    etap:is(Id2, <<"doc3">>, "Second row is for doc doc3"),
-    etap:is(Seq2, 6, "Second row has seq 6"),
-
-    clear_rows(Consumer),
-    {ok, _Rev9} = save_doc(Db, {[{<<"_id">>, <<"doc9">>}]}),
-    {ok, _Rev10} = save_doc(Db, {[{<<"_id">>, <<"doc10">>}]}),
-    unpause(Consumer),
-    pause(Consumer),
-    etap:is(get_rows(Consumer), [], "No new rows"),
-
-    {ok, Rev4_2} = save_doc(Db, {[{<<"_id">>, <<"doc4">>}, {<<"_rev">>, Rev4}]}),
-    {ok, _Rev11} = save_doc(Db, {[{<<"_id">>, <<"doc11">>}]}),
-    {ok, _Rev4_3} = save_doc(Db, {[{<<"_id">>, <<"doc4">>}, {<<"_rev">>, Rev4_2}]}),
-    {ok, _Rev12} = save_doc(Db, {[{<<"_id">>, <<"doc12">>}]}),
-    {ok, Rev3_3} = save_doc(Db, {[{<<"_id">>, <<"doc3">>}, {<<"_rev">>, Rev3_2}]}),
-    unpause(Consumer),
-    pause(Consumer),
-
-    NewRows = get_rows(Consumer),
-    etap:is(length(NewRows), 2, "Received 2 new rows"),
-    [Row14, Row16] = NewRows,
-    etap:is(Row14#row.seq, 14, "First row has seq 14"),
-    etap:is(Row14#row.id, <<"doc4">>, "First row is for doc doc4"),
-    etap:is(Row16#row.seq, 16, "Second row has seq 16"),
-    etap:is(Row16#row.id, <<"doc3">>, "Second row is for doc doc3"),
-
-    clear_rows(Consumer),
-    {ok, _Rev3_4} = save_doc(Db, {[{<<"_id">>, <<"doc3">>}, {<<"_rev">>, Rev3_3}]}),
-    unpause(Consumer),
-    pause(Consumer),
-    etap:is(get_rows(Consumer), [#row{seq = 17, id = <<"doc3">>}],
-        "Got row for seq 17, doc doc3"),
-
-    unpause(Consumer),
-    stop(Consumer),
-    delete_db(Db).
-
-
-test_design_docs_only() ->
-    {ok, Db} = create_db(test_db_name()),
-
-    {ok, _Rev1} = save_doc(Db, {[{<<"_id">>, <<"doc1">>}]}),
-    {ok, _Rev2} = save_doc(Db, {[{<<"_id">>, <<"doc2">>}]}),
-    {ok, Rev3} = save_doc(Db, {[{<<"_id">>, <<"_design/foo">>}]}),
-
-    ChangesArgs = #changes_args{
-        filter = "_design"
-    },
-    Consumer = spawn_consumer(test_db_name(), ChangesArgs, {json_req, null}),
-
-    {Rows, LastSeq} = wait_finished(Consumer),
-    {ok, Db2} = couch_db:open_int(test_db_name(), []),
-    UpSeq = couch_db:get_update_seq(Db2),
-    couch_db:close(Db2),
-
-    etap:is(LastSeq, UpSeq, "LastSeq is same as database update seq number"),
-    etap:is(length(Rows), 1, "Received 1 changes rows"),
-    etap:is(Rows, [#row{seq = 3, id = <<"_design/foo">>}], "Received row with ddoc"),
-
-    stop(Consumer),
-
-    {ok, Db3} = couch_db:open_int(
-        test_db_name(), [{user_ctx, #user_ctx{roles = [<<"_admin">>]}}]),
-    {ok, _Rev3_2} = save_doc(
-        Db3,
-        {[{<<"_id">>, <<"_design/foo">>}, {<<"_rev">>, Rev3},
-            {<<"_deleted">>, true}]}),
-
-    Consumer2 = spawn_consumer(test_db_name(), ChangesArgs, {json_req, null}),
-
-    {Rows2, LastSeq2} = wait_finished(Consumer2),
-    UpSeq2 = UpSeq + 1,
-    couch_db:close(Db3),
-
-    etap:is(LastSeq2, UpSeq2, "LastSeq is same as database update seq number"),
-    etap:is(length(Rows2), 1, "Received 1 changes rows"),
-    etap:is(
-        Rows2,
-        [#row{seq = 4, id = <<"_design/foo">>, deleted = true}],
-        "Received row with deleted ddoc"),
-
-    stop(Consumer2),
-    delete_db(Db).
-
-test_heartbeat() ->
-    {ok, Db} = create_db(test_db_name()),
-
-    {ok, _} = save_doc(Db, {[
-        {<<"_id">>, <<"_design/foo">>},
-        {<<"language">>, <<"javascript">>},
-            {<<"filters">>, {[
-                {<<"foo">>, <<"function(doc) { if ((doc._id == 'doc10') ||
-                                                  (doc._id == 'doc11') ||
-                                                  (doc._id == 'doc12')) {
-                                                return true;
-                                               } else {
-                                                  return false;
-                                               }}">>
-            }]}}
-    ]}),
-
-    ChangesArgs = #changes_args{
-        filter = "foo/foo",
-        feed = "continuous",
-        timeout = 10000,
-        heartbeat = 1000
-    },
-    Consumer = spawn_consumer(test_db_name(), ChangesArgs, {json_req, null}),
-
-    {ok, _Rev1} = save_doc(Db, {[{<<"_id">>, <<"doc1">>}]}),
-    timer:sleep(200),
-    {ok, _Rev2} = save_doc(Db, {[{<<"_id">>, <<"doc2">>}]}),
-    timer:sleep(200),
-    {ok, _Rev3} = save_doc(Db, {[{<<"_id">>, <<"doc3">>}]}),
-    timer:sleep(200),
-    {ok, _Rev4} = save_doc(Db, {[{<<"_id">>, <<"doc4">>}]}),
-    timer:sleep(200),
-    {ok, _Rev5} = save_doc(Db, {[{<<"_id">>, <<"doc5">>}]}),
-    timer:sleep(200),
-    {ok, _Rev6} = save_doc(Db, {[{<<"_id">>, <<"doc6">>}]}),
-    timer:sleep(200),
-    {ok, _Rev7} = save_doc(Db, {[{<<"_id">>, <<"doc7">>}]}),
-    timer:sleep(200),
-    {ok, _Rev8} = save_doc(Db, {[{<<"_id">>, <<"doc8">>}]}),
-    timer:sleep(200),
-    {ok, _Rev9} = save_doc(Db, {[{<<"_id">>, <<"doc9">>}]}),
-    Heartbeats = get_heartbeats(Consumer),
-    etap:is(Heartbeats, 2, "Received 2 heartbeats now"),
-    {ok, _Rev10} = save_doc(Db, {[{<<"_id">>, <<"doc10">>}]}),
-    timer:sleep(200),
-    {ok, _Rev11} = save_doc(Db, {[{<<"_id">>, <<"doc11">>}]}),
-    timer:sleep(200),
-    {ok, _Rev12} = save_doc(Db, {[{<<"_id">>, <<"doc12">>}]}),
-    Heartbeats2 = get_heartbeats(Consumer),
-    etap:is(Heartbeats2, 3, "Received 3 heartbeats now"),
-    Rows = get_rows(Consumer),
-    etap:is(length(Rows), 3, "Received 3 changes rows"),
-
-    {ok, _Rev13} = save_doc(Db, {[{<<"_id">>, <<"doc13">>}]}),
-    timer:sleep(200),
-    {ok, _Rev14} = save_doc(Db, {[{<<"_id">>, <<"doc14">>}]}),
-    timer:sleep(200),
-    Heartbeats3 = get_heartbeats(Consumer),
-    etap:is(Heartbeats3, 6, "Received 6 heartbeats now"),
-    stop(Consumer),
-    couch_db:close(Db),
-    delete_db(Db).
-
-
-save_doc(Db, Json) ->
-    Doc = couch_doc:from_json_obj(Json),
-    {ok, Rev} = couch_db:update_doc(Db, Doc, []),
-    {ok, couch_doc:rev_to_str(Rev)}.
-
-
-get_rows(Consumer) ->
-    Ref = make_ref(),
-    Consumer ! {get_rows, Ref},
-    receive
-    {rows, Ref, Rows} ->
-        Rows
-    after 3000 ->
-        etap:bail("Timeout getting rows from consumer")
-    end.
-
-get_heartbeats(Consumer) ->
-    Ref = make_ref(),
-    Consumer ! {get_heartbeats, Ref},
-    receive
-    {hearthbeats, Ref, HeartBeats} ->
-        HeartBeats
-    after 3000 ->
-        etap:bail("Timeout getting heartbeats from consumer")
-    end.
-
-
-clear_rows(Consumer) ->
-    Ref = make_ref(),
-    Consumer ! {reset, Ref},
-    receive
-    {ok, Ref} ->
-        ok
-    after 3000 ->
-        etap:bail("Timeout clearing consumer rows")
-    end.
-
-
-stop(Consumer) ->
-    Ref = make_ref(),
-    Consumer ! {stop, Ref},
-    receive
-    {ok, Ref} ->
-        ok
-    after 3000 ->
-        etap:bail("Timeout stopping consumer")
-    end.
-
-
-pause(Consumer) ->
-    Ref = make_ref(),
-    Consumer ! {pause, Ref},
-    receive
-    {paused, Ref} ->
-        ok
-    after 3000 ->
-        etap:bail("Timeout pausing consumer")
-    end.
-
-
-unpause(Consumer) ->
-    Ref = make_ref(),
-    Consumer ! {continue, Ref},
-    receive
-    {ok, Ref} ->
-        ok
-    after 3000 ->
-        etap:bail("Timeout unpausing consumer")
-    end.
-
-
-wait_finished(_Consumer) ->
-    receive
-    {consumer_finished, Rows, LastSeq} ->
-        {Rows, LastSeq}
-    after 30000 ->
-        etap:bail("Timeout waiting for consumer to finish")
-    end.
-
-
-spawn_consumer(DbName, ChangesArgs0, Req) ->
-    Parent = self(),
-    spawn(fun() ->
-        put(heartbeat_count, 0),
-        Callback = fun({change, {Change}, _}, _, Acc) ->
-            Id = couch_util:get_value(<<"id">>, Change),
-            Seq = couch_util:get_value(<<"seq">>, Change),
-            Del = couch_util:get_value(<<"deleted">>, Change, false),
-            [#row{id = Id, seq = Seq, deleted = Del} | Acc];
-        ({stop, LastSeq}, _, Acc) ->
-            Parent ! {consumer_finished, lists:reverse(Acc), LastSeq},
-            stop_loop(Parent, Acc);
-        (timeout, _, Acc) ->
-            put(heartbeat_count, get(heartbeat_count) + 1),
-            maybe_pause(Parent, Acc);
-        (_, _, Acc) ->
-            maybe_pause(Parent, Acc)
-        end,
-        {ok, Db} = couch_db:open_int(DbName, []),
-        ChangesArgs = case (ChangesArgs0#changes_args.timeout =:= undefined)
-            andalso (ChangesArgs0#changes_args.heartbeat =:= undefined) of
-        true ->
-            ChangesArgs0#changes_args{timeout = 10, heartbeat = 10};
-        false ->
-            ChangesArgs0
-        end,
-        FeedFun = couch_changes:handle_changes(ChangesArgs, Req, Db),
-        try
-            FeedFun({Callback, []})
-        catch throw:{stop, _} ->
-            ok
-        end,
-        catch couch_db:close(Db)
-    end).
-
-
-maybe_pause(Parent, Acc) ->
-    receive
-    {get_rows, Ref} ->
-        Parent ! {rows, Ref, lists:reverse(Acc)},
-        maybe_pause(Parent, Acc);
-    {get_heartbeats, Ref} ->
-        Parent ! {hearthbeats, Ref, get(heartbeat_count)},
-        maybe_pause(Parent, Acc);
-    {reset, Ref} ->
-        Parent ! {ok, Ref},
-        maybe_pause(Parent, []);
-    {pause, Ref} ->
-        Parent ! {paused, Ref},
-        pause_loop(Parent, Acc);
-    {stop, Ref} ->
-        Parent ! {ok, Ref},
-        throw({stop, Acc})
-    after 0 ->
-        Acc
-    end.
-
-
-pause_loop(Parent, Acc) ->
-    receive
-    {stop, Ref} ->
-        Parent ! {ok, Ref},
-        throw({stop, Acc});
-    {reset, Ref} ->
-        Parent ! {ok, Ref},
-        pause_loop(Parent, []);
-    {continue, Ref} ->
-        Parent ! {ok, Ref},
-        Acc;
-    {get_rows, Ref} ->
-        Parent ! {rows, Ref, lists:reverse(Acc)},
-        pause_loop(Parent, Acc)
-    end.
-
-
-stop_loop(Parent, Acc) ->
-    receive
-    {get_rows, Ref} ->
-        Parent ! {rows, Ref, lists:reverse(Acc)},
-        stop_loop(Parent, Acc);
-    {stop, Ref} ->
-        Parent ! {ok, Ref},
-        Acc
-    end.
-
-
-create_db(DbName) ->
-    couch_db:create(
-        DbName,
-        [{user_ctx, #user_ctx{roles = [<<"_admin">>]}}, overwrite]).
-
-
-delete_db(Db) ->
-    ok = couch_server:delete(
-        couch_db:name(Db), [{user_ctx, #user_ctx{roles = [<<"_admin">>]}}]).
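
Underneath the pause/resume plumbing, the consumer boils down to three steps: fill in a #changes_args{} record, wrap any filter parameters in a {json_req, EJson} tuple, and pass a callback plus initial accumulator to the fun returned by couch_changes:handle_changes/3. A trimmed sketch for a normal (non-continuous) _doc_ids feed, assuming the same local #changes_args definition used at the top of this test; changed_docs is a made-up name:

    %% Sketch only: collects changed doc ids and the final sequence number.
    changed_docs(DbName, DocIds) ->
        Self = self(),
        Args = #changes_args{filter = "_doc_ids"},
        Req = {json_req, {[{<<"doc_ids">>, DocIds}]}},
        {ok, Db} = couch_db:open_int(DbName, []),
        Callback = fun({change, {Change}, _}, _, Acc) ->
                           [couch_util:get_value(<<"id">>, Change) | Acc];
                      ({stop, LastSeq}, _, Acc) ->
                           Self ! {done, lists:reverse(Acc), LastSeq},
                           Acc;
                      (_, _, Acc) ->
                           Acc
                   end,
        FeedFun = couch_changes:handle_changes(Args, Req, Db),
        FeedFun({Callback, []}),
        couch_db:close(Db),
        receive {done, Ids, LastSeq} -> {Ids, LastSeq} after 5000 -> timeout end.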

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/test/etap/074-doc-update-conflicts.t
----------------------------------------------------------------------
diff --git a/test/etap/074-doc-update-conflicts.t b/test/etap/074-doc-update-conflicts.t
deleted file mode 100755
index 09d0633..0000000
--- a/test/etap/074-doc-update-conflicts.t
+++ /dev/null
@@ -1,218 +0,0 @@
-#!/usr/bin/env escript
-%% -*- erlang -*-
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
--record(user_ctx, {
-    name = null,
-    roles = [],
-    handler
-}).
-
--define(i2l(I), integer_to_list(I)).
-
-test_db_name() -> <<"couch_test_update_conflicts">>.
-
-
-main(_) ->
-    test_util:init_code_path(),
-
-    etap:plan(35),
-    case (catch test()) of
-        ok ->
-            etap:end_tests();
-        Other ->
-            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
-            etap:bail(Other)
-    end,
-    ok.
-
-
-test() ->
-    couch_server_sup:start_link(test_util:config_files()),
-    couch_config:set("couchdb", "delayed_commits", "true", false),
-
-    lists:foreach(
-        fun(NumClients) -> test_concurrent_doc_update(NumClients) end,
-        [100, 500, 1000, 2000, 5000]),
-
-    test_bulk_delete_create(),
-
-    couch_server_sup:stop(),
-    ok.
-
-
-% Verify that if multiple clients try to update the same document
-% simultaneously, only one of them will get success response and all
-% the other ones will get a conflict error. Also validate that the
-% client which got the success response got its document version
-% persisted into the database.
-test_concurrent_doc_update(NumClients) ->
-    {ok, Db} = create_db(test_db_name()),
-    Doc = couch_doc:from_json_obj({[
-        {<<"_id">>, <<"foobar">>},
-        {<<"value">>, 0}
-    ]}),
-    {ok, Rev} = couch_db:update_doc(Db, Doc, []),
-    ok = couch_db:close(Db),
-    RevStr = couch_doc:rev_to_str(Rev),
-    etap:diag("Created first revision of test document"),
-
-    etap:diag("Spawning " ++ ?i2l(NumClients) ++
-        " clients to update the document"),
-    Clients = lists:map(
-        fun(Value) ->
-            ClientDoc = couch_doc:from_json_obj({[
-                {<<"_id">>, <<"foobar">>},
-                {<<"_rev">>, RevStr},
-                {<<"value">>, Value}
-            ]}),
-            Pid = spawn_client(ClientDoc),
-            {Value, Pid, erlang:monitor(process, Pid)}
-        end,
-        lists:seq(1, NumClients)),
-
-    lists:foreach(fun({_, Pid, _}) -> Pid ! go end, Clients),
-    etap:diag("Waiting for clients to finish"),
-
-    {NumConflicts, SavedValue} = lists:foldl(
-        fun({Value, Pid, MonRef}, {AccConflicts, AccValue}) ->
-            receive
-            {'DOWN', MonRef, process, Pid, {ok, _NewRev}} ->
-                {AccConflicts, Value};
-            {'DOWN', MonRef, process, Pid, conflict} ->
-                {AccConflicts + 1, AccValue};
-            {'DOWN', MonRef, process, Pid, Error} ->
-                etap:bail("Client " ++ ?i2l(Value) ++
-                    " got update error: " ++ couch_util:to_list(Error))
-            after 60000 ->
-                etap:bail("Timeout waiting for client " ++ ?i2l(Value) ++ " to die")
-            end
-        end,
-        {0, nil},
-        Clients),
-
-    etap:diag("Verifying client results"),
-    etap:is(
-        NumConflicts,
-        NumClients - 1,
-        "Got " ++ ?i2l(NumClients - 1) ++ " client conflicts"),
-
-    {ok, Db2} = couch_db:open_int(test_db_name(), []),
-    {ok, Leaves} = couch_db:open_doc_revs(Db2, <<"foobar">>, all, []),
-    ok = couch_db:close(Db2),
-    etap:is(length(Leaves), 1, "Only one document revision was persisted"),
-    [{ok, Doc2}] = Leaves,
-    {JsonDoc} = couch_doc:to_json_obj(Doc2, []),
-    etap:is(
-        couch_util:get_value(<<"value">>, JsonDoc),
-        SavedValue,
-        "Persisted doc has the right value"),
-
-    ok = timer:sleep(1000),
-    etap:diag("Restarting the server"),
-    couch_server_sup:stop(),
-    ok = timer:sleep(1000),
-    couch_server_sup:start_link(test_util:config_files()),
-
-    {ok, Db3} = couch_db:open_int(test_db_name(), []),
-    {ok, Leaves2} = couch_db:open_doc_revs(Db3, <<"foobar">>, all, []),
-    ok = couch_db:close(Db3),
-    etap:is(length(Leaves2), 1, "Only one document revision was persisted"),
-    [{ok, Doc3}] = Leaves2,
-    etap:is(Doc3, Doc2, "Got same document after server restart"),
-
-    delete_db(Db3).
-
-
-% COUCHDB-188
-test_bulk_delete_create() ->
-    {ok, Db} = create_db(test_db_name()),
-    Doc = couch_doc:from_json_obj({[
-        {<<"_id">>, <<"foobar">>},
-        {<<"value">>, 0}
-    ]}),
-    {ok, Rev} = couch_db:update_doc(Db, Doc, []),
-
-    DeletedDoc = couch_doc:from_json_obj({[
-        {<<"_id">>, <<"foobar">>},
-        {<<"_rev">>, couch_doc:rev_to_str(Rev)},
-        {<<"_deleted">>, true}
-    ]}),
-    NewDoc = couch_doc:from_json_obj({[
-        {<<"_id">>, <<"foobar">>},
-        {<<"value">>, 666}
-    ]}),
-
-    {ok, Results} = couch_db:update_docs(Db, [DeletedDoc, NewDoc], []),
-    ok = couch_db:close(Db),
-
-    etap:is(length([ok || {ok, _} <- Results]), 2,
-        "Deleted and non-deleted versions got an ok reply"),
-
-    [{ok, Rev1}, {ok, Rev2}] = Results,
-    {ok, Db2} = couch_db:open_int(test_db_name(), []),
-
-    {ok, [{ok, Doc1}]} = couch_db:open_doc_revs(
-        Db2, <<"foobar">>, [Rev1], [conflicts, deleted_conflicts]),
-    {ok, [{ok, Doc2}]} = couch_db:open_doc_revs(
-        Db2, <<"foobar">>, [Rev2], [conflicts, deleted_conflicts]),
-    ok = couch_db:close(Db2),
-
-    {Doc1Props} = couch_doc:to_json_obj(Doc1, []),
-    {Doc2Props} = couch_doc:to_json_obj(Doc2, []),
-
-    etap:is(couch_util:get_value(<<"_deleted">>, Doc1Props), true,
-        "Document was deleted"),
-    etap:is(couch_util:get_value(<<"_deleted">>, Doc2Props), undefined,
-        "New document not flagged as deleted"),
-    etap:is(couch_util:get_value(<<"value">>, Doc2Props), 666,
-        "New leaf revision has the right value"),
-    etap:is(couch_util:get_value(<<"_conflicts">>, Doc1Props), undefined,
-        "Deleted document has no conflicts"),
-    etap:is(couch_util:get_value(<<"_deleted_conflicts">>, Doc1Props), undefined,
-        "Deleted document has no deleted conflicts"),
-    etap:is(couch_util:get_value(<<"_conflicts">>, Doc2Props), undefined,
-        "New leaf revision doesn't have conflicts"),
-    etap:is(couch_util:get_value(<<"_deleted_conflicts">>, Doc2Props), undefined,
-        "New leaf revision doesn't have deleted conflicts"),
-
-    etap:is(element(1, Rev1), 2, "Deleted revision has position 2"),
-    etap:is(element(1, Rev2), 1, "New leaf revision has position 1"),
-
-    delete_db(Db2).
-
-
-spawn_client(Doc) ->
-    spawn(fun() ->
-        {ok, Db} = couch_db:open_int(test_db_name(), []),
-        receive go -> ok end,
-        erlang:yield(),
-        Result = try
-            couch_db:update_doc(Db, Doc, [])
-        catch _:Error ->
-            Error
-        end,
-        ok = couch_db:close(Db),
-        exit(Result)
-    end).
-
-
-create_db(DbName) ->
-    couch_db:create(
-        DbName,
-        [{user_ctx, #user_ctx{roles = [<<"_admin">>]}}, overwrite]).
-
-
-delete_db(Db) ->
-    ok = couch_server:delete(
-        couch_db:name(Db), [{user_ctx, #user_ctx{roles = [<<"_admin">>]}}]).
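
The heart of the race is that couch_db:update_doc/3 throws conflict for every writer whose base revision has already been superseded, so exactly one of the spawned clients can win. A compact, sequential sketch of a single attempt, following spawn_client/1 above; try_update is a made-up name:

    %% Sketch only: returns {ok, NewRev} for the winner and conflict otherwise.
    try_update(DbName, Doc) ->
        {ok, Db} = couch_db:open_int(DbName, []),
        Result = try
            couch_db:update_doc(Db, Doc, [])
        catch _:Error ->
            Error
        end,
        ok = couch_db:close(Db),
        Result.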

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/test/etap/075-auth-cache.t
----------------------------------------------------------------------
diff --git a/test/etap/075-auth-cache.t b/test/etap/075-auth-cache.t
deleted file mode 100755
index 623884b..0000000
--- a/test/etap/075-auth-cache.t
+++ /dev/null
@@ -1,276 +0,0 @@
-#!/usr/bin/env escript
-%% -*- erlang -*-
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
--record(user_ctx, {
-    name = null,
-    roles = [],
-    handler
-}).
-
--record(db, {
-    main_pid = nil,
-    update_pid = nil,
-    compactor_pid = nil,
-    instance_start_time, % number of microsecs since jan 1 1970 as a binary string
-    fd,
-    updater_fd,
-    fd_ref_counter,
-    header,
-    committed_update_seq,
-    fulldocinfo_by_id_btree,
-    docinfo_by_seq_btree,
-    local_docs_btree,
-    update_seq,
-    name,
-    filepath,
-    validate_doc_funs = [],
-    security = [],
-    security_ptr = nil,
-    user_ctx = #user_ctx{},
-    waiting_delayed_commit = nil,
-    revs_limit = 1000,
-    fsync_options = [],
-    options = [],
-    compression,
-    before_doc_update = nil, % nil | fun(Doc, Db) -> NewDoc
-    after_doc_read = nil     % nil | fun(Doc, Db) -> NewDoc
-}).
-
-auth_db_name() -> <<"couch_test_auth_db">>.
-auth_db_2_name() -> <<"couch_test_auth_db_2">>.
-salt() -> <<"SALT">>.
-
-
-main(_) ->
-    test_util:init_code_path(),
-
-    etap:plan(19),
-    case (catch test()) of
-        ok ->
-            etap:end_tests();
-        Other ->
-            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
-            etap:bail(Other)
-    end,
-    ok.
-
-
-test() ->
-    couch_server_sup:start_link(test_util:config_files()),
-    OrigName = couch_config:get("couch_httpd_auth", "authentication_db"),
-    couch_config:set(
-        "couch_httpd_auth", "authentication_db",
-        binary_to_list(auth_db_name()), false),
-    delete_db(auth_db_name()),
-    delete_db(auth_db_2_name()),
-
-    test_auth_db_crash(),
-
-    couch_config:set("couch_httpd_auth", "authentication_db", OrigName, false),
-    delete_db(auth_db_name()),
-    delete_db(auth_db_2_name()),
-    couch_server_sup:stop(),
-    ok.
-
-
-test_auth_db_crash() ->
-    Creds0 = couch_auth_cache:get_user_creds("joe"),
-    etap:is(Creds0, nil, "Got nil when getting joe's credentials"),
-
-    etap:diag("Adding first version of Joe's user doc"),
-    PasswordHash1 = hash_password("pass1"),
-    {ok, Rev1} = update_user_doc(auth_db_name(), "joe", "pass1"),
-
-    Creds1 = couch_auth_cache:get_user_creds("joe"),
-    etap:is(is_list(Creds1), true, "Got joe's credentials from cache"),
-    etap:is(couch_util:get_value(<<"password_sha">>, Creds1), PasswordHash1,
-            "Cached credentials have the right password"),
-
-    etap:diag("Updating Joe's user doc password"),
-    PasswordHash2 = hash_password("pass2"),
-    {ok, _Rev2} = update_user_doc(auth_db_name(), "joe", "pass2", Rev1),
-
-    Creds2 = couch_auth_cache:get_user_creds("joe"),
-    etap:is(is_list(Creds2), true, "Got joe's credentials from cache"),
-    etap:is(couch_util:get_value(<<"password_sha">>, Creds2), PasswordHash2,
-            "Cached credentials have the new password"),
-
-    etap:diag("Shutting down the auth database process"),
-    shutdown_db(auth_db_name()),
-
-    {ok, UpdateRev} = get_doc_rev(auth_db_name(), "joe"),
-    PasswordHash3 = hash_password("pass3"),
-    {ok, _Rev3} = update_user_doc(auth_db_name(), "joe", "pass3", UpdateRev),
-
-    etap:is(get_user_doc_password_sha(auth_db_name(), "joe"),
-            PasswordHash3,
-            "Latest Joe's doc revision has the new password hash"),
-
-    Creds3 = couch_auth_cache:get_user_creds("joe"),
-    etap:is(is_list(Creds3), true, "Got joe's credentials from cache"),
-    etap:is(couch_util:get_value(<<"password_sha">>, Creds3), PasswordHash3,
-            "Cached credentials have the new password"),
-
-    etap:diag("Deleting Joe's user doc"),
-    delete_user_doc(auth_db_name(), "joe"),
-    Creds4 = couch_auth_cache:get_user_creds("joe"),
-    etap:is(nil, Creds4,
-            "Joe's credentials not found in cache after user doc was deleted"),
-
-    etap:diag("Adding new user doc for Joe"),
-    PasswordHash5 = hash_password("pass5"),
-    {ok, _NewRev1} = update_user_doc(auth_db_name(), "joe", "pass5"),
-
-    Creds5 = couch_auth_cache:get_user_creds("joe"),
-    etap:is(is_list(Creds5), true, "Got joe's credentials from cache"),
-    etap:is(couch_util:get_value(<<"password_sha">>, Creds5), PasswordHash5,
-            "Cached credentials have the right password"),
-
-    full_commit(auth_db_name()),
-
-    etap:diag("Changing the auth database"),
-    couch_config:set(
-        "couch_httpd_auth", "authentication_db",
-        binary_to_list(auth_db_2_name()), false),
-    ok = timer:sleep(500),
-
-    Creds6 = couch_auth_cache:get_user_creds("joe"),
-    etap:is(nil, Creds6,
-            "Joe's credentials not found in cache after auth database changed"),
-
-    etap:diag("Adding first version of Joe's user doc to new auth database"),
-    PasswordHash7 = hash_password("pass7"),
-    {ok, _} = update_user_doc(auth_db_2_name(), "joe", "pass7"),
-
-    Creds7 = couch_auth_cache:get_user_creds("joe"),
-    etap:is(is_list(Creds7), true, "Got joe's credentials from cache"),
-    etap:is(couch_util:get_value(<<"password_sha">>, Creds7), PasswordHash7,
-            "Cached credentials have the right password"),
-
-    etap:diag("Shutting down the auth database process"),
-    shutdown_db(auth_db_2_name()),
-
-    {ok, UpdateRev2} = get_doc_rev(auth_db_2_name(), "joe"),
-    PasswordHash8 = hash_password("pass8"),
-    {ok, _Rev8} = update_user_doc(auth_db_2_name(), "joe", "pass8", UpdateRev2),
-
-    etap:is(get_user_doc_password_sha(auth_db_2_name(), "joe"),
-            PasswordHash8,
-            "Latest Joe's doc revision has the new password hash"),
-
-    Creds8 = couch_auth_cache:get_user_creds("joe"),
-    etap:is(is_list(Creds8), true, "Got joe's credentials from cache"),
-    etap:is(couch_util:get_value(<<"password_sha">>, Creds8), PasswordHash8,
-            "Cached credentials have the new password"),
-
-    etap:diag("Changing the auth database again"),
-    couch_config:set(
-        "couch_httpd_auth", "authentication_db",
-        binary_to_list(auth_db_name()), false),
-    ok = timer:sleep(500),
-
-    Creds9 = couch_auth_cache:get_user_creds("joe"),
-    etap:is(Creds9, Creds5,
-            "Got same credentials as before the firt auth database change"),
-    etap:is(couch_util:get_value(<<"password_sha">>, Creds9), PasswordHash5,
-            "Cached credentials have the right password"),
-    ok.
-
-
-update_user_doc(DbName, UserName, Password) ->
-    update_user_doc(DbName, UserName, Password, nil).
-
-update_user_doc(DbName, UserName, Password, Rev) ->
-    User = iolist_to_binary(UserName),
-    Doc = couch_doc:from_json_obj({[
-        {<<"_id">>, <<"org.couchdb.user:", User/binary>>},
-        {<<"name">>, User},
-        {<<"type">>, <<"user">>},
-        {<<"salt">>, salt()},
-        {<<"password_sha">>, hash_password(Password)},
-        {<<"roles">>, []}
-    ] ++ case Rev of
-        nil -> [];
-        _ ->   [{<<"_rev">>, Rev}]
-    end}),
-    {ok, AuthDb} = open_auth_db(DbName),
-    {ok, NewRev} = couch_db:update_doc(AuthDb, Doc, []),
-    ok = couch_db:close(AuthDb),
-    {ok, couch_doc:rev_to_str(NewRev)}.
-
-
-hash_password(Password) ->
-    list_to_binary(
-        couch_util:to_hex(crypto:sha(iolist_to_binary([Password, salt()])))).
-
-
-shutdown_db(DbName) ->
-    {ok, AuthDb} = open_auth_db(DbName),
-    ok = couch_db:close(AuthDb),
-    couch_util:shutdown_sync(AuthDb#db.main_pid),
-    ok = timer:sleep(1000).
-
-
-get_doc_rev(DbName, UserName) ->
-    DocId = iolist_to_binary([<<"org.couchdb.user:">>, UserName]),
-    {ok, AuthDb} = open_auth_db(DbName),
-    UpdateRev =
-    case couch_db:open_doc(AuthDb, DocId, []) of
-    {ok, Doc} ->
-        {Props} = couch_doc:to_json_obj(Doc, []),
-        couch_util:get_value(<<"_rev">>, Props);
-    {not_found, missing} ->
-        nil
-    end,
-    ok = couch_db:close(AuthDb),
-    {ok, UpdateRev}.
-
-
-get_user_doc_password_sha(DbName, UserName) ->
-    DocId = iolist_to_binary([<<"org.couchdb.user:">>, UserName]),
-    {ok, AuthDb} = open_auth_db(DbName),
-    {ok, Doc} = couch_db:open_doc(AuthDb, DocId, []),
-    ok = couch_db:close(AuthDb),
-    {Props} = couch_doc:to_json_obj(Doc, []),
-    couch_util:get_value(<<"password_sha">>, Props).
-
-
-delete_user_doc(DbName, UserName) ->
-    DocId = iolist_to_binary([<<"org.couchdb.user:">>, UserName]),
-    {ok, AuthDb} = open_auth_db(DbName),
-    {ok, Doc} = couch_db:open_doc(AuthDb, DocId, []),
-    {Props} = couch_doc:to_json_obj(Doc, []),
-    DeletedDoc = couch_doc:from_json_obj({[
-        {<<"_id">>, DocId},
-        {<<"_rev">>, couch_util:get_value(<<"_rev">>, Props)},
-        {<<"_deleted">>, true}
-    ]}),
-    {ok, _} = couch_db:update_doc(AuthDb, DeletedDoc, []),
-    ok = couch_db:close(AuthDb).
-
-
-full_commit(DbName) ->
-    {ok, AuthDb} = open_auth_db(DbName),
-    {ok, _} = couch_db:ensure_full_commit(AuthDb),
-    ok = couch_db:close(AuthDb).
-
-
-open_auth_db(DbName) ->
-    couch_db:open_int(
-        DbName, [{user_ctx, #user_ctx{roles = [<<"_admin">>]}}]).
-
-
-delete_db(Name) ->
-    couch_server:delete(
-        Name, [{user_ctx, #user_ctx{roles = [<<"_admin">>]}}]).
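
couch_auth_cache:get_user_creds/1 is the single entry point the assertions above lean on: it returns nil for unknown users and a property list (including password_sha) once a matching org.couchdb.user: doc exists in whatever database couch_httpd_auth/authentication_db points at. A lookup sketch; lookup_password_sha is a made-up name:

    %% Sketch only: assumes the server is already running, as in test/0 above.
    lookup_password_sha(UserName) ->
        case couch_auth_cache:get_user_creds(UserName) of
            nil ->
                not_found;
            Creds when is_list(Creds) ->
                couch_util:get_value(<<"password_sha">>, Creds)
        end.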


[07/12] move test -> src/test

Posted by be...@apache.org.
http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/src/test/etap/210-os-proc-pool.t
----------------------------------------------------------------------
diff --git a/src/test/etap/210-os-proc-pool.t b/src/test/etap/210-os-proc-pool.t
new file mode 100755
index 0000000..d80707e
--- /dev/null
+++ b/src/test/etap/210-os-proc-pool.t
@@ -0,0 +1,163 @@
+#!/usr/bin/env escript
+%% -*- erlang -*-
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+%   http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+main(_) ->
+    test_util:init_code_path(),
+
+    etap:plan(21),
+    case (catch test()) of
+        ok ->
+            etap:end_tests();
+        Other ->
+            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
+            etap:bail(Other)
+    end,
+    ok.
+
+
+test() ->
+    couch_server_sup:start_link(test_util:config_files()),
+    couch_config:set("query_server_config", "os_process_limit", "3", false),
+
+    test_pool_full(),
+    test_client_unexpected_exit(),
+
+    couch_server_sup:stop(),
+    ok.
+
+
+test_pool_full() ->
+    Client1 = spawn_client(),
+    Client2 = spawn_client(),
+    Client3 = spawn_client(),
+
+    etap:diag("Check that we can spawn the max number of processes."),
+    etap:is(ping_client(Client1), ok, "Client 1 started ok."),
+    etap:is(ping_client(Client2), ok, "Client 2 started ok."),
+    etap:is(ping_client(Client3), ok, "Client 3 started ok."),
+
+    Proc1 = get_client_proc(Client1, "1"),
+    Proc2 = get_client_proc(Client2, "2"),
+    Proc3 = get_client_proc(Client3, "3"),
+    etap:isnt(Proc1, Proc2, "Clients 1 and 2 got different procs."),
+    etap:isnt(Proc2, Proc3, "Clients 2 and 3 got different procs."),
+    etap:isnt(Proc1, Proc3, "Clients 1 and 3 got different procs."),
+
+    etap:diag("Check that client 4 blocks waiting for a process."),
+    Client4 = spawn_client(),
+    etap:is(ping_client(Client4), timeout, "Client 4 blocked while waiting."),
+
+    etap:diag("Check that stopping a client gives up its process."),
+    etap:is(stop_client(Client1), ok, "First client stopped."),
+
+    etap:diag("And check that our blocked process has been unblocked."),
+    etap:is(ping_client(Client4), ok, "Client was unblocked."),
+
+    Proc4 = get_client_proc(Client4, "4"),
+    etap:is(Proc4, Proc1, "Client 4 got proc that client 1 got before."),
+
+    lists:map(fun(C) -> ok = stop_client(C) end, [Client2, Client3, Client4]).
+
+
+test_client_unexpected_exit() ->
+    Client1 = spawn_client(),
+    Client2 = spawn_client(),
+    Client3 = spawn_client(),
+
+    etap:diag("Check that up to os_process_limit clients started."),
+    etap:is(ping_client(Client1), ok, "Client 1 started ok."),
+    etap:is(ping_client(Client2), ok, "Client 2 started ok."),
+    etap:is(ping_client(Client3), ok, "Client 3 started ok."),
+
+    Proc1 = get_client_proc(Client1, "1"),
+    Proc2 = get_client_proc(Client2, "2"),
+    Proc3 = get_client_proc(Client3, "3"),
+    etap:isnt(Proc1, Proc2, "Clients 1 and 2 got different procs."),
+    etap:isnt(Proc2, Proc3, "Clients 2 and 3 got different procs."),
+    etap:isnt(Proc1, Proc3, "Clients 1 and 3 got different procs."),
+
+    etap:diag("Check that killing a client frees an os_process."),
+    etap:is(kill_client(Client1), ok, "Client 1 died all right."),
+
+    etap:diag("Check that a new client is not blocked on boot."),
+    Client4 = spawn_client(),
+    etap:is(ping_client(Client4), ok, "New client booted without blocking."),
+
+    Proc4 = get_client_proc(Client4, "4"),
+    etap:isnt(Proc4, Proc1,
+        "Client 4 got a proc different from the one client 1 got before."),
+    etap:isnt(Proc4, Proc2, "Client 4's proc different from client 2's proc."),
+    etap:isnt(Proc4, Proc3, "Client 4's proc different from client 3's proc."),
+
+    lists:map(fun(C) -> ok = stop_client(C) end, [Client2, Client3, Client4]).
+
+
+spawn_client() ->
+    Parent = self(),
+    Ref = make_ref(),
+    Pid = spawn(fun() ->
+        Proc = couch_query_servers:get_os_process(<<"javascript">>),
+        loop(Parent, Ref, Proc)
+    end),
+    {Pid, Ref}.
+
+
+ping_client({Pid, Ref}) ->
+    Pid ! ping,
+    receive
+        {pong, Ref} -> ok
+        after 3000 -> timeout
+    end.
+
+
+get_client_proc({Pid, Ref}, ClientName) ->
+    Pid ! get_proc,
+    receive
+        {proc, Ref, Proc} -> Proc
+    after 3000 ->
+        etap:bail("Timeout getting client " ++ ClientName ++ " proc.")
+    end.
+
+
+stop_client({Pid, Ref}) ->
+    Pid ! stop,
+    receive
+        {stop, Ref} -> ok
+        after 3000 -> timeout
+    end.
+
+
+kill_client({Pid, Ref}) ->
+    Pid ! die,
+    receive
+        {die, Ref} -> ok
+        after 3000 -> timeout
+    end.
+
+
+loop(Parent, Ref, Proc) ->
+    receive
+        ping ->
+            Parent ! {pong, Ref},
+            loop(Parent, Ref, Proc);
+        get_proc  ->
+            Parent ! {proc, Ref, Proc},
+            loop(Parent, Ref, Proc);
+        stop ->
+            couch_query_servers:ret_os_process(Proc),
+            Parent ! {stop, Ref};
+        die ->
+            Parent ! {die, Ref},
+            exit(some_error)
+    end.
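
For reference, a minimal sketch of the checkout/return cycle this test exercises; couch_query_servers:get_os_process/1 and ret_os_process/1 are the calls used above, while the with_os_process/1 wrapper is illustrative only:

    % Checking out a JavaScript OS process blocks once os_process_limit is
    % reached; returning it (or the owning client dying) frees the slot for
    % the next waiter, which is what the assertions above verify.
    with_os_process(Fun) ->
        Proc = couch_query_servers:get_os_process(<<"javascript">>),
        try
            Fun(Proc)
        after
            couch_query_servers:ret_os_process(Proc)
        end.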

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/src/test/etap/220-compaction-daemon.t
----------------------------------------------------------------------
diff --git a/src/test/etap/220-compaction-daemon.t b/src/test/etap/220-compaction-daemon.t
new file mode 100755
index 0000000..4c63b66
--- /dev/null
+++ b/src/test/etap/220-compaction-daemon.t
@@ -0,0 +1,225 @@
+#!/usr/bin/env escript
+%% -*- erlang -*-
+
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+%   http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+-record(user_ctx, {
+    name = null,
+    roles = [],
+    handler
+}).
+
+test_db_name() ->
+    <<"couch_test_compaction_daemon">>.
+
+main(_) ->
+    test_util:init_code_path(),
+
+    etap:plan(10),
+    case (catch test()) of
+        ok ->
+            etap:end_tests();
+        Other ->
+            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
+            etap:bail(Other)
+    end,
+    ok.
+
+test() ->
+    couch_server_sup:start_link(test_util:config_files()),
+    timer:sleep(1000),
+    put(addr, couch_config:get("httpd", "bind_address", "127.0.0.1")),
+    put(port, integer_to_list(mochiweb_socket_server:get(couch_httpd, port))),
+
+    disable_compact_daemon(),
+
+    delete_db(),
+    {ok, Db} = create_db(),
+
+    add_design_doc(Db),
+    couch_db:close(Db),
+    populate(70, 70, 200 * 1024),
+
+    {_, DbFileSize} = get_db_frag(),
+    {_, ViewFileSize} = get_view_frag(),
+
+    % enable automatic compaction
+    ok = couch_config:set("compaction_daemon", "check_interval", "3", false),
+    ok = couch_config:set("compaction_daemon", "min_file_size", "100000", false),
+    ok = couch_config:set(
+        "compactions",
+        binary_to_list(test_db_name()),
+        "[{db_fragmentation, \"70%\"}, {view_fragmentation, \"70%\"}]",
+        false),
+
+    ok = timer:sleep(4000), % something >= check_interval
+    wait_compaction_finished(),
+
+    {DbFrag2, DbFileSize2} = get_db_frag(),
+    {ViewFrag2, ViewFileSize2} = get_view_frag(),
+
+    etap:is(true, (DbFrag2 < 70), "Database fragmentation is < 70% after compaction"),
+    etap:is(true, (ViewFrag2 < 70), "View fragmentation is < 70% after compaction"),
+    etap:is(true, (DbFileSize2 < DbFileSize), "Database file size decreased"),
+    etap:is(true, (ViewFileSize2 < ViewFileSize), "View file size decreased"),
+
+    disable_compact_daemon(),
+    ok = timer:sleep(6000), % 2 times check_interval
+    etap:is(couch_db:is_idle(Db), true, "Database is idle"),
+    populate(70, 70, 200 * 1024),
+    {_, DbFileSize3} = get_db_frag(),
+    {_, ViewFileSize3} = get_view_frag(),
+
+    % enable automatic compaction
+    ok = couch_config:set(
+        "compactions",
+        "_default",
+        "[{db_fragmentation, \"70%\"}, {view_fragmentation, \"70%\"}]",
+        false),
+
+    ok = timer:sleep(4000), % something >= check_interval
+    wait_compaction_finished(),
+
+    {DbFrag4, DbFileSize4} = get_db_frag(),
+    {ViewFrag4, ViewFileSize4} = get_view_frag(),
+
+    etap:is(true, (DbFrag4 < 70), "Database fragmentation is < 70% after compaction"),
+    etap:is(true, (ViewFrag4 < 70), "View fragmentation is < 70% after compaction"),
+    etap:is(true, (DbFileSize4 < DbFileSize3), "Database file size decreased again"),
+    etap:is(true, (ViewFileSize4 < ViewFileSize3), "View file size decreased again"),
+
+    ok = timer:sleep(6000), % 2 times check_interval
+    etap:is(couch_db:is_idle(Db), true, "Database is idle"),
+
+    delete_db(),
+    couch_server_sup:stop(),
+    ok.
+
+disable_compact_daemon() ->
+    Configs = couch_config:get("compactions"),
+    lists:foreach(
+        fun({DbName, _}) ->
+            ok = couch_config:delete("compactions", DbName, false)
+        end,
+        Configs).
+
+admin_user_ctx() ->
+    {user_ctx, #user_ctx{roles = [<<"_admin">>]}}.
+
+create_db() ->
+    {ok, _} = couch_db:create(test_db_name(), [admin_user_ctx()]).
+
+delete_db() ->
+    couch_server:delete(test_db_name(), [admin_user_ctx()]).
+
+add_design_doc(Db) ->
+    DDoc = couch_doc:from_json_obj({[
+        {<<"_id">>, <<"_design/foo">>},
+        {<<"language">>, <<"javascript">>},
+        {<<"views">>, {[
+            {<<"foo">>, {[
+                {<<"map">>, <<"function(doc) { emit(doc._id, doc); }">>}
+            ]}},
+            {<<"foo2">>, {[
+                {<<"map">>, <<"function(doc) { emit(doc._id, doc); }">>}
+            ]}},
+            {<<"foo3">>, {[
+                {<<"map">>, <<"function(doc) { emit(doc._id, doc); }">>}
+            ]}}
+        ]}}
+    ]}),
+    {ok, _} = couch_db:update_docs(Db, [DDoc]),
+    {ok, _} = couch_db:ensure_full_commit(Db),
+    ok.
+
+populate(DbFrag, ViewFrag, MinFileSize) ->
+    {CurDbFrag, DbFileSize} = get_db_frag(),
+    {CurViewFrag, ViewFileSize} = get_view_frag(),
+    populate(
+        DbFrag, ViewFrag, MinFileSize, CurDbFrag, CurViewFrag,
+        lists:min([DbFileSize, ViewFileSize])).
+
+populate(DbFrag, ViewFrag, MinFileSize, CurDbFrag, CurViewFrag, FileSize)
+    when CurDbFrag >= DbFrag, CurViewFrag >= ViewFrag, FileSize >= MinFileSize ->
+    ok;
+populate(DbFrag, ViewFrag, MinFileSize, _, _, _) ->
+    update(),
+    {CurDbFrag, DbFileSize} = get_db_frag(),
+    {CurViewFrag, ViewFileSize} = get_view_frag(),
+    populate(
+        DbFrag, ViewFrag, MinFileSize, CurDbFrag, CurViewFrag,
+        lists:min([DbFileSize, ViewFileSize])).
+
+update() ->
+    {ok, Db} = couch_db:open_int(test_db_name(), []),
+    lists:foreach(fun(_) ->
+        Doc = couch_doc:from_json_obj({[{<<"_id">>, couch_uuids:new()}]}),
+        {ok, _} = couch_db:update_docs(Db, [Doc]),
+        query_view()
+    end, lists:seq(1, 100)),
+    couch_db:close(Db).
+
+db_url() ->
+    "http://" ++ get(addr) ++ ":" ++ get(port) ++ "/" ++
+        binary_to_list(test_db_name()).
+
+query_view() ->
+    {ok, Code, _Headers, _Body} = test_util:request(
+        db_url() ++ "/_design/foo/_view/foo", [], get),
+    case Code of
+    200 ->
+        ok;
+    _ ->
+        etap:bail("error querying view")
+    end.
+
+get_db_frag() ->
+    {ok, Db} = couch_db:open_int(test_db_name(), []),
+    {ok, Info} = couch_db:get_db_info(Db),
+    couch_db:close(Db),
+    FileSize = couch_util:get_value(disk_size, Info),
+    DataSize = couch_util:get_value(data_size, Info),
+    {round((FileSize - DataSize) / FileSize * 100), FileSize}.
+
+get_view_frag() ->
+    {ok, Db} = couch_db:open_int(test_db_name(), []),
+    {ok, Info} = couch_mrview:get_info(Db, <<"_design/foo">>),
+    couch_db:close(Db),
+    FileSize = couch_util:get_value(disk_size, Info),
+    DataSize = couch_util:get_value(data_size, Info),
+    {round((FileSize - DataSize) / FileSize * 100), FileSize}.
+
+
+wait_compaction_finished() ->
+    Parent = self(),
+    Loop = spawn_link(fun() -> wait_loop(Parent) end),
+    receive
+    {done, Loop} ->
+        etap:diag("Database and view compaction have finished")
+    after 60000 ->
+        etap:bail("Compaction not triggered")
+    end.
+
+wait_loop(Parent) ->
+    {ok, Db} = couch_db:open_int(test_db_name(), []),
+    {ok, DbInfo} = couch_db:get_db_info(Db),
+    {ok, ViewInfo} = couch_mrview:get_info(Db, <<"_design/foo">>),
+    couch_db:close(Db),
+    case (couch_util:get_value(compact_running, ViewInfo) =:= true) orelse
+        (couch_util:get_value(compact_running, DbInfo) =:= true) of
+    false ->
+        Parent ! {done, self()};
+    true ->
+        ok = timer:sleep(500),
+        wait_loop(Parent)
+    end.
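
The 70% threshold configured above is compared against the fragmentation figure that get_db_frag/0 and get_view_frag/0 compute from disk_size and data_size. A quick worked example with illustrative sizes (not taken from an actual test run):

    1> FileSize = 1000000, DataSize = 250000.
    250000
    2> round((FileSize - DataSize) / FileSize * 100).
    75

A 1 MB file carrying 250 KB of live data is 75% fragmented, which exceeds the configured db_fragmentation of "70%", so the daemon compacts it on its next check_interval pass.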

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/src/test/etap/230-pbkfd2.t
----------------------------------------------------------------------
diff --git a/src/test/etap/230-pbkfd2.t b/src/test/etap/230-pbkfd2.t
new file mode 100644
index 0000000..d980ef6
--- /dev/null
+++ b/src/test/etap/230-pbkfd2.t
@@ -0,0 +1,38 @@
+#!/usr/bin/env escript
+%% -*- erlang -*-
+
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+%   http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+main(_) ->
+    test_util:init_code_path(),
+    etap:plan(6),
+    etap:is(couch_passwords:pbkdf2(<<"password">>, <<"salt">>, 1, 20),
+            {ok, <<"0c60c80f961f0e71f3a9b524af6012062fe037a6">>},
+            "test vector #1"),
+    etap:is(couch_passwords:pbkdf2(<<"password">>, <<"salt">>, 2, 20),
+            {ok, <<"ea6c014dc72d6f8ccd1ed92ace1d41f0d8de8957">>},
+            "test vector #2"),
+    etap:is(couch_passwords:pbkdf2(<<"password">>, <<"salt">>, 4096, 20),
+            {ok, <<"4b007901b765489abead49d926f721d065a429c1">>},
+            "test vector #3"),
+    etap:is(couch_passwords:pbkdf2(<<"passwordPASSWORDpassword">>,
+                                                     <<"saltSALTsaltSALTsaltSALTsaltSALTsalt">>, 4096, 25),
+            {ok, <<"3d2eec4fe41c849b80c8d83662c0e44a8b291a964cf2f07038">>},
+            "test vector #4"),
+    etap:is(couch_passwords:pbkdf2(<<"pass\0word">>, <<"sa\0lt">>, 4096, 16),
+            {ok, <<"56fa6aa75548099dcc37d7f03425e0c3">>},
+            "test vector #5"),
+    etap:is(couch_passwords:pbkdf2(<<"password">>, <<"salt">>, 16777216, 20),
+            {ok, <<"eefe3d61cd4da4e4e9945b3d6ba2158c2634e984">>},
+            "test vector #6"),
+    etap:end_tests().
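
The six assertions above use the published PBKDF2-HMAC-SHA1 test vectors from RFC 6070. A usage sketch of the function under test, with the signature implied by the assertions (password, salt, iteration count, derived key length) and an {ok, HexDigest} return:

    % Matches test vector #3 above: 4096 iterations, 20-byte derived key.
    {ok, Key} = couch_passwords:pbkdf2(<<"password">>, <<"salt">>, 4096, 20),
    <<"4b007901b765489abead49d926f721d065a429c1">> = Key.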

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/src/test/etap/231-cors.t
----------------------------------------------------------------------
diff --git a/src/test/etap/231-cors.t b/src/test/etap/231-cors.t
new file mode 100644
index 0000000..dd08ca8
--- /dev/null
+++ b/src/test/etap/231-cors.t
@@ -0,0 +1,433 @@
+#!/usr/bin/env escript
+%% -*- erlang -*-
+
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+%   http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+-record(user_ctx, {
+    name = null,
+    roles = [],
+    handler
+}).
+
+
+-define(SUPPORTED_METHODS, "GET, HEAD, POST, PUT, DELETE, TRACE, CONNECT, COPY, OPTIONS").
+server() ->
+    lists:concat([
+        "http://127.0.0.1:",
+        mochiweb_socket_server:get(couch_httpd, port),
+        "/"
+    ]).
+
+
+main(_) ->
+    test_util:init_code_path(),
+
+    etap:plan(28),
+    case (catch test()) of
+        ok ->
+            etap:end_tests();
+        Other ->
+            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
+            etap:bail(Other)
+    end,
+    ok.
+
+dbname() -> "etap-test-db".
+dbname1() -> "etap-test-db1".
+dbname2() -> "etap-test-db2".
+
+admin_user_ctx() -> {user_ctx, #user_ctx{roles=[<<"_admin">>]}}.
+
+set_admin_password(UserName, Password) ->
+    Hashed = couch_passwords:hash_admin_password(Password),
+    couch_config:set("admins", UserName, Hashed, false).
+
+cycle_db(DbName) ->
+    couch_server:delete(list_to_binary(DbName), [admin_user_ctx()]),
+    {ok, Db} = couch_db:create(list_to_binary(DbName), [admin_user_ctx()]),
+    Db.
+
+test() ->
+
+    ibrowse:start(),
+    crypto:start(),
+
+    %% launch couchdb
+    couch_server_sup:start_link(test_util:config_files()),
+
+    %% initialize db
+    timer:sleep(1000),
+    Db = cycle_db(dbname()),
+    Db1 = cycle_db(dbname1()),
+    Db2 = cycle_db(dbname2()),
+
+    % CORS is disabled by default
+    test_no_headers_server(),
+    test_no_headers_db(),
+
+    % Now enable CORS
+    ok = couch_config:set("httpd", "enable_cors", "true", false),
+    ok = couch_config:set("cors", "origins", "http://example.com", false),
+
+    %% do tests
+    test_incorrect_origin_simple_request(),
+    test_incorrect_origin_preflight_request(),
+
+    test_preflight_request(),
+    test_db_request(),
+    test_doc_with_attachment_request(),
+    test_doc_with_attachment_range_request(),
+    test_db_preflight_request(),
+    test_db1_origin_request(),
+    test_preflight_with_port1(),
+    test_preflight_with_scheme1(),
+
+    ok = couch_config:set("cors", "origins", "http://example.com:5984", false),
+    test_preflight_with_port2(),
+
+    ok = couch_config:set("cors", "origins", "https://example.com:5984", false),
+    test_preflight_with_scheme2(),
+
+    ok = couch_config:set("cors", "origins", "*", false),
+    test_preflight_with_wildcard(),
+
+    ok = couch_config:set("cors", "origins", "http://example.com", false),
+    test_case_sensitive_mismatch_of_allowed_origins(),
+
+    % http://www.w3.org/TR/cors/#supports-credentials
+    % 6.1.3
+    % If the resource supports credentials add a single
+    % Access-Control-Allow-Origin header, with the value
+    % of the Origin header as value, and add a single
+    % Access-Control-Allow-Credentials header with the
+    % case-sensitive string "true" as value.
+    % Otherwise, add a single Access-Control-Allow-Origin
+    % header, with either the value of the Origin header
+    % or the string "*" as value.
+    % Note: The string "*" cannot be used for a resource
+    % that supports credentials.
+    test_db_request_credentials_header_off(),
+    ok = couch_config:set("cors", "credentials", "true", false),
+    test_db_request_credentials_header_on(),
+    % We don’t test wildcards & credentials as that would
+    % fall into the realm of validating config values
+    % which we don’t do at all yet
+
+    % test with vhosts
+    ok = couch_config:set("vhosts", "example.com", "/", false),
+    test_preflight_request(true),
+    test_db_request(true),
+    test_db_preflight_request(true),
+    test_db1_origin_request(true),
+    test_preflight_with_port1(true),
+    test_preflight_with_scheme1(true),
+
+    % TBD
+    % test multiple per-host configuration
+
+    %% do tests with auth
+    ok = set_admin_password("test", "test"),
+
+    test_db_preflight_auth_request(),
+    test_db_origin_auth_request(),
+
+
+    %% restart boilerplate
+    catch couch_db:close(Db),
+    catch couch_db:close(Db1),
+    catch couch_db:close(Db2),
+
+    couch_server:delete(list_to_binary(dbname()), [admin_user_ctx()]),
+    couch_server:delete(list_to_binary(dbname1()), [admin_user_ctx()]),
+    couch_server:delete(list_to_binary(dbname2()), [admin_user_ctx()]),
+
+    timer:sleep(3000),
+    couch_server_sup:stop(),
+    ok.
+
+test_preflight_request() -> test_preflight_request(false).
+test_db_request() -> test_db_request(false).
+test_db_preflight_request() -> test_db_preflight_request(false).
+test_db1_origin_request() -> test_db1_origin_request(false).
+test_preflight_with_port1() -> test_preflight_with_port1(false).
+test_preflight_with_scheme1() -> test_preflight_with_scheme1(false).
+
+%% Cors is disabled, should not return Access-Control-Allow-Origin
+test_no_headers_server() ->
+    Headers = [{"Origin", "http://127.0.0.1"}],
+    {ok, _, Resp, _} = ibrowse:send_req(server(), Headers, get, []),
+    etap:is(proplists:get_value("Access-Control-Allow-Origin", Resp),
+            undefined, "No CORS Headers when disabled").
+
+%% Cors is disabled, should not return Access-Control-Allow-Origin
+test_no_headers_db() ->
+    Headers = [{"Origin", "http://127.0.0.1"}],
+    Url = server() ++ "etap-test-db",
+    {ok, _, Resp, _} = ibrowse:send_req(Url, Headers, get, []),
+    etap:is(proplists:get_value("Access-Control-Allow-Origin", Resp),
+            undefined, "No CORS Headers when disabled").
+
+test_incorrect_origin_simple_request() ->
+    Headers = [{"Origin", "http://127.0.0.1"}],
+    {ok, _, RespHeaders, _} = ibrowse:send_req(server(), Headers, get, []),
+    etap:is(proplists:get_value("Access-Control-Allow-Origin", RespHeaders),
+            undefined,
+            "Specified invalid origin, no Access").
+
+test_incorrect_origin_preflight_request() ->
+    Headers = [{"Origin", "http://127.0.0.1"},
+               {"Access-Control-Request-Method", "GET"}],
+    {ok, _, RespHeaders, _} = ibrowse:send_req(server(), Headers, options, []),
+    etap:is(proplists:get_value("Access-Control-Allow-Origin", RespHeaders),
+            undefined,
+            "invalid origin").
+
+test_preflight_request(VHost) ->
+    Headers = [{"Origin", "http://example.com"},
+               {"Access-Control-Request-Method", "GET"}]
+               ++ maybe_append_vhost(VHost),
+
+    case ibrowse:send_req(server(), Headers, options, []) of
+    {ok, _, RespHeaders, _}  ->
+        etap:is(proplists:get_value("Access-Control-Allow-Methods", RespHeaders),
+            ?SUPPORTED_METHODS,
+            "test_preflight_request Access-Control-Allow-Methods ok");
+    _ ->
+        etap:is(false, true, "ibrowse failed")
+    end.
+
+test_db_request(VHost) ->
+    Headers = [{"Origin", "http://example.com"}]
+               ++ maybe_append_vhost(VHost),
+    Url = server() ++ "etap-test-db",
+    case ibrowse:send_req(Url, Headers, get, []) of
+    {ok, _, RespHeaders, _Body} ->
+        etap:is(proplists:get_value("Access-Control-Allow-Origin", RespHeaders),
+            "http://example.com",
+            "db Access-Control-Allow-Origin ok"),
+        etap:is(proplists:get_value("Access-Control-Expose-Headers", RespHeaders),
+            "Cache-Control, Content-Type, Server",
+            "db Access-Control-Expose-Headers ok");
+    _ ->
+        etap:is(false, true, "ibrowse failed")
+    end.
+
+% COUCHDB-1689
+test_doc_with_attachment_request() ->
+    DocUrl = server() ++ "etap-test-db/doc1",
+    ibrowse:send_req(DocUrl ++ "/attachment.txt",
+        [{"Content-Type", "text/plain"}], put, "this is a text attachment"),
+
+    Headers = [{"Origin", "http://example.com"}],
+    Url = DocUrl ++ "?attachments=true",
+    case ibrowse:send_req(Url, Headers, get, []) of
+    {ok, Code, _RespHeaders, _Body} ->
+        etap:is(Code, "200", "Response without errors");
+    _ ->
+        etap:is(false, true, "ibrowse failed")
+    end.
+
+% COUCHDB-1689
+test_doc_with_attachment_range_request() ->
+    AttachmentUrl = server() ++ "etap-test-db/doc2/attachment.bin",
+    % Use a Content-Type that doesn't get compressed
+    ibrowse:send_req(AttachmentUrl,
+        [{"Content-Type", "application/octet-stream"}], put,
+        "this is an attachment"),
+
+    Headers = [{"Origin", "http://example.com"}, {"Range", "bytes=0-6"}],
+    case ibrowse:send_req(AttachmentUrl, Headers, get, []) of
+    {ok, Code, _RespHeaders, _Body} ->
+        etap:is(Code, "206", "Response without errors");
+    _ ->
+        etap:is(false, true, "ibrowse failed")
+    end.
+
+% COUCHDB-1697
+test_if_none_match_header() ->
+    Url = server() ++ "etap-test-db/doc2",
+    Headers = [{"Origin", "http://example.com"}],
+    {ok, _, _RespHeaders, _} = ibrowse:send_req(Url, Headers, get, []),
+    ETag = proplists:get_value("ETag", _RespHeaders),
+    Headers2 = [{"Origin", "http://example.com"}, {"If-None-Match", ETag}],
+    case ibrowse:send_req(Url, Headers2, get, []) of
+    {ok, Code, _RespHeaders2, _} ->
+        etap:is(Code, "304", "Responded with Not Modified");
+    _ ->
+        etap:is(false, true, "ibrowse failed")
+    end.
+
+test_db_request_credentials_header_off() ->
+    Headers = [{"Origin", "http://example.com"}],
+    Url = server() ++ "etap-test-db",
+    case ibrowse:send_req(Url, Headers, get, []) of
+    {ok, _, RespHeaders, _Body} ->
+        etap:is(proplists:get_value("Access-Control-Allow-Credentials", RespHeaders),
+            undefined,
+            "db Access-Control-Allow-Credentials off");
+    _ ->
+        etap:is(false, true, "ibrowse failed")
+    end.
+
+test_db_request_credentials_header_on() ->
+    Headers = [{"Origin", "http://example.com"}],
+    Url = server() ++ "etap-test-db",
+    case ibrowse:send_req(Url, Headers, get, []) of
+    {ok, _, RespHeaders, _Body} ->
+        etap:is(proplists:get_value("Access-Control-Allow-Credentials", RespHeaders),
+            "true",
+            "db Access-Control-Allow-Credentials ok");
+    _ ->
+        etap:is(false, true, "ibrowse failed")
+    end.
+
+test_db_preflight_request(VHost) ->
+    Url = server() ++ "etap-test-db",
+    Headers = [{"Origin", "http://example.com"},
+               {"Access-Control-Request-Method", "GET"}]
+               ++ maybe_append_vhost(VHost),
+    case ibrowse:send_req(Url, Headers, options, []) of
+    {ok, _, RespHeaders, _} ->
+        etap:is(proplists:get_value("Access-Control-Allow-Methods", RespHeaders),
+                ?SUPPORTED_METHODS,
+                "db Access-Control-Allow-Methods ok");
+    _ ->
+        etap:is(false, true, "ibrowse failed")
+    end.
+
+
+test_db1_origin_request(VHost) ->
+    Headers = [{"Origin", "http://example.com"}]
+               ++ maybe_append_vhost(VHost),
+    Url = server() ++ "etap-test-db1",
+    case ibrowse:send_req(Url, Headers, get, [], [{host_header, "example.com"}]) of
+    {ok, _, RespHeaders, _Body} ->
+        etap:is(proplists:get_value("Access-Control-Allow-Origin", RespHeaders),
+            "http://example.com",
+            "db origin ok");
+    _Else ->
+        io:format("else ~p~n", [_Else]),
+        etap:is(false, true, "ibrowse failed")
+    end.
+
+test_db_preflight_auth_request() ->
+    Url = server() ++ "etap-test-db2",
+    Headers = [{"Origin", "http://example.com"},
+               {"Access-Control-Request-Method", "GET"}],
+    case ibrowse:send_req(Url, Headers, options, []) of
+    {ok, _Status, RespHeaders, _} ->
+        etap:is(proplists:get_value("Access-Control-Allow-Methods", RespHeaders),
+                ?SUPPORTED_METHODS,
+                "db Access-Control-Allow-Methods ok");
+    _ ->
+        etap:is(false, true, "ibrowse failed")
+    end.
+
+
+test_db_origin_auth_request() ->
+    Headers = [{"Origin", "http://example.com"}],
+    Url = server() ++ "etap-test-db2",
+
+    case ibrowse:send_req(Url, Headers, get, [],
+        [{basic_auth, {"test", "test"}}]) of
+    {ok, _, RespHeaders, _Body} ->
+        etap:is(proplists:get_value("Access-Control-Allow-Origin", RespHeaders),
+            "http://example.com",
+            "db origin ok");
+    _ ->
+        etap:is(false, true, "ibrowse failed")
+    end.
+
+test_preflight_with_wildcard() ->
+    Headers = [{"Origin", "http://example.com"},
+               {"Access-Control-Request-Method", "GET"}],
+    case ibrowse:send_req(server(), Headers, options, []) of
+    {ok, _, RespHeaders, _}  ->
+        % I would either expect the current origin or a wildcard to be returned
+        etap:is(proplists:get_value("Access-Control-Allow-Origin", RespHeaders),
+            "http://example.com",
+            "db origin ok");
+    _ ->
+        etap:is(false, true, "ibrowse failed")
+    end.
+
+test_preflight_with_port1(VHost) ->
+    Headers = [{"Origin", "http://example.com:5984"},
+               {"Access-Control-Request-Method", "GET"}]
+               ++ maybe_append_vhost(VHost),
+    case ibrowse:send_req(server(), Headers, options, []) of
+    {ok, _, RespHeaders, _}  ->
+        % I would either expect the current origin or a wildcard to be returned
+        etap:is(proplists:get_value("Access-Control-Allow-Origin", RespHeaders),
+            undefined,
+            "check non defined host:port in origin ok");
+    _ ->
+        etap:is(false, true, "ibrowse failed")
+    end.
+
+test_preflight_with_port2() ->
+    Headers = [{"Origin", "http://example.com:5984"},
+               {"Access-Control-Request-Method", "GET"}],
+    case ibrowse:send_req(server(), Headers, options, []) of
+    {ok, _, RespHeaders, _}  ->
+        % I would either expect the current origin or a wildcard to be returned
+        etap:is(proplists:get_value("Access-Control-Allow-Origin", RespHeaders),
+            "http://example.com:5984",
+            "check host:port in origin ok");
+    _ ->
+        etap:is(false, true, "ibrowse failed")
+    end.
+
+test_preflight_with_scheme1(VHost) ->
+    Headers = [{"Origin", "https://example.com:5984"},
+               {"Access-Control-Request-Method", "GET"}]
+               ++ maybe_append_vhost(VHost),
+    case ibrowse:send_req(server(), Headers, options, []) of
+    {ok, _, RespHeaders, _}  ->
+        % I would either expect the current origin or a wildcard to be returned
+        etap:is(proplists:get_value("Access-Control-Allow-Origin", RespHeaders),
+            undefined,
+            "check non defined scheme in origin ok");
+    _ ->
+        etap:is(false, true, "ibrowse failed")
+    end.
+
+test_preflight_with_scheme2() ->
+    Headers = [{"Origin", "https://example.com:5984"},
+               {"Access-Control-Request-Method", "GET"}],
+    case ibrowse:send_req(server(), Headers, options, []) of
+    {ok, _, RespHeaders, _}  ->
+        % I would either expect the current origin or a wildcard to be returned
+        etap:is(proplists:get_value("Access-Control-Allow-Origin", RespHeaders),
+            "https://example.com:5984",
+            "check scheme in origin ok");
+    _ ->
+        etap:is(false, true, "ibrowse failed")
+    end.
+
+test_case_sensitive_mismatch_of_allowed_origins() ->
+    Headers = [{"Origin", "http://EXAMPLE.COM"}],
+    Url = server() ++ "etap-test-db",
+    case ibrowse:send_req(Url, Headers, get, []) of
+    {ok, _, RespHeaders, _Body} ->
+        etap:is(proplists:get_value("Access-Control-Allow-Origin", RespHeaders),
+            undefined,
+            "db access config case mismatch");
+    _ ->
+        etap:is(false, true, "ibrowse failed")
+    end.
+
+maybe_append_vhost(true) ->
+    [{"Host", "http://example.com"}];
+maybe_append_vhost(Else) ->
+    [].
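
The test drives CORS entirely through couch_config:set/4; expressed as the equivalent ini stanzas an operator would place in local.ini (a sketch mirroring the calls above, values shown for the strictest variant exercised), the settings are:

    [httpd]
    enable_cors = true

    [cors]
    origins = http://example.com
    credentials = true

The vhost variants additionally rely on a [vhosts] entry mapping example.com to /, as set near the middle of test/0.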

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/src/test/etap/250-upgrade-legacy-view-files.t
----------------------------------------------------------------------
diff --git a/src/test/etap/250-upgrade-legacy-view-files.t b/src/test/etap/250-upgrade-legacy-view-files.t
new file mode 100644
index 0000000..e720b1c
--- /dev/null
+++ b/src/test/etap/250-upgrade-legacy-view-files.t
@@ -0,0 +1,168 @@
+#!/usr/bin/env escript
+%% -*- erlang -*-
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+%   http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+main(_) ->
+    test_util:init_code_path(),
+
+    etap:plan(8),
+    case (catch test()) of
+        ok ->
+            etap:end_tests();
+        Other ->
+            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
+            etap:bail(Other)
+    end,
+    ok.
+
+
+test() ->
+    couch_server_sup:start_link(test_util:config_files()),
+
+    % commit immediately
+    ok = couch_config:set("query_server_config", "commit_freq", "0"),
+
+    test_upgrade(),
+
+    couch_server_sup:stop(),
+    ok.
+
+fixture_path() ->
+    test_util:source_file("test/etap/fixtures").
+
+old_db() ->
+    fixture_path() ++ "/" ++ old_db_name().
+
+old_db_name() ->
+    "test.couch".
+
+old_view() ->
+    fixture_path() ++ "/" ++ old_view_name().
+
+old_view_name() ->
+    "3b835456c235b1827e012e25666152f3.view".
+
+new_view_name() ->
+    "a1c5929f912aca32f13446122cc6ce50.view".
+
+couch_url() ->
+    "http://" ++ addr() ++ ":" ++ port().
+
+addr() ->
+    couch_config:get("httpd", "bind_address", "127.0.0.1").
+
+port() ->
+    integer_to_list(mochiweb_socket_server:get(couch_httpd, port)).
+
+
+% <= 1.2.x
+-record(index_header,
+    {seq=0,
+    purge_seq=0,
+    id_btree_state=nil,
+    view_states=nil
+    }).
+
+% >= 1.3.x
+-record(mrheader, {
+    seq=0,
+    purge_seq=0,
+    id_btree_state=nil,
+    view_states=nil
+}).
+
+ensure_header(File, MatchFun, Msg) ->
+    {ok, Fd} = couch_file:open(File),
+    {ok, {_Sig, Header}} = couch_file:read_header(Fd),
+    couch_file:close(Fd),
+    etap:fun_is(MatchFun, Header, "ensure " ++ Msg ++ " header for file: " ++ File).
+
+file_exists(File) ->
+    % open without creating
+    case file:open(File, [read, raw]) of
+    {ok, Fd_Read} ->
+        file:close(Fd_Read),
+        true;
+    _Error ->
+        false
+    end.
+
+cleanup() ->
+    DbDir = couch_config:get("couchdb", "database_dir"),
+    Files = [
+        DbDir ++ "/test.couch",
+        DbDir ++ "/.test_design/" ++ old_view_name(),
+        DbDir ++ "/.test_design/mrview/" ++ new_view_name()
+    ],
+    lists:foreach(fun(File) -> file:delete(File) end, Files),
+    etap:ok(true, "cleanup").
+
+test_upgrade() ->
+
+    cleanup(),
+
+    % copy old db file into db dir
+    DbDir = couch_config:get("couchdb", "database_dir"),
+    DbTarget = DbDir ++ "/" ++ old_db_name(),
+    filelib:ensure_dir(DbDir),
+    OldDbName = old_db(),
+    {ok, _} = file:copy(OldDbName, DbTarget),
+
+    % copy old view file into view dir
+    ViewDir = couch_config:get("couchdb", "view_index_dir"),
+    ViewTarget = ViewDir ++ "/.test_design/" ++ old_view_name(),
+    filelib:ensure_dir(ViewTarget),
+    OldViewName = old_view(),
+    {ok, _} = file:copy(OldViewName, ViewTarget),
+
+    % ensure old header
+    ensure_header(ViewTarget, fun(#index_header{}) -> true; (_) -> false end, "old"),
+
+    % query view
+    ViewUrl = couch_url() ++ "/test/_design/test/_view/test",
+    {ok, Code, _Headers, Body}  = test_util:request(ViewUrl, [], get),
+
+    % expect results
+    etap:is(Code, 200, "valid view result http status code"),
+    ExpectBody = <<"{\"total_rows\":2,\"offset\":0,\"rows\":[\r\n{\"id\":\"193f2f9c596ddc7ad326f7da470009ec\",\"key\":1,\"value\":null},\r\n{\"id\":\"193f2f9c596ddc7ad326f7da470012b6\",\"key\":2,\"value\":null}\r\n]}\n">>,
+    etap:is(Body, ExpectBody, "valid view result"),
+
+    % ensure old file gone.
+    etap:is(file_exists(ViewTarget), false, "ensure old file is gone"),
+
+    % ensure new header
+    NewViewFile = ViewDir ++ "/.test_design/mrview/" ++ new_view_name(),
+
+    % add doc(s)
+    test_util:request(
+        couch_url() ++ "/test/boo",
+        [{"Content-Type", "application/json"}],
+        put,
+        <<"{\"a\":3}">>),
+
+    % query again
+    {ok, Code2, _Headers2, Body2} = test_util:request(ViewUrl, [], get),
+
+    % expect results
+    etap:is(Code2, 200, "valid view result http status code"),
+    ExpectBody2 = <<"{\"total_rows\":3,\"offset\":0,\"rows\":[\r\n{\"id\":\"193f2f9c596ddc7ad326f7da470009ec\",\"key\":1,\"value\":null},\r\n{\"id\":\"193f2f9c596ddc7ad326f7da470012b6\",\"key\":2,\"value\":null},\r\n{\"id\":\"boo\",\"key\":3,\"value\":null}\r\n]}\n">>,
+    etap:is(Body2, ExpectBody2, "valid view result after doc add"),
+
+    % ensure no rebuild
+    % TBD no idea how to actually test this.
+
+    % ensure new header.
+    timer:sleep(2000),
+    ensure_header(NewViewFile, fun(#mrheader{}) -> true; (_) -> false end, "new"),
+
+    ok.

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/src/test/etap/Makefile.am
----------------------------------------------------------------------
diff --git a/src/test/etap/Makefile.am b/src/test/etap/Makefile.am
new file mode 100644
index 0000000..66048a9
--- /dev/null
+++ b/src/test/etap/Makefile.am
@@ -0,0 +1,108 @@
+## Licensed under the Apache License, Version 2.0 (the "License"); you may not
+## use this file except in compliance with the License. You may obtain a copy of
+## the License at
+##
+##   http://www.apache.org/licenses/LICENSE-2.0
+##
+## Unless required by applicable law or agreed to in writing, software
+## distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+## WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+## License for the specific language governing permissions and limitations under
+## the License.
+
+noinst_SCRIPTS = run
+noinst_DATA = test_util.beam test_web.beam
+
+noinst_PROGRAMS = test_cfg_register
+test_cfg_register_SOURCES = test_cfg_register.c
+test_cfg_register_CFLAGS = -D_BSD_SOURCE
+
+%.beam: %.erl
+	$(ERLC) $<
+
+run: run.tpl
+	sed -e "s|%abs_top_srcdir%|@abs_top_srcdir@|g" \
+	    -e "s|%abs_top_builddir%|@abs_top_builddir@|g" > \
+	$@ < $<
+	chmod +x $@
+
+# @@ wildcards are NOT portable, please replace with clean-local rules
+CLEANFILES = run *.beam
+
+DISTCLEANFILES = temp.*
+
+fixture_files = \
+    fixtures/3b835456c235b1827e012e25666152f3.view \
+    fixtures/test.couch
+
+tap_files = \
+    001-load.t \
+    002-icu-driver.t \
+    010-file-basics.t \
+    011-file-headers.t \
+    020-btree-basics.t \
+    021-btree-reductions.t \
+    030-doc-from-json.t \
+    031-doc-to-json.t \
+    040-util.t \
+    041-uuid-gen-id.ini \
+    041-uuid-gen-seq.ini \
+    041-uuid-gen-utc.ini \
+    041-uuid-gen.t \
+    042-work-queue.t \
+    050-stream.t \
+    060-kt-merging.t \
+    061-kt-missing-leaves.t \
+    062-kt-remove-leaves.t \
+    063-kt-get-leaves.t \
+    064-kt-counting.t \
+    065-kt-stemming.t \
+    070-couch-db.t \
+    072-cleanup.t \
+    073-changes.t \
+    074-doc-update-conflicts.t \
+    075-auth-cache.t \
+    076-file-compression.t \
+    077-couch-db-fast-db-delete-create.t \
+    080-config-get-set.t \
+    081-config-override.1.ini \
+    081-config-override.2.ini \
+    081-config-override.t \
+    082-config-register.t \
+    083-config-no-files.t \
+    090-task-status.t \
+    100-ref-counter.t \
+    120-stats-collect.t \
+    121-stats-aggregates.cfg \
+    121-stats-aggregates.ini \
+    121-stats-aggregates.t \
+    130-attachments-md5.t \
+    140-attachment-comp.t \
+    150-invalid-view-seq.t \
+    160-vhosts.t \
+    170-os-daemons.es \
+    170-os-daemons.t \
+    171-os-daemons-config.es \
+    171-os-daemons-config.t \
+    172-os-daemon-errors.1.sh \
+    172-os-daemon-errors.2.sh \
+    172-os-daemon-errors.3.sh \
+    172-os-daemon-errors.4.sh \
+    172-os-daemon-errors.t \
+    173-os-daemon-cfg-register.t \
+    180-http-proxy.ini \
+    180-http-proxy.t \
+    190-json-stream-parse.t \
+    200-view-group-no-db-leaks.t \
+    201-view-group-shutdown.t \
+    210-os-proc-pool.t \
+    220-compaction-daemon.t \
+    230-pbkfd2.t \
+    231-cors.t \
+    250-upgrade-legacy-view-files.t
+
+EXTRA_DIST = \
+    run.tpl \
+    test_web.erl \
+    $(fixture_files) \
+    $(tap_files)

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/src/test/etap/fixtures/3b835456c235b1827e012e25666152f3.view
----------------------------------------------------------------------
diff --git a/src/test/etap/fixtures/3b835456c235b1827e012e25666152f3.view b/src/test/etap/fixtures/3b835456c235b1827e012e25666152f3.view
new file mode 100644
index 0000000..9c67648
Binary files /dev/null and b/src/test/etap/fixtures/3b835456c235b1827e012e25666152f3.view differ

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/src/test/etap/fixtures/test.couch
----------------------------------------------------------------------
diff --git a/src/test/etap/fixtures/test.couch b/src/test/etap/fixtures/test.couch
new file mode 100644
index 0000000..32c79af
Binary files /dev/null and b/src/test/etap/fixtures/test.couch differ

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/src/test/etap/run.tpl
----------------------------------------------------------------------
diff --git a/src/test/etap/run.tpl b/src/test/etap/run.tpl
new file mode 100644
index 0000000..d6d6dbe
--- /dev/null
+++ b/src/test/etap/run.tpl
@@ -0,0 +1,32 @@
+#!/bin/sh -e
+
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
+
+SRCDIR="%abs_top_srcdir%"
+BUILDDIR="%abs_top_builddir%"
+export ERL_LIBS="$BUILDDIR/src/:$ERL_LIBS"
+export ERL_FLAGS="$ERL_FLAGS -pa $BUILDDIR/test/etap/"
+
+if test $# -eq 1; then
+    OPTS=""
+    TGT=$1
+else
+    OPTS=$1
+    TGT=$2
+fi
+
+if test -f $TGT; then
+    prove $OPTS $TGT
+else
+    prove $OPTS $TGT/*.t
+fi

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/src/test/etap/test_cfg_register.c
----------------------------------------------------------------------
diff --git a/src/test/etap/test_cfg_register.c b/src/test/etap/test_cfg_register.c
new file mode 100644
index 0000000..c910bac
--- /dev/null
+++ b/src/test/etap/test_cfg_register.c
@@ -0,0 +1,31 @@
+// Licensed under the Apache License, Version 2.0 (the "License"); you may not
+// use this file except in compliance with the License. You may obtain a copy of
+// the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations under
+// the License.
+
+#include <stdio.h>
+#include <stdlib.h>
+
+int
+main(int argc, const char * argv[])
+{
+    char c = '\0';
+    size_t num = 1;
+    
+    fprintf(stdout, "[\"register\", \"s1\"]\n");
+    fprintf(stdout, "[\"register\", \"s2\", \"k\"]\n");
+    fflush(stdout);
+    
+    while(c != '\n' && num > 0) {
+        num = fread(&c, 1, 1, stdin);
+    }
+    
+    exit(0);
+}

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/src/test/etap/test_util.erl.in
----------------------------------------------------------------------
diff --git a/src/test/etap/test_util.erl.in b/src/test/etap/test_util.erl.in
new file mode 100644
index 0000000..352714e
--- /dev/null
+++ b/src/test/etap/test_util.erl.in
@@ -0,0 +1,94 @@
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+%   http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+-module(test_util).
+
+-export([init_code_path/0]).
+-export([source_file/1, build_file/1, config_files/0]).
+-export([run/2]).
+-export([request/3, request/4]).
+
+srcdir() ->
+    "@abs_top_srcdir@".
+
+builddir() ->
+    "@abs_top_builddir@".
+
+init_code_path() ->
+    Paths = [
+        "etap",
+        "couchdb",
+        "ejson",
+        "erlang-oauth",
+        "ibrowse",
+        "mochiweb",
+        "snappy"
+    ],
+    lists:foreach(fun(Name) ->
+        code:add_patha(filename:join([builddir(), "src", Name]))
+    end, Paths).
+
+source_file(Name) ->
+    filename:join([srcdir(), Name]).
+
+build_file(Name) ->
+    filename:join([builddir(), Name]).
+
+config_files() ->
+    [
+        build_file("etc/couchdb/default_dev.ini"),
+        source_file("test/random_port.ini"),
+        build_file("etc/couchdb/local_dev.ini")
+    ].
+
+
+run(Plan, Fun) ->
+    test_util:init_code_path(),
+    etap:plan(Plan),
+    case (catch Fun()) of
+        ok ->
+            etap:end_tests();
+        Other ->
+            etap:diag(io_lib:format("Test died abnormally:~n~p", [Other])),
+            timer:sleep(500),
+            etap:bail(Other)
+    end,
+    ok.
+
+
+request(Url, Headers, Method) ->
+    request(Url, Headers, Method, []).
+
+request(Url, Headers, Method, Body) ->
+    request(Url, Headers, Method, Body, 3).
+
+request(_Url, _Headers, _Method, _Body, 0) ->
+    {error, request_failed};
+request(Url, Headers, Method, Body, N) ->
+    case code:is_loaded(ibrowse) of
+    false ->
+        {ok, _} = ibrowse:start();
+    _ ->
+        ok
+    end,
+    case ibrowse:send_req(Url, Headers, Method, Body) of
+    {ok, Code0, RespHeaders, RespBody0} ->
+        Code = list_to_integer(Code0),
+        RespBody = iolist_to_binary(RespBody0),
+        {ok, Code, RespHeaders, RespBody};
+    {error, {'EXIT', {normal, _}}} ->
+        % Connection closed right after a successful request that
+        % used the same connection.
+        request(Url, Headers, Method, Body, N - 1);
+    Error ->
+        Error
+    end.
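
run/2 wraps the plan/catch/bail boilerplate that each .t script above repeats by hand; a hypothetical minimal test using it (not part of this commit) would look like:

    #!/usr/bin/env escript
    %% -*- erlang -*-
    % run/2 calls etap:plan/1, runs the fun, and bails if it returns
    % anything other than ok; init_code_path/0 is invoked inside run/2.
    main(_) ->
        test_util:run(1, fun() ->
            etap:is(1 + 1, 2, "sanity check"),
            ok
        end).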

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/src/test/etap/test_web.erl
----------------------------------------------------------------------
diff --git a/src/test/etap/test_web.erl b/src/test/etap/test_web.erl
new file mode 100644
index 0000000..ed78651
--- /dev/null
+++ b/src/test/etap/test_web.erl
@@ -0,0 +1,99 @@
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+%   http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+-module(test_web).
+-behaviour(gen_server).
+
+-export([start_link/0, loop/1, get_port/0, set_assert/1, check_last/0]).
+-export([init/1, terminate/2, code_change/3]).
+-export([handle_call/3, handle_cast/2, handle_info/2]).
+
+-define(SERVER, test_web_server).
+-define(HANDLER, test_web_handler).
+
+start_link() ->
+    gen_server:start({local, ?HANDLER}, ?MODULE, [], []),
+    mochiweb_http:start([
+        {name, ?SERVER},
+        {loop, {?MODULE, loop}},
+        {port, 0}
+    ]).
+
+loop(Req) ->
+    %etap:diag("Handling request: ~p", [Req]),
+    case gen_server:call(?HANDLER, {check_request, Req}) of
+        {ok, RespInfo} ->
+            {ok, Req:respond(RespInfo)};
+        {raw, {Status, Headers, BodyChunks}} ->
+            Resp = Req:start_response({Status, Headers}),
+            lists:foreach(fun(C) -> Resp:send(C) end, BodyChunks),
+            erlang:put(mochiweb_request_force_close, true),
+            {ok, Resp};
+        {chunked, {Status, Headers, BodyChunks}} ->
+            Resp = Req:respond({Status, Headers, chunked}),
+            timer:sleep(500),
+            lists:foreach(fun(C) -> Resp:write_chunk(C) end, BodyChunks),
+            Resp:write_chunk([]),
+            {ok, Resp};
+        {error, Reason} ->
+            etap:diag("Error: ~p", [Reason]),
+            Body = lists:flatten(io_lib:format("Error: ~p", [Reason])),
+            {ok, Req:respond({200, [], Body})}
+    end.
+
+get_port() ->
+    mochiweb_socket_server:get(?SERVER, port).
+
+set_assert(Fun) ->
+    ok = gen_server:call(?HANDLER, {set_assert, Fun}).
+
+check_last() ->
+    gen_server:call(?HANDLER, last_status).
+
+init(_) ->
+    {ok, nil}.
+
+terminate(_Reason, _State) ->
+    ok.
+
+handle_call({check_request, Req}, _From, State) when is_function(State, 1) ->
+    Resp2 = case (catch State(Req)) of
+        {ok, Resp} -> {reply, {ok, Resp}, was_ok};
+        {raw, Resp} -> {reply, {raw, Resp}, was_ok};
+        {chunked, Resp} -> {reply, {chunked, Resp}, was_ok};
+        Error -> {reply, {error, Error}, not_ok}
+    end,
+    Req:cleanup(),
+    Resp2;
+handle_call({check_request, _Req}, _From, _State) ->
+    {reply, {error, no_assert_function}, not_ok};
+handle_call(last_status, _From, State) when is_atom(State) ->
+    {reply, State, nil};
+handle_call(last_status, _From, State) ->
+    {reply, {error, not_checked}, State};
+handle_call({set_assert, Fun}, _From, nil) ->
+    {reply, ok, Fun};
+handle_call({set_assert, _}, _From, State) ->
+    {reply, {error, assert_function_set}, State};
+handle_call(Msg, _From, State) ->
+    {reply, {ignored, Msg}, State}.
+
+handle_cast(Msg, State) ->
+    etap:diag("Ignoring cast message: ~p", [Msg]),
+    {noreply, State}.
+
+handle_info(Msg, State) ->
+    etap:diag("Ignoring info message: ~p", [Msg]),
+    {noreply, State}.
+
+code_change(_OldVsn, State, _Extra) ->
+    {ok, State}.
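
A hypothetical usage sketch of this helper (the client code below is not part of the commit): register a one-shot assertion, point an HTTP client at get_port/0, then confirm the handler saw the request.

    % The assert fun receives the mochiweb request and returns {ok, RespInfo};
    % a successful check leaves the server in the was_ok state, which
    % check_last/0 reports and then resets.
    ibrowse:start(),
    {ok, _} = test_web:start_link(),
    ok = test_web:set_assert(fun(_Req) ->
        {ok, {200, [{"Content-Type", "text/plain"}], "hello"}}
    end),
    Url = "http://127.0.0.1:" ++ integer_to_list(test_web:get_port()) ++ "/",
    {ok, "200", _Headers, _Body} = ibrowse:send_req(Url, [], get),
    was_ok = test_web:check_last().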

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/src/test/javascript/Makefile.am
----------------------------------------------------------------------
diff --git a/src/test/javascript/Makefile.am b/src/test/javascript/Makefile.am
new file mode 100644
index 0000000..e7036ca
--- /dev/null
+++ b/src/test/javascript/Makefile.am
@@ -0,0 +1,27 @@
+## Licensed under the Apache License, Version 2.0 (the "License"); you may not
+## use this file except in compliance with the License. You may obtain a copy of
+## the License at
+##
+##   http://www.apache.org/licenses/LICENSE-2.0
+##
+## Unless required by applicable law or agreed to in writing, software
+## distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+## WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+## License for the specific language governing permissions and limitations under
+## the License.
+
+EXTRA_DIST = \
+	cli_runner.js \
+	couch_http.js \
+	test_setup.js \
+	run.tpl
+
+noinst_SCRIPTS = run
+CLEANFILES = run
+
+run: run.tpl
+	sed -e "s|%abs_top_srcdir%|$(abs_top_srcdir)|" \
+		-e "s|%abs_top_builddir%|$(abs_top_builddir)|" \
+		-e "s|%localstaterundir%|$(abs_top_builddir)/tmp/run|g" \
+	< $< > $@
+	chmod +x $@

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/src/test/javascript/cli_runner.js
----------------------------------------------------------------------
diff --git a/src/test/javascript/cli_runner.js b/src/test/javascript/cli_runner.js
new file mode 100644
index 0000000..e8ebd2e
--- /dev/null
+++ b/src/test/javascript/cli_runner.js
@@ -0,0 +1,47 @@
+// Licensed under the Apache License, Version 2.0 (the "License"); you may not
+// use this file except in compliance with the License. You may obtain a copy of
+// the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations under
+// the License.
+//
+
+/*
+ * Futon test suite was designed to be able to run all tests populated into
+ * couchTests. Here we should only be loading one test, so we'll pop the first
+ * test off the list and run the test. If more than one item is loaded in the
+ * test object, return an error.
+ */
+function runTest() {
+  var count = 0;
+  var start = new Date().getTime();
+
+  for(var name in couchTests) {
+      count++;
+  }
+
+  if (count !== 1) {
+      console.log('Only one test per file is allowed.');
+      quit(1);
+  }
+
+  try {
+    // Add artificial wait for each test of 1 sec
+    while (new Date().getTime() < start + 1200);
+    couchTests[name]();
+    print('OK');
+  } catch(e) {
+    console.log("FAIL\nReason: " + e.message);
+    fmtStack(e.stack);
+    quit(1);
+  }
+}
+
+waitForSuccess(CouchDB.isRunning, 'isRunning');
+
+runTest();

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/src/test/javascript/couch_http.js
----------------------------------------------------------------------
diff --git a/src/test/javascript/couch_http.js b/src/test/javascript/couch_http.js
new file mode 100644
index 0000000..c44ce28
--- /dev/null
+++ b/src/test/javascript/couch_http.js
@@ -0,0 +1,73 @@
+// Licensed under the Apache License, Version 2.0 (the "License"); you may not
+// use this file except in compliance with the License. You may obtain a copy of
+// the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations under
+// the License.
+
+(function() {
+  if(typeof(CouchHTTP) != "undefined") {
+    CouchHTTP.prototype.open = function(method, url, async) {
+      if(!/^\s*http:\/\//.test(url)) {
+        if(/^\//.test(url)) {
+          // The couch.uri file (base_url) has a trailing slash
+          url = this.base_url + url.slice(1);
+        } else {
+          url = this.base_url + url;
+        }
+      }
+      
+      return this._open(method, url, async);
+    };
+    
+    CouchHTTP.prototype.setRequestHeader = function(name, value) {
+      // Drop content-length headers because cURL will set it for us
+      // based on body length
+      if(name.toLowerCase().replace(/^\s+|\s+$/g, '') != "content-length") {
+        this._setRequestHeader(name, value);
+      }
+    }
+    
+    CouchHTTP.prototype.send = function(body) {
+      this._send(body || "");
+      var headers = {};
+      this._headers.forEach(function(hdr) {
+          var pair = hdr.split(":");
+          var name = pair.shift();
+          headers[name] = pair.join(":").replace(/^\s+|\s+$/g, "");
+      });
+      this.headers = headers;
+    };
+
+    CouchHTTP.prototype.getResponseHeader = function(name) {
+      for(var hdr in this.headers) {
+        if(hdr.toLowerCase() == name.toLowerCase()) {
+          return this.headers[hdr];
+        }
+      }
+      return null;
+    };
+  }
+})();
+
+CouchDB.urlPrefix = "";
+CouchDB.newXhr = function() {
+  return new CouchHTTP();
+};
+
+CouchDB.xhrheader = function(xhr, header) {
+  if(typeof(xhr) == "CouchHTTP") {
+    return xhr.getResponseHeader(header);
+  } else {
+    return xhr.headers[header];
+  }
+}
+
+CouchDB.xhrbody = function(xhr) {
+  return xhr.responseText || xhr.body;
+}

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/src/test/javascript/run.tpl
----------------------------------------------------------------------
diff --git a/src/test/javascript/run.tpl b/src/test/javascript/run.tpl
new file mode 100644
index 0000000..75192da
--- /dev/null
+++ b/src/test/javascript/run.tpl
@@ -0,0 +1,138 @@
+#!/bin/sh -e
+
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
+
+SRC_DIR=%abs_top_srcdir%
+BUILD_DIR=%abs_top_builddir%
+SCRIPT_DIR=$SRC_DIR/share/www/script
+JS_TEST_DIR=$SRC_DIR/test/javascript
+
+COUCHJS=%abs_top_builddir%/src/couchdb/priv/couchjs
+COUCH_URI_FILE=%localstaterundir%/couch.uri
+
+# make check-js calls us with MAKE=$(MAKE) so BSDish `gmake` invocations
+# will get passed on correctly. If $0 gets run manually, default to
+# `make`
+if [ -z "$MAKE" ]; then
+    MAKE=make
+fi
+
+trap 'abort' EXIT INT
+
+start() {
+	./utils/run -b -r 0 -n \
+		-a $BUILD_DIR/etc/couchdb/default_dev.ini \
+		-a $SRC_DIR/test/random_port.ini \
+		-a $BUILD_DIR/etc/couchdb/local_dev.ini 1>/dev/null
+}
+
+stop() {
+    ./utils/run -d 1>/dev/null
+}
+
+restart() {
+    stop
+    start
+}
+
+abort() {
+    trap - 0
+    stop
+    exit 2
+}
+
+process_response() {
+    while read data
+    do
+        if [ $data = 'restart' ];
+        then
+            if [ -z $COUCHDB_NO_START ]; then
+                restart
+            fi
+        else
+            echo "$data"
+        fi
+    done
+}
+
+run() {
+    # start the tests
+    /bin/echo -n "$1 ... "
+    $COUCHJS -H -u $COUCH_URI_FILE \
+        $SCRIPT_DIR/json2.js \
+        $SCRIPT_DIR/sha1.js \
+        $SCRIPT_DIR/oauth.js \
+        $SCRIPT_DIR/couch.js \
+        $SCRIPT_DIR/replicator_db_inc.js \
+        $SCRIPT_DIR/couch_test_runner.js \
+        $JS_TEST_DIR/couch_http.js \
+        $JS_TEST_DIR/test_setup.js \
+        $1 \
+        $JS_TEST_DIR/cli_runner.js | process_response
+
+    if [ -z $RESULT ]; then
+        RESULT=$?
+    elif [ "$?" -eq 1 ]; then
+        RESULT=$?
+    fi
+
+}
+
+run_files() {
+    COUNTER=1
+    FILE_COUNT=$(ls -l $1 | wc -l)
+    FILE_COUNT=$(expr $FILE_COUNT + 0)
+    for TEST_SRC in $1
+    do
+        /bin/echo -n "$COUNTER/$FILE_COUNT "
+        COUNTER=$(expr $COUNTER + 1)
+        run $TEST_SRC
+    done
+}
+
+# start CouchDB
+if [ -z $COUCHDB_NO_START ]; then
+    $MAKE dev
+    start
+fi
+
+echo "Running javascript tests ..."
+
+if [ "$#" -eq 0 ];
+then
+    run_files "$SCRIPT_DIR/test/*.js"
+else
+    if [ -d $1 ]; then
+        run_files "$1/*.js"
+    else
+        TEST_SRC="$1"
+        if [ ! -f $TEST_SRC ]; then
+            TEST_SRC="$SCRIPT_DIR/test/$1"
+            if [ ! -f $TEST_SRC ]; then
+                TEST_SRC="$SCRIPT_DIR/test/$1.js"
+                if [ ! -f $TEST_SRC ]; then
+                    echo "file $1 does not exist"
+                    exit 1
+                fi
+            fi
+        fi
+        run $TEST_SRC
+    fi
+fi
+
+if [ -z $COUCHDB_NO_START ]; then
+    stop
+fi
+
+trap - 0
+exit $RESULT

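The template above is filled in by the build to produce the actual runner script. Going by its argument handling, typical invocations look like the sketch below; the script location and the `basics` suite name are assumptions for illustration (the expanded script conventionally sits next to the template, and the suites live under share/www/script/test/).

    # sketch: run every suite found in share/www/script/test/
    ./src/test/javascript/run

    # run one suite; a bare name is resolved to $SCRIPT_DIR/test/<name>.js
    ./src/test/javascript/run basics

    # reuse an already-running server: skips `make dev`, start/stop and restarts
    COUCHDB_NO_START=1 ./src/test/javascript/run basics
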
http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/src/test/javascript/test_setup.js
----------------------------------------------------------------------
diff --git a/src/test/javascript/test_setup.js b/src/test/javascript/test_setup.js
new file mode 100644
index 0000000..9347455
--- /dev/null
+++ b/src/test/javascript/test_setup.js
@@ -0,0 +1,89 @@
+// Licensed under the Apache License, Version 2.0 (the "License"); you may not
+// use this file except in compliance with the License. You may obtain a copy of
+// the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations under
+// the License.
+
+/*
+ * Add global couchTests object required for existing tests.
+ */
+var couchTests = {}; 
+
+var console = { 
+  log: function(arg) {
+    var msg = (arg.toString()).replace(/\n/g, "\n    ");
+    print(msg, true);
+  }
+};
+
+var fmtStack = function(stack) {
+  if(!stack) {
+    console.log("No stack information");
+    return;
+  }
+  console.log("Trace back (most recent call first):\n");
+  var re = new RegExp("(.*?)@([^:]*):(.*)$");
+  var lines = stack.split("\n");
+  for(var i = 0; i < lines.length; i++) {
+    var line = lines[i];
+    if(!line.length) continue;
+    var match = re.exec(line);
+    if(!match) continue;
+    var source = match[1].substr(0, 70);
+    var file = match[2];
+    var lnum = match[3];
+    while(lnum.length < 3) lnum = " " + lnum;
+    console.log(" " + lnum + ": " + file);
+    console.log("      " + source);
+  }
+} 
+
+function T(arg1, arg2) {
+  if(!arg1) {
+    var result = (arg2 ? arg2 : arg1);
+    throw((result instanceof Error ? result : Error(result)));
+  }
+} 
+
+function waitForSuccess(fun, tag) {
+  var start = new Date().getTime();
+  var complete = false;
+  
+  while (!complete) {
+    var now = new Date().getTime();
+    if (now > start + 5000) {
+      complete = true;
+      print('FAIL');
+      print(tag);
+      quit(1);
+    }
+    try {
+      while (new Date().getTime() < now + 500);
+      complete = fun();
+    } catch (e) {}
+  }
+}
+
+function restartServer() {
+  print('restart');
+  var start = new Date().getTime();
+  while (new Date().getTime() < start + 1000);
+  waitForSuccess(CouchDB.isRunning, 'restart');
+}
+
+/*
+ * If last_req is an object, we got something back. This might be an error, but
+ * CouchDB is up and running!
+ */
+CouchDB.isRunning = function() {
+  CouchDB.last_req = CouchDB.request("GET", "/");
+  return typeof CouchDB.last_req == 'object';
+};

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/src/test/random_port.ini
----------------------------------------------------------------------
diff --git a/src/test/random_port.ini b/src/test/random_port.ini
new file mode 100644
index 0000000..2b2d130
--- /dev/null
+++ b/src/test/random_port.ini
@@ -0,0 +1,19 @@
+; Licensed to the Apache Software Foundation (ASF) under one
+; or more contributor license agreements.  See the NOTICE file
+; distributed with this work for additional information
+; regarding copyright ownership.  The ASF licenses this file
+; to you under the Apache License, Version 2.0 (the
+; "License"); you may not use this file except in compliance
+; with the License.  You may obtain a copy of the License at
+;
+;   http://www.apache.org/licenses/LICENSE-2.0
+;
+; Unless required by applicable law or agreed to in writing,
+; software distributed under the License is distributed on an
+; "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+; KIND, either express or implied.  See the License for the
+; specific language governing permissions and limitations
+; under the License.
+
+[httpd]
+port = 0

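Setting port = 0 lets the operating system pick a free ephemeral port instead of the default 5984. The runner does not need to know the port in advance: run.tpl hands the URI file written at startup to couchjs via -u $COUCH_URI_FILE, and the chosen address can be checked by hand, e.g.:

    # sketch; the file name comes from COUCH_URI_FILE in run.tpl above
    cat %localstaterundir%/couch.uri        # e.g. http://127.0.0.1:49153/
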
http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/src/test/view_server/Makefile.am
----------------------------------------------------------------------
diff --git a/src/test/view_server/Makefile.am b/src/test/view_server/Makefile.am
new file mode 100644
index 0000000..11e7feb
--- /dev/null
+++ b/src/test/view_server/Makefile.am
@@ -0,0 +1,15 @@
+## Licensed under the Apache License, Version 2.0 (the "License"); you may not
+## use this file except in compliance with the License. You may obtain a copy of
+## the License at
+##
+##   http://www.apache.org/licenses/LICENSE-2.0
+##
+## Unless required by applicable law or agreed to in writing, software
+## distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+## WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+## License for the specific language governing permissions and limitations under
+## the License.
+
+EXTRA_DIST = \
+	query_server_spec.rb \
+	run_native_process.es

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/src/test/view_server/query_server_spec.rb
----------------------------------------------------------------------
diff --git a/src/test/view_server/query_server_spec.rb b/src/test/view_server/query_server_spec.rb
new file mode 100644
index 0000000..c53daff
--- /dev/null
+++ b/src/test/view_server/query_server_spec.rb
@@ -0,0 +1,824 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
+
+# to run (requires ruby and rspec):
+# spec src/test/view_server/query_server_spec.rb -f specdoc --color
+# 
+# environment options:
+#   QS_TRACE=true
+#     shows full output from the query server
+#   QS_LANG=lang
+#     run tests on the query server (for now, one of: js, erlang)
+# 
+
+COUCH_ROOT = "#{File.dirname(__FILE__)}/../.." unless defined?(COUCH_ROOT)
+LANGUAGE = ENV["QS_LANG"] || "js"
+
+puts "Running query server specs for #{LANGUAGE} query server"
+
+require 'rspec'
+require 'json'
+
+class OSProcessRunner
+  def self.run
+    trace = ENV["QS_TRACE"] || false
+    puts "launching #{run_command}" if trace
+    if block_given?
+      IO.popen(run_command, "r+") do |io|
+        qs = QueryServerRunner.new(io, trace)
+        yield qs
+      end
+    else
+      io = IO.popen(run_command, "r+")
+      QueryServerRunner.new(io, trace)
+    end
+  end
+  def initialize io, trace = false
+    @qsio = io
+    @trace = trace
+  end
+  def close
+    @qsio.close
+  end
+  def reset!
+    run(["reset"])
+  end
+  def add_fun(fun)
+    run(["add_fun", fun])
+  end
+  def teach_ddoc(ddoc)
+    run(["ddoc", "new", ddoc_id(ddoc), ddoc])
+  end
+  def ddoc_run(ddoc, fun_path, args)
+    run(["ddoc", ddoc_id(ddoc), fun_path, args])
+  end
+  def ddoc_id(ddoc)
+    d_id = ddoc["_id"]
+    raise 'ddoc must have _id' unless d_id
+    d_id
+  end
+  def get_chunks
+    resp = jsgets
+    raise "not a chunk" unless resp.first == "chunks"
+    return resp[1]
+  end
+  def run json
+    rrun json
+    jsgets
+  end
+  def rrun json
+    line = json.to_json
+    puts "run: #{line}" if @trace
+    @qsio.puts line
+  end
+  def rgets
+    resp = @qsio.gets
+    puts "got: #{resp}"  if @trace
+    resp
+  end
+  def jsgets
+    resp = rgets
+    # err = @qserr.gets
+    # puts "err: #{err}" if err
+    if resp
+      begin
+        rj = JSON.parse("[#{resp.chomp}]")[0]
+      rescue JSON::ParserError
+        puts "JSON ERROR (dump under trace mode)"
+        # puts resp.chomp
+        while resp = rgets
+          # puts resp.chomp
+        end
+      end
+      if rj.respond_to?(:[]) && rj.is_a?(Array)
+        if rj[0] == "log"
+          log = rj[1]
+          puts "log: #{log}" if @trace
+          rj = jsgets
+        end
+      end
+      rj
+    else
+      raise "no response"
+    end
+  end
+end
+
+class QueryServerRunner < OSProcessRunner
+
+  COMMANDS = {
+    "js" => "#{COUCH_ROOT}/bin/couchjs_dev #{COUCH_ROOT}/share/server/main.js",
+    "erlang" => "#{COUCH_ROOT}/test/view_server/run_native_process.es"
+  }
+
+  def self.run_command
+    COMMANDS[LANGUAGE]
+  end
+end
+
+class ExternalRunner < OSProcessRunner
+  def self.run_command
+    "#{COUCH_ROOT}/src/couchdb/couchjs #{COUCH_ROOT}/share/server/echo.js"
+  end
+end
+
+# we could organize this into a design document per language.
+# that would make testing future languages really easy.
+
+functions = {
+  "emit-twice" => {
+    "js" => %{function(doc){emit("foo",doc.a); emit("bar",doc.a)}},
+    "erlang" => <<-ERLANG
+      fun({Doc}) ->
+        A = couch_util:get_value(<<"a">>, Doc, null),
+        Emit(<<"foo">>, A),
+        Emit(<<"bar">>, A)
+      end.
+    ERLANG
+  },
+  "emit-once" => {
+    "js" => <<-JS,
+      function(doc){
+        emit("baz",doc.a)
+      }
+      JS
+    "erlang" => <<-ERLANG
+        fun({Doc}) ->
+            A = couch_util:get_value(<<"a">>, Doc, null),
+            Emit(<<"baz">>, A)
+        end.
+    ERLANG
+  },
+  "reduce-values-length" => {
+    "js" => %{function(keys, values, rereduce) { return values.length; }},
+    "erlang" => %{fun(Keys, Values, ReReduce) -> length(Values) end.}
+  },
+  "reduce-values-sum" => {
+    "js" => %{function(keys, values, rereduce) { return sum(values); }},
+    "erlang" => %{fun(Keys, Values, ReReduce) -> lists:sum(Values) end.}
+  },
+  "validate-forbidden" => {
+    "js" => <<-JS,
+      function(newDoc, oldDoc, userCtx) {
+        if(newDoc.bad)
+          throw({forbidden:"bad doc"}); "foo bar";
+      }
+      JS
+    "erlang" => <<-ERLANG
+      fun({NewDoc}, _OldDoc, _UserCtx) ->
+        case couch_util:get_value(<<"bad">>, NewDoc) of
+            undefined -> 1;
+            _ -> {[{forbidden, <<"bad doc">>}]}
+        end
+      end.
+    ERLANG
+  },
+  "show-simple" => {
+    "js" => <<-JS,
+        function(doc, req) {
+            log("ok");
+            return [doc.title, doc.body].join(' - ');
+        }
+    JS
+    "erlang" => <<-ERLANG
+      fun({Doc}, Req) ->
+            Title = couch_util:get_value(<<"title">>, Doc),
+            Body = couch_util:get_value(<<"body">>, Doc),
+            Resp = <<Title/binary, " - ", Body/binary>>,
+        {[{<<"body">>, Resp}]}
+      end.
+    ERLANG
+  },
+  "show-headers" => {
+    "js" => <<-JS,
+        function(doc, req) {
+          var resp = {"code":200, "headers":{"X-Plankton":"Rusty"}};
+          resp.body = [doc.title, doc.body].join(' - ');
+          return resp;
+        }
+     JS
+    "erlang" => <<-ERLANG
+  fun({Doc}, Req) ->
+        Title = couch_util:get_value(<<"title">>, Doc),
+        Body = couch_util:get_value(<<"body">>, Doc),
+        Resp = <<Title/binary, " - ", Body/binary>>,
+        {[
+        {<<"code">>, 200},
+        {<<"headers">>, {[{<<"X-Plankton">>, <<"Rusty">>}]}},
+        {<<"body">>, Resp}
+      ]}
+  end.
+    ERLANG
+  },
+  "show-sends" => {
+    "js" =>  <<-JS,
+        function(head, req) {
+          start({headers:{"Content-Type" : "text/plain"}});
+          send("first chunk");
+          send('second "chunk"');
+          return "tail";
+        };
+    JS
+    "erlang" => <<-ERLANG
+      fun(Head, Req) ->
+        Resp = {[
+          {<<"headers">>, {[{<<"Content-Type">>, <<"text/plain">>}]}}
+        ]},
+        Start(Resp),
+        Send(<<"first chunk">>),
+        Send(<<"second \\\"chunk\\\"">>),
+        <<"tail">>
+      end.
+    ERLANG
+  },
+  "show-while-get-rows" => {
+    "js" =>  <<-JS,
+        function(head, req) {
+          send("first chunk");
+          send(req.q);
+          var row;
+          log("about to getRow " + typeof(getRow));
+          while(row = getRow()) {
+            send(row.key);
+          };
+          return "tail";
+        };
+    JS
+    "erlang" => <<-ERLANG,
+        fun(Head, {Req}) ->
+            Send(<<"first chunk">>),
+            Send(couch_util:get_value(<<"q">>, Req)),
+            Fun = fun({Row}, _) ->
+                Send(couch_util:get_value(<<"key">>, Row)),
+                {ok, nil}
+            end,
+            {ok, _} = FoldRows(Fun, nil),
+            <<"tail">>
+        end.
+    ERLANG
+  },
+  "show-while-get-rows-multi-send" => {
+    "js" => <<-JS,
+        function(head, req) {
+          send("bacon");
+          var row;
+          log("about to getRow " + typeof(getRow));
+          while(row = getRow()) {
+            send(row.key);
+            send("eggs");
+          };
+          return "tail";
+        };
+    JS
+    "erlang" => <<-ERLANG,
+        fun(Head, Req) ->
+            Send(<<"bacon">>),
+            Fun = fun({Row}, _) ->
+                Send(couch_util:get_value(<<"key">>, Row)),
+                Send(<<"eggs">>),
+                {ok, nil}
+            end,
+            FoldRows(Fun, nil),
+            <<"tail">>
+        end.
+    ERLANG
+  },
+  "list-simple" => {
+    "js" => <<-JS,
+        function(head, req) {
+          send("first chunk");
+          send(req.q);
+          var row;
+          while(row = getRow()) {
+            send(row.key);
+          };
+          return "early";
+        };
+    JS
+    "erlang" => <<-ERLANG,
+        fun(Head, {Req}) ->
+            Send(<<"first chunk">>),
+            Send(couch_util:get_value(<<"q">>, Req)),
+            Fun = fun({Row}, _) ->
+                Send(couch_util:get_value(<<"key">>, Row)),
+                {ok, nil}
+            end,
+            FoldRows(Fun, nil),
+            <<"early">>
+        end.
+    ERLANG
+  },
+  "list-chunky" => {
+    "js" => <<-JS,
+        function(head, req) {
+          send("first chunk");
+          send(req.q);
+          var row, i=0;
+          while(row = getRow()) {
+            send(row.key);
+            i += 1;
+            if (i > 2) {
+              return('early tail');
+            }
+          };
+        };
+    JS
+    "erlang" => <<-ERLANG,
+        fun(Head, {Req}) ->
+            Send(<<"first chunk">>),
+            Send(couch_util:get_value(<<"q">>, Req)),
+            Fun = fun
+                ({Row}, Count) when Count < 2 ->
+                    Send(couch_util:get_value(<<"key">>, Row)),
+                    {ok, Count+1};
+                ({Row}, Count) when Count == 2 ->
+                    Send(couch_util:get_value(<<"key">>, Row)),
+                    {stop, <<"early tail">>}
+            end,
+            {ok, Tail} = FoldRows(Fun, 0),
+            Tail
+        end.
+    ERLANG
+  },
+  "list-old-style" => {
+    "js" => <<-JS,
+        function(head, req, foo, bar) {
+          return "stuff";
+        }
+    JS
+    "erlang" => <<-ERLANG,
+        fun(Head, Req, Foo, Bar) ->
+            <<"stuff">>
+        end.
+    ERLANG
+  },
+  "list-capped" => {
+    "js" => <<-JS,
+        function(head, req) {
+          send("bacon")
+          var row, i = 0;
+          while(row = getRow()) {
+            send(row.key);
+            i += 1;
+            if (i > 2) {
+              return('early');
+            }
+          };
+        }
+    JS
+    "erlang" => <<-ERLANG,
+        fun(Head, Req) ->
+            Send(<<"bacon">>),
+            Fun = fun
+                ({Row}, Count) when Count < 2 ->
+                    Send(couch_util:get_value(<<"key">>, Row)),
+                    {ok, Count+1};
+                ({Row}, Count) when Count == 2 ->
+                    Send(couch_util:get_value(<<"key">>, Row)),
+                    {stop, <<"early">>}
+            end,
+            {ok, Tail} = FoldRows(Fun, 0),
+            Tail
+        end.
+    ERLANG
+  },
+  "list-raw" => {
+    "js" => <<-JS,
+        function(head, req) {
+          // log(this.toSource());
+          // log(typeof send);
+          send("first chunk");
+          send(req.q);
+          var row;
+          while(row = getRow()) {
+            send(row.key);
+          };
+          return "tail";
+        };
+    JS
+    "erlang" => <<-ERLANG,
+        fun(Head, {Req}) ->
+            Send(<<"first chunk">>),
+            Send(couch_util:get_value(<<"q">>, Req)),
+            Fun = fun({Row}, _) ->
+                Send(couch_util:get_value(<<"key">>, Row)),
+                {ok, nil}
+            end,
+            FoldRows(Fun, nil),
+            <<"tail">>
+        end.
+    ERLANG
+  },
+  "filter-basic" => {
+    "js" => <<-JS,
+      function(doc, req) {
+        if (doc.good) {
+          return true;
+        }
+      }
+    JS
+    "erlang" => <<-ERLANG,
+        fun({Doc}, Req) ->
+            couch_util:get_value(<<"good">>, Doc)
+        end.
+    ERLANG
+  },
+  "update-basic" => {
+    "js" => <<-JS,
+    function(doc, req) {
+      doc.world = "hello";
+      var resp = [doc, "hello doc"];
+      return resp;
+    }
+    JS
+    "erlang" => <<-ERLANG,
+        fun({Doc}, Req) ->
+            Doc2 = [{<<"world">>, <<"hello">>}|Doc],
+            [{Doc2}, {[{<<"body">>, <<"hello doc">>}]}]
+        end.
+    ERLANG
+  },
+  "error" => {
+    "js" => <<-JS,
+    function() {
+      throw(["error","error_key","testing"]);
+    }
+    JS
+    "erlang" => <<-ERLANG
+    fun(A, B) ->
+      throw([<<"error">>,<<"error_key">>,<<"testing">>])
+    end.
+    ERLANG
+  },
+  "fatal" => {
+    "js" => <<-JS,
+    function() {
+      throw(["fatal","error_key","testing"]);
+    }
+    JS
+    "erlang" => <<-ERLANG
+    fun(A, B) ->
+      throw([<<"fatal">>,<<"error_key">>,<<"testing">>])
+    end.
+    ERLANG
+  }
+}
+
+def make_ddoc(fun_path, fun_str)
+  doc = {"_id"=>"foo"}
+  d = doc
+  while p = fun_path.shift
+    l = p
+    if !fun_path.empty?
+      d[p] = {}
+      d = d[p]
+    end
+  end
+  d[l] = fun_str
+  doc
+end
+
+describe "query server normal case" do
+  before(:all) do
+    `cd #{COUCH_ROOT} && make`
+    @qs = QueryServerRunner.run
+  end
+  after(:all) do
+    @qs.close
+  end
+  it "should reset" do
+    @qs.run(["reset"]).should == true
+  end
+  it "should not erase ddocs on reset" do
+    @fun = functions["show-simple"][LANGUAGE]
+    @ddoc = make_ddoc(["shows","simple"], @fun)
+    @qs.teach_ddoc(@ddoc)
+    @qs.run(["reset"]).should == true   
+    @qs.ddoc_run(@ddoc, 
+      ["shows","simple"], 
+      [{:title => "Best ever", :body => "Doc body"}, {}]).should ==
+    ["resp", {"body" => "Best ever - Doc body"}] 
+  end
+  
+  it "should run map funs" do
+    @qs.reset!
+    @qs.run(["add_fun", functions["emit-twice"][LANGUAGE]]).should == true
+    @qs.run(["add_fun", functions["emit-once"][LANGUAGE]]).should == true
+    rows = @qs.run(["map_doc", {:a => "b"}])
+    rows[0][0].should == ["foo", "b"]
+    rows[0][1].should == ["bar", "b"]
+    rows[1][0].should == ["baz", "b"]
+  end
+  describe "reduce" do
+    before(:all) do
+      @fun = functions["reduce-values-length"][LANGUAGE]
+      @qs.reset!
+    end
+    it "should reduce" do
+      kvs = (0...10).collect{|i|[i,i*2]}
+      @qs.run(["reduce", [@fun], kvs]).should == [true, [10]]
+    end
+  end
+  describe "rereduce" do
+    before(:all) do
+      @fun = functions["reduce-values-sum"][LANGUAGE]
+      @qs.reset!
+    end
+    it "should rereduce" do
+      vs = (0...10).collect{|i|i}
+      @qs.run(["rereduce", [@fun], vs]).should == [true, [45]]
+    end
+  end
+
+  describe "design docs" do
+    before(:all) do
+      @ddoc = {
+        "_id" => "foo"
+      }
+      @qs.reset!
+    end
+    it "should learn design docs" do
+      @qs.teach_ddoc(@ddoc).should == true
+    end
+  end
+
+  # it "should validate"
+  describe "validation" do
+    before(:all) do
+      @fun = functions["validate-forbidden"][LANGUAGE]
+      @ddoc = make_ddoc(["validate_doc_update"], @fun)
+      @qs.teach_ddoc(@ddoc)
+    end
+    it "should allow good updates" do
+      @qs.ddoc_run(@ddoc, 
+        ["validate_doc_update"], 
+        [{"good" => true}, {}, {}]).should == 1
+    end
+    it "should reject invalid updates" do
+      @qs.ddoc_run(@ddoc, 
+        ["validate_doc_update"], 
+        [{"bad" => true}, {}, {}]).should == {"forbidden"=>"bad doc"}
+    end
+  end
+
+  describe "show" do
+    before(:all) do
+      @fun = functions["show-simple"][LANGUAGE]
+      @ddoc = make_ddoc(["shows","simple"], @fun)
+      @qs.teach_ddoc(@ddoc)
+    end
+    it "should show" do
+      @qs.ddoc_run(@ddoc, 
+        ["shows","simple"], 
+        [{:title => "Best ever", :body => "Doc body"}, {}]).should ==
+      ["resp", {"body" => "Best ever - Doc body"}]
+    end
+  end
+
+  describe "show with headers" do
+    before(:all) do
+      # TODO we can make real ddocs up there. 
+      @fun = functions["show-headers"][LANGUAGE]
+      @ddoc = make_ddoc(["shows","headers"], @fun)
+      @qs.teach_ddoc(@ddoc)
+    end
+    it "should show headers" do
+      @qs.ddoc_run(
+        @ddoc, 
+        ["shows","headers"], 
+        [{:title => "Best ever", :body => "Doc body"}, {}]
+      ).
+      should == ["resp", {"code"=>200,"headers" => {"X-Plankton"=>"Rusty"}, "body" => "Best ever - Doc body"}]
+    end
+  end
+  
+  describe "recoverable error" do
+    before(:all) do
+      @fun = functions["error"][LANGUAGE]
+      @ddoc = make_ddoc(["shows","error"], @fun)
+      @qs.teach_ddoc(@ddoc)
+    end
+    it "should not exit" do
+      @qs.ddoc_run(@ddoc, ["shows","error"],
+        [{"foo"=>"bar"}, {"q" => "ok"}]).
+        should == ["error", "error_key", "testing"]
+      # still running
+      @qs.run(["reset"]).should == true
+    end
+  end
+  
+  describe "changes filter" do
+    before(:all) do
+      @fun = functions["filter-basic"][LANGUAGE]
+      @ddoc = make_ddoc(["filters","basic"], @fun)
+      @qs.teach_ddoc(@ddoc)
+    end
+    it "should only return true for good docs" do
+      @qs.ddoc_run(@ddoc, 
+        ["filters","basic"], 
+        [[{"key"=>"bam", "good" => true}, {"foo" => "bar"}, {"good" => true}], {"req" => "foo"}]
+      ).
+      should == [true, [true, false, true]]
+    end
+  end
+  
+  describe "update" do
+    before(:all) do
+      # in another patch we can remove this duplication
+      # by setting up the design doc for each language ahead of time.
+      @fun = functions["update-basic"][LANGUAGE]
+      @ddoc = make_ddoc(["updates","basic"], @fun)
+      @qs.teach_ddoc(@ddoc)
+    end
+    it "should return a doc and a resp body" do
+      up, doc, resp = @qs.ddoc_run(@ddoc, 
+        ["updates","basic"], 
+        [{"foo" => "gnarly"}, {"method" => "POST"}]
+      )
+      up.should == "up"
+      doc.should == {"foo" => "gnarly", "world" => "hello"}
+      resp["body"].should == "hello doc"
+    end
+  end
+
+# end
+#                    LIST TESTS
+# __END__
+
+  describe "ddoc list" do
+      before(:all) do
+        @ddoc = {
+          "_id" => "foo",
+          "lists" => {
+            "simple" => functions["list-simple"][LANGUAGE],
+            "headers" => functions["show-sends"][LANGUAGE],
+            "rows" => functions["show-while-get-rows"][LANGUAGE],
+            "buffer-chunks" => functions["show-while-get-rows-multi-send"][LANGUAGE],
+            "chunky" => functions["list-chunky"][LANGUAGE]
+          }
+        }
+        @qs.teach_ddoc(@ddoc)
+      end
+      
+      describe "example list" do
+        it "should run normal" do
+          @qs.ddoc_run(@ddoc,
+            ["lists","simple"],
+            [{"foo"=>"bar"}, {"q" => "ok"}]
+          ).should == ["start", ["first chunk", "ok"], {"headers"=>{}}]
+          @qs.run(["list_row", {"key"=>"baz"}]).should ==  ["chunks", ["baz"]]
+          @qs.run(["list_row", {"key"=>"bam"}]).should ==  ["chunks", ["bam"]]
+          @qs.run(["list_row", {"key"=>"foom"}]).should == ["chunks", ["foom"]]
+          @qs.run(["list_row", {"key"=>"fooz"}]).should == ["chunks", ["fooz"]]
+          @qs.run(["list_row", {"key"=>"foox"}]).should == ["chunks", ["foox"]]
+          @qs.run(["list_end"]).should == ["end" , ["early"]]
+        end
+      end
+      
+      describe "headers" do
+        it "should do headers proper" do
+          @qs.ddoc_run(@ddoc, ["lists","headers"], 
+            [{"total_rows"=>1000}, {"q" => "ok"}]
+          ).should == ["start", ["first chunk", 'second "chunk"'], 
+            {"headers"=>{"Content-Type"=>"text/plain"}}]
+          @qs.rrun(["list_end"])
+          @qs.jsgets.should == ["end", ["tail"]]
+        end
+      end
+
+      describe "with rows" do
+        it "should list em" do
+          @qs.ddoc_run(@ddoc, ["lists","rows"], 
+            [{"foo"=>"bar"}, {"q" => "ok"}]).
+            should == ["start", ["first chunk", "ok"], {"headers"=>{}}]
+          @qs.rrun(["list_row", {"key"=>"baz"}])
+          @qs.get_chunks.should == ["baz"]
+          @qs.rrun(["list_row", {"key"=>"bam"}])
+          @qs.get_chunks.should == ["bam"]
+          @qs.rrun(["list_end"])
+          @qs.jsgets.should == ["end", ["tail"]]
+        end
+        it "should work with zero rows" do
+          @qs.ddoc_run(@ddoc, ["lists","rows"],
+            [{"foo"=>"bar"}, {"q" => "ok"}]).
+            should == ["start", ["first chunk", "ok"], {"headers"=>{}}]
+          @qs.rrun(["list_end"])
+          @qs.jsgets.should == ["end", ["tail"]]
+        end
+      end
+      
+      describe "should buffer multiple chunks sent for a single row." do
+        it "should should buffer em" do
+          @qs.ddoc_run(@ddoc, ["lists","buffer-chunks"],
+            [{"foo"=>"bar"}, {"q" => "ok"}]).
+            should == ["start", ["bacon"], {"headers"=>{}}]
+          @qs.rrun(["list_row", {"key"=>"baz"}])
+          @qs.get_chunks.should == ["baz", "eggs"]
+          @qs.rrun(["list_row", {"key"=>"bam"}])
+          @qs.get_chunks.should == ["bam", "eggs"]
+          @qs.rrun(["list_end"])
+          @qs.jsgets.should == ["end", ["tail"]]
+        end
+      end
+      it "should end after 2" do
+        @qs.ddoc_run(@ddoc, ["lists","chunky"],
+          [{"foo"=>"bar"}, {"q" => "ok"}]).
+          should == ["start", ["first chunk", "ok"], {"headers"=>{}}]
+          
+        @qs.run(["list_row", {"key"=>"baz"}]).
+          should ==  ["chunks", ["baz"]]
+
+        @qs.run(["list_row", {"key"=>"bam"}]).
+          should ==  ["chunks", ["bam"]]
+
+        @qs.run(["list_row", {"key"=>"foom"}]).
+          should == ["end", ["foom", "early tail"]]
+        # here's where js has to discard quit properly
+        @qs.run(["reset"]).
+          should == true
+      end
+    end
+  end
+
+
+
+def should_have_exited qs
+  begin
+    qs.run(["reset"])
+    "raise before this (except Erlang)".should == true
+  rescue RuntimeError => e
+    e.message.should == "no response"
+  rescue Errno::EPIPE
+    true.should == true
+  end
+end
+
+describe "query server that exits" do
+  before(:each) do
+    @qs = QueryServerRunner.run
+    @ddoc = {
+      "_id" => "foo",
+      "lists" => {
+        "capped" => functions["list-capped"][LANGUAGE],
+        "raw" => functions["list-raw"][LANGUAGE]
+      },
+      "shows" => {
+        "fatal" => functions["fatal"][LANGUAGE]
+      }
+    }
+    @qs.teach_ddoc(@ddoc)
+  end
+  after(:each) do
+    @qs.close
+  end
+
+  describe "only goes to 2 list" do
+    it "should exit if erlang sends too many rows" do
+      @qs.ddoc_run(@ddoc, ["lists","capped"],
+        [{"foo"=>"bar"}, {"q" => "ok"}]).
+        should == ["start", ["bacon"], {"headers"=>{}}]
+      @qs.run(["list_row", {"key"=>"baz"}]).should ==  ["chunks", ["baz"]]
+      @qs.run(["list_row", {"key"=>"foom"}]).should == ["chunks", ["foom"]]
+      @qs.run(["list_row", {"key"=>"fooz"}]).should == ["end", ["fooz", "early"]]
+      e = @qs.run(["list_row", {"key"=>"foox"}])
+      e[0].should == "error"
+      e[1].should == "unknown_command"
+      should_have_exited @qs
+    end
+  end
+
+  describe "raw list" do
+    it "should exit if it gets a non-row in the middle" do
+      @qs.ddoc_run(@ddoc, ["lists","raw"],
+        [{"foo"=>"bar"}, {"q" => "ok"}]).
+        should == ["start", ["first chunk", "ok"], {"headers"=>{}}]
+      e = @qs.run(["reset"])
+      e[0].should == "error"
+      e[1].should == "list_error"
+      should_have_exited @qs
+    end
+  end
+  
+  describe "fatal error" do
+    it "should exit" do
+      @qs.ddoc_run(@ddoc, ["shows","fatal"],
+        [{"foo"=>"bar"}, {"q" => "ok"}]).
+        should == ["error", "error_key", "testing"]
+      should_have_exited @qs
+    end
+  end
+end
+
+describe "thank you for using the tests" do
+  it "for more info run with QS_TRACE=true or see query_server_spec.rb file header" do
+  end
+end

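The header comment above is the intended entry point for these specs. A minimal sketch of driving them, assuming rspec is installed and that paths are taken from the top of the tree after the move to src/test/ (the couchjs_dev and main.js paths below come from the spec's own COMMANDS table):

    # JavaScript query server (the default, QS_LANG=js)
    spec src/test/view_server/query_server_spec.rb -f specdoc --color

    # Erlang native query server, with the full request/response trace
    QS_LANG=erlang QS_TRACE=true \
        spec src/test/view_server/query_server_spec.rb -f specdoc --color

    # the spec talks to the server in line-delimited JSON over stdin/stdout,
    # so the same exchange can be tried by hand (the server answers: true)
    echo '["reset"]' | ./bin/couchjs_dev ./share/server/main.js
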

[04/12] move test -> src/test

Posted by be...@apache.org.
http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/test/etap/076-file-compression.t
----------------------------------------------------------------------
diff --git a/test/etap/076-file-compression.t b/test/etap/076-file-compression.t
deleted file mode 100755
index 2929230..0000000
--- a/test/etap/076-file-compression.t
+++ /dev/null
@@ -1,186 +0,0 @@
-#!/usr/bin/env escript
-%% -*- erlang -*-
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
--record(user_ctx, {
-    name = null,
-    roles = [],
-    handler
-}).
-
-test_db_name() -> <<"couch_test_file_compression">>.
-ddoc_id() -> <<"_design/test">>.
-num_docs() -> 5000.
-
-
-main(_) ->
-    test_util:init_code_path(),
-
-    etap:plan(10),
-    case (catch test()) of
-        ok ->
-            etap:end_tests();
-        Other ->
-            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
-            etap:bail(Other)
-    end,
-    ok.
-
-
-test() ->
-    couch_server_sup:start_link(test_util:config_files()),
-    couch_config:set("couchdb", "file_compression", "none", false),
-
-    create_database(),
-    compact_db(),
-    compact_view(),
-    DbDiskSize1 = db_disk_size(),
-    ViewDiskSize1 = view_disk_size(),
-
-    couch_config:set("couchdb", "file_compression", "snappy", false),
-    compact_db(),
-    compact_view(),
-    DbDiskSize2 = db_disk_size(),
-    ViewDiskSize2 = view_disk_size(),
-
-    etap:is(DbDiskSize2 < DbDiskSize1, true, "Database disk size decreased"),
-    etap:is(ViewDiskSize2 < ViewDiskSize1, true, "Index disk size decreased"),
-
-    couch_config:set("couchdb", "file_compression", "deflate_9", false),
-    compact_db(),
-    compact_view(),
-    DbDiskSize3 = db_disk_size(),
-    ViewDiskSize3 = view_disk_size(),
-
-    etap:is(DbDiskSize3 < DbDiskSize2, true, "Database disk size decreased again"),
-    etap:is(ViewDiskSize3 < ViewDiskSize2, true, "Index disk size decreased again"),
-
-    couch_config:set("couchdb", "file_compression", "deflate_1", false),
-    compact_db(),
-    compact_view(),
-    DbDiskSize4 = db_disk_size(),
-    ViewDiskSize4 = view_disk_size(),
-
-    etap:is(DbDiskSize4 > DbDiskSize3, true, "Database disk size increased"),
-    etap:is(ViewDiskSize4 > ViewDiskSize3, true, "Index disk size increased"),
-
-    couch_config:set("couchdb", "file_compression", "snappy", false),
-    compact_db(),
-    compact_view(),
-    DbDiskSize5 = db_disk_size(),
-    ViewDiskSize5 = view_disk_size(),
-
-    etap:is(DbDiskSize5 > DbDiskSize4, true, "Database disk size increased again"),
-    etap:is(ViewDiskSize5 > ViewDiskSize4, true, "Index disk size increased again"),
-
-    couch_config:set("couchdb", "file_compression", "none", false),
-    compact_db(),
-    compact_view(),
-    DbDiskSize6 = db_disk_size(),
-    ViewDiskSize6 = view_disk_size(),
-
-    etap:is(DbDiskSize6 > DbDiskSize5, true, "Database disk size increased again"),
-    etap:is(ViewDiskSize6 > ViewDiskSize5, true, "Index disk size increased again"),
-
-    delete_db(),
-    couch_server_sup:stop(),
-    ok.
-
-
-create_database() ->
-    {ok, Db} = couch_db:create(
-        test_db_name(),
-        [{user_ctx, #user_ctx{roles = [<<"_admin">>]}}, overwrite]),
-    ok = populate_db(Db, num_docs()),
-    DDoc = couch_doc:from_json_obj({[
-        {<<"_id">>, ddoc_id()},
-        {<<"language">>, <<"javascript">>},
-        {<<"views">>, {[
-                {<<"view1">>, {[
-                    {<<"map">>, <<"function(doc) { emit(doc._id, doc.string); }">>}
-                ]}}
-            ]}
-        }
-    ]}),
-    {ok, _} = couch_db:update_doc(Db, DDoc, []),
-    refresh_index(),
-    ok = couch_db:close(Db).
-
-
-populate_db(_Db, NumDocs) when NumDocs =< 0 ->
-    ok;
-populate_db(Db, NumDocs) ->
-    Docs = lists:map(
-        fun(_) ->
-            couch_doc:from_json_obj({[
-                {<<"_id">>, couch_uuids:random()},
-                {<<"string">>, list_to_binary(lists:duplicate(1000, $X))}
-            ]})
-        end,
-        lists:seq(1, 500)),
-    {ok, _} = couch_db:update_docs(Db, Docs, []),
-    populate_db(Db, NumDocs - 500).
-
-
-refresh_index() ->
-    {ok, Db} = couch_db:open_int(test_db_name(), []),
-    {ok, DDoc} = couch_db:open_doc(Db, ddoc_id(), [ejson_body]),
-    couch_mrview:query_view(Db, DDoc, <<"view1">>, [{stale, false}]),
-    ok = couch_db:close(Db).
-
-
-compact_db() ->
-    {ok, Db} = couch_db:open_int(test_db_name(), []),
-    {ok, CompactPid} = couch_db:start_compact(Db),
-    MonRef = erlang:monitor(process, CompactPid),
-    receive
-    {'DOWN', MonRef, process, CompactPid, normal} ->
-        ok;
-    {'DOWN', MonRef, process, CompactPid, Reason} ->
-        etap:bail("Error compacting database: " ++ couch_util:to_list(Reason))
-    after 120000 ->
-        etap:bail("Timeout waiting for database compaction")
-    end,
-    ok = couch_db:close(Db).
-
-
-compact_view() ->
-    {ok, MonRef} = couch_mrview:compact(test_db_name(), ddoc_id(), [monitor]),
-    receive
-    {'DOWN', MonRef, process, _CompactPid, normal} ->
-        ok;
-    {'DOWN', MonRef, process, _CompactPid, Reason} ->
-        etap:bail("Error compacting view group: " ++ couch_util:to_list(Reason))
-    after 120000 ->
-        etap:bail("Timeout waiting for view group compaction")
-    end.
-
-
-db_disk_size() ->
-    {ok, Db} = couch_db:open_int(test_db_name(), []),
-    {ok, Info} = couch_db:get_db_info(Db),
-    ok = couch_db:close(Db),
-    couch_util:get_value(disk_size, Info).
-
-
-view_disk_size() ->
-    {ok, Db} = couch_db:open_int(test_db_name(), []),
-    {ok, DDoc} = couch_db:open_doc(Db, ddoc_id(), [ejson_body]),
-    {ok, Info} = couch_mrview:get_info(Db, DDoc),
-    ok = couch_db:close(Db),
-    couch_util:get_value(disk_size, Info).
-
-
-delete_db() ->
-    ok = couch_server:delete(
-        test_db_name(), [{user_ctx, #user_ctx{roles = [<<"_admin">>]}}]).

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/test/etap/077-couch-db-fast-db-delete-create.t
----------------------------------------------------------------------
diff --git a/test/etap/077-couch-db-fast-db-delete-create.t b/test/etap/077-couch-db-fast-db-delete-create.t
deleted file mode 100644
index 2026698..0000000
--- a/test/etap/077-couch-db-fast-db-delete-create.t
+++ /dev/null
@@ -1,61 +0,0 @@
-#!/usr/bin/env escript
-%% -*- erlang -*-
-
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
-main(_) ->
-
-    test_util:init_code_path(),
-
-    etap:plan(unknown),
-    case (catch test()) of
-        ok ->
-            etap:end_tests();
-        Other ->
-            Msg = io_lib:format("Test died abnormally: ~p", [Other]),
-            etap:diag(Msg),
-            etap:bail(Msg)
-        end,
-    ok.
-
-loop(0) ->
-    ok;
-loop(N) ->
-    ok = cycle(),
-    loop(N - 1).
-
-cycle() ->
-    ok = couch_server:delete(<<"etap-test-db">>, []),
-    {ok, _Db} = couch_db:create(<<"etap-test-db">>, []),
-    ok.
-
-test() ->
-    couch_server_sup:start_link(test_util:config_files()),
-
-    {ok, _Db} = couch_db:create(<<"etap-test-db">>, []),
-
-    ok = loop(1),
-    ok = loop(10),
-    ok = loop(100),
-    ok = loop(1000),
-
-    % for more thorough testing:
-    % ok = loop(10000),
-    % ok = loop(100000),
-    % ok = loop(1000000),
-    % ok = loop(10000000),
-
-    ok = couch_server:delete(<<"etap-test-db">>, []),
-
-    etap:is(true, true, "lots of creating and deleting of a database"),
-    ok.

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/test/etap/080-config-get-set.t
----------------------------------------------------------------------
diff --git a/test/etap/080-config-get-set.t b/test/etap/080-config-get-set.t
deleted file mode 100755
index 94a9cba..0000000
--- a/test/etap/080-config-get-set.t
+++ /dev/null
@@ -1,128 +0,0 @@
-#!/usr/bin/env escript
-%% -*- erlang -*-
-
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
-default_config() ->
-    test_util:build_file("etc/couchdb/default_dev.ini").
-
-main(_) ->
-    test_util:init_code_path(),
-    etap:plan(12),
-    case (catch test()) of
-        ok ->
-            etap:end_tests();
-        Other ->
-            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
-            etap:bail(Other)
-    end,
-    ok.
-
-test() ->
-    % start couch_config with default
-    couch_config:start_link([default_config()]),
-
-
-    % Check that we can get values
-
-
-    etap:fun_is(
-        fun(List) -> length(List) > 0 end,
-        couch_config:all(),
-        "Data was loaded from the INI file."
-    ),
-
-    etap:fun_is(
-        fun(List) -> length(List) > 0 end,
-        couch_config:get("daemons"),
-        "There are settings in the [daemons] section of the INI file."
-    ),
-
-    etap:is(
-        couch_config:get("httpd_design_handlers", "_view"),
-        "{couch_mrview_http, handle_view_req}",
-        "The {httpd_design_handlers, view} is the expected default."
-    ),
-
-    etap:is(
-        couch_config:get("httpd", "foo", "bar"),
-        "bar",
-        "Returns the default when key doesn't exist in config."
-    ),
-
-    etap:is(
-        couch_config:get("httpd", "foo"),
-        undefined,
-        "The default default is the atom 'undefined'."
-    ),
-
-    etap:is(
-        couch_config:get("httpd", "port", "bar"),
-        "5984",
-        "Only returns the default when the config setting does not exist."
-    ),
-
-
-    % Check that setting values works.
-
-
-    ok = couch_config:set("log", "level", "severe", false),
-
-    etap:is(
-        couch_config:get("log", "level"),
-        "severe",
-        "Non persisted changes take effect."
-    ),
-
-    etap:is(
-        couch_config:get("new_section", "bizzle"),
-        undefined,
-        "Section 'new_section' does not exist."
-    ),
-
-    ok = couch_config:set("new_section", "bizzle", "bang", false),
-
-    etap:is(
-        couch_config:get("new_section", "bizzle"),
-        "bang",
-        "New section 'new_section' was created for a new key/value pair."
-    ),
-
-
-    % Check that deleting works
-
-
-    ok = couch_config:delete("new_section", "bizzle", false),
-    etap:is(
-        couch_config:get("new_section", "bizzle"),
-        undefined,
-        "Deleting sets the value to \"\""
-    ),
-
-
-    % Check ge/set/delete binary strings
-
-    ok = couch_config:set(<<"foo">>, <<"bar">>, <<"baz">>, false),
-    etap:is(
-        couch_config:get(<<"foo">>, <<"bar">>),
-        <<"baz">>,
-        "Can get and set with binary section and key values."
-    ),
-    ok = couch_config:delete(<<"foo">>, <<"bar">>, false),
-    etap:is(
-        couch_config:get(<<"foo">>, <<"bar">>),
-        undefined,
-        "Deleting with binary section/key pairs sets the value to \"\""
-    ),
-
-    ok.

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/test/etap/081-config-override.1.ini
----------------------------------------------------------------------
diff --git a/test/etap/081-config-override.1.ini b/test/etap/081-config-override.1.ini
deleted file mode 100644
index 55451da..0000000
--- a/test/etap/081-config-override.1.ini
+++ /dev/null
@@ -1,22 +0,0 @@
-; Licensed to the Apache Software Foundation (ASF) under one
-; or more contributor license agreements.  See the NOTICE file
-; distributed with this work for additional information
-; regarding copyright ownership.  The ASF licenses this file
-; to you under the Apache License, Version 2.0 (the
-; "License"); you may not use this file except in compliance
-; with the License.  You may obtain a copy of the License at
-; 
-;   http://www.apache.org/licenses/LICENSE-2.0
-;
-; Unless required by applicable law or agreed to in writing,
-; software distributed under the License is distributed on an
-; "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-; KIND, either express or implied.  See the License for the
-; specific language governing permissions and limitations
-; under the License.
-
-[couchdb]
-max_dbs_open=10
-
-[httpd]
-port=4895

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/test/etap/081-config-override.2.ini
----------------------------------------------------------------------
diff --git a/test/etap/081-config-override.2.ini b/test/etap/081-config-override.2.ini
deleted file mode 100644
index 5f46357..0000000
--- a/test/etap/081-config-override.2.ini
+++ /dev/null
@@ -1,22 +0,0 @@
-; Licensed to the Apache Software Foundation (ASF) under one
-; or more contributor license agreements.  See the NOTICE file
-; distributed with this work for additional information
-; regarding copyright ownership.  The ASF licenses this file
-; to you under the Apache License, Version 2.0 (the
-; "License"); you may not use this file except in compliance
-; with the License.  You may obtain a copy of the License at
-; 
-;   http://www.apache.org/licenses/LICENSE-2.0
-;
-; Unless required by applicable law or agreed to in writing,
-; software distributed under the License is distributed on an
-; "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-; KIND, either express or implied.  See the License for the
-; specific language governing permissions and limitations
-; under the License.
-
-[httpd]
-port = 80
-
-[fizbang]
-unicode = normalized

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/test/etap/081-config-override.t
----------------------------------------------------------------------
diff --git a/test/etap/081-config-override.t b/test/etap/081-config-override.t
deleted file mode 100755
index 01f8b4c..0000000
--- a/test/etap/081-config-override.t
+++ /dev/null
@@ -1,212 +0,0 @@
-#!/usr/bin/env escript
-%% -*- erlang -*-
-
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
-default_config() ->
-    test_util:build_file("etc/couchdb/default_dev.ini").
-
-local_config_1() ->
-    test_util:source_file("test/etap/081-config-override.1.ini").
-
-local_config_2() ->
-    test_util:source_file("test/etap/081-config-override.2.ini").
-
-local_config_write() ->
-    test_util:build_file("test/etap/temp.081").
-
-% Run tests and wait for the config gen_server to shutdown.
-run_tests(IniFiles, Tests) ->
-    {ok, Pid} = couch_config:start_link(IniFiles),
-    erlang:monitor(process, Pid),
-    Tests(),
-    couch_config:stop(),
-    receive
-        {'DOWN', _, _, Pid, _} -> ok;
-        _Other -> etap:diag("OTHER: ~p~n", [_Other])
-    after
-        1000 -> throw({timeout_error, config_stop})
-    end.
-
-main(_) ->
-    test_util:init_code_path(),
-    etap:plan(17),
-
-    case (catch test()) of
-        ok ->
-            etap:end_tests();
-        Other ->
-            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
-            etap:bail(Other)
-    end,
-    ok.
-
-test() ->
-
-    CheckStartStop = fun() -> ok end,
-    run_tests([default_config()], CheckStartStop),
-
-    CheckDefaults = fun() ->
-        etap:is(
-            couch_config:get("couchdb", "max_dbs_open"),
-            "100",
-            "{couchdb, max_dbs_open} is 100 by defualt."
-        ),
-
-        etap:is(
-            couch_config:get("httpd","port"),
-            "5984",
-            "{httpd, port} is 5984 by default"
-        ),
-
-        etap:is(
-            couch_config:get("fizbang", "unicode"),
-            undefined,
-            "{fizbang, unicode} is undefined by default"
-        )
-    end,
-
-    run_tests([default_config()], CheckDefaults),
-
-
-    % Check that subsequent files override values appropriately
-
-    CheckOverride = fun() ->
-        etap:is(
-            couch_config:get("couchdb", "max_dbs_open"),
-            "10",
-            "{couchdb, max_dbs_open} was overriden with the value 10"
-        ),
-
-        etap:is(
-            couch_config:get("httpd", "port"),
-            "4895",
-            "{httpd, port} was overriden with the value 4895"
-        )
-    end,
-
-    run_tests([default_config(), local_config_1()], CheckOverride),
-
-
-    % Check that overrides can create new sections
-
-    CheckOverride2 = fun() ->
-        etap:is(
-            couch_config:get("httpd", "port"),
-            "80",
-            "{httpd, port} is overriden with the value 80"
-        ),
-
-        etap:is(
-            couch_config:get("fizbang", "unicode"),
-            "normalized",
-            "{fizbang, unicode} was created by override INI file"
-        )
-    end,
-
-    run_tests([default_config(), local_config_2()], CheckOverride2),
-
-
-    % Check that values can be overriden multiple times
-
-    CheckOverride3 = fun() ->
-        etap:is(
-            couch_config:get("httpd", "port"),
-            "80",
-            "{httpd, port} value was taken from the last specified INI file."
-        )
-    end,
-
-    run_tests(
-        [default_config(), local_config_1(), local_config_2()],
-        CheckOverride3
-    ),
-
-    % Check persistence to last file.
-
-    % Empty the file in case it exists.
-    {ok, Fd} = file:open(local_config_write(), write),
-    ok = file:truncate(Fd),
-    ok = file:close(Fd),
-
-    % Open and write a value
-    CheckCanWrite = fun() ->
-        etap:is(
-            couch_config:get("httpd", "port"),
-            "5984",
-            "{httpd, port} is still 5984 by default"
-        ),
-
-        etap:is(
-            couch_config:set("httpd", "port", "8080"),
-            ok,
-            "Writing {httpd, port} is kosher."
-        ),
-
-        etap:is(
-            couch_config:get("httpd", "port"),
-            "8080",
-            "{httpd, port} was updated to 8080 successfully."
-        ),
-
-        etap:is(
-            couch_config:delete("httpd", "bind_address"),
-            ok,
-            "Deleting {httpd, bind_address} succeeds"
-        ),
-
-        etap:is(
-            couch_config:get("httpd", "bind_address"),
-            undefined,
-            "{httpd, bind_address} was actually deleted."
-        )
-    end,
-
-    run_tests([default_config(), local_config_write()], CheckCanWrite),
-
-    % Open and check where we don't expect persistence.
-
-    CheckDidntWrite = fun() ->
-        etap:is(
-            couch_config:get("httpd", "port"),
-            "5984",
-            "{httpd, port} was not persisted to the primary INI file."
-        ),
-
-        etap:is(
-            couch_config:get("httpd", "bind_address"),
-            "127.0.0.1",
-            "{httpd, bind_address} was not deleted form the primary INI file."
-        )
-    end,
-
-    run_tests([default_config()], CheckDidntWrite),
-
-    % Open and check we have only the persistence we expect.
-    CheckDidWrite = fun() ->
-        etap:is(
-            couch_config:get("httpd", "port"),
-            "8080",
-            "{httpd, port} is still 8080 after reopening the config."
-        ),
-
-        etap:is(
-            couch_config:get("httpd", "bind_address"),
-            undefined,
-            "{httpd, bind_address} is still \"\" after reopening."
-        )
-    end,
-
-    run_tests([local_config_write()], CheckDidWrite),
-
-    ok.

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/test/etap/082-config-register.t
----------------------------------------------------------------------
diff --git a/test/etap/082-config-register.t b/test/etap/082-config-register.t
deleted file mode 100755
index 191ba8f..0000000
--- a/test/etap/082-config-register.t
+++ /dev/null
@@ -1,94 +0,0 @@
-#!/usr/bin/env escript
-%% -*- erlang -*-
-
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
-default_config() ->
-    test_util:build_file("etc/couchdb/default_dev.ini").
-
-main(_) ->
-    test_util:init_code_path(),
-    etap:plan(5),
-    case (catch test()) of
-        ok ->
-            etap:end_tests();
-        Other ->
-            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
-            etap:bail(Other)
-    end,
-    ok.
-
-test() ->
-    couch_config:start_link([default_config()]),
-
-    etap:is(
-        couch_config:get("httpd", "port"),
-        "5984",
-        "{httpd, port} is 5984 by default."
-    ),
-
-    ok = couch_config:set("httpd", "port", "4895", false),
-
-    etap:is(
-        couch_config:get("httpd", "port"),
-        "4895",
-        "{httpd, port} changed to 4895"
-    ),
-
-    SentinelFunc = fun() ->
-        % Ping/Pong to make sure we wait for this
-        % process to die
-        receive {ping, From} -> From ! pong end
-    end,
-    SentinelPid = spawn(SentinelFunc),
-
-    couch_config:register(
-        fun("httpd", "port", Value) ->
-            etap:is(Value, "8080", "Registered function got notification.")
-        end,
-        SentinelPid
-    ),
-
-    ok = couch_config:set("httpd", "port", "8080", false),
-
-    % Implicitly checking that we *don't* call the function
-    etap:is(
-        couch_config:get("httpd", "bind_address"),
-        "127.0.0.1",
-        "{httpd, bind_address} is not '0.0.0.0'"
-    ),
-    ok = couch_config:set("httpd", "bind_address", "0.0.0.0", false),
-
-    % Ping-Pong kill process
-    SentinelPid ! {ping, self()},
-    receive
-        _Any -> ok
-    after 1000 ->
-        throw({timeout_error, registered_pid})
-    end,
-
-    ok = couch_config:set("httpd", "port", "80", false),
-    etap:is(
-        couch_config:get("httpd", "port"),
-        "80",
-        "Implicitly test that the function got de-registered"
-    ),
-
-    % test passing of Persist flag
-    couch_config:register(
-        fun("httpd", _, _, Persist) ->
-            etap:is(Persist, false)
-        end),
-    ok = couch_config:set("httpd", "port", "80", false),
-
-    ok.

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/test/etap/083-config-no-files.t
----------------------------------------------------------------------
diff --git a/test/etap/083-config-no-files.t b/test/etap/083-config-no-files.t
deleted file mode 100755
index 0ce38e6..0000000
--- a/test/etap/083-config-no-files.t
+++ /dev/null
@@ -1,53 +0,0 @@
-#!/usr/bin/env escript
-%% -*- erlang -*-
-
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
-
-main(_) ->
-    test_util:init_code_path(),
-    etap:plan(3),
-    case (catch test()) of
-        ok ->
-            etap:end_tests();
-        Other ->
-            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
-            etap:bail(Other)
-    end,
-    ok.
-
-test() ->
-    couch_config:start_link([]),
-
-    etap:fun_is(
-        fun(KVPairs) -> length(KVPairs) == 0 end,
-        couch_config:all(),
-        "No INI files specified returns 0 key/value pairs."
-    ),
-
-    ok = couch_config:set("httpd", "port", "80", false),
-
-    etap:is(
-        couch_config:get("httpd", "port"),
-        "80",
-        "Created a new non-persisted k/v pair."
-    ),
-
-    ok = couch_config:set("httpd", "bind_address", "127.0.0.1"),
-    etap:is(
-        couch_config:get("httpd", "bind_address"),
-        "127.0.0.1",
-        "Asking for a persistent key/value pair doesn't choke."
-    ),
-
-    ok.

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/test/etap/090-task-status.t
----------------------------------------------------------------------
diff --git a/test/etap/090-task-status.t b/test/etap/090-task-status.t
deleted file mode 100755
index 23115bd..0000000
--- a/test/etap/090-task-status.t
+++ /dev/null
@@ -1,279 +0,0 @@
-#!/usr/bin/env escript
-%% -*- erlang -*-
-
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
-main(_) ->
-    test_util:init_code_path(),
-    etap:plan(28),
-    case (catch test()) of
-        ok ->
-            etap:end_tests();
-        Other ->
-            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
-            etap:bail(Other)
-    end,
-    ok.
-
-get_task_prop(Pid, Prop) ->
-    From = list_to_binary(pid_to_list(Pid)),
-    Element = lists:foldl(
-        fun(PropList,Acc) ->
-            case couch_util:get_value(pid,PropList) of
-                From ->
-                    [PropList | Acc];
-                _ ->
-                    []
-            end
-        end,
-        [], couch_task_status:all()
-    ),
-    case couch_util:get_value(Prop, hd(Element), nil) of
-    nil ->
-        etap:bail("Could not get property '" ++ couch_util:to_list(Prop) ++
-            "' for task " ++ pid_to_list(Pid));
-    Value ->
-        Value
-    end.
-
-
-loop() ->
-    receive
-    {add, Props, From} ->
-        Resp = couch_task_status:add_task(Props),
-        From ! {ok, self(), Resp},
-        loop();
-    {update, Props, From} ->
-        Resp = couch_task_status:update(Props),
-        From ! {ok, self(), Resp},
-        loop();
-    {update_frequency, Msecs, From} ->
-        Resp = couch_task_status:set_update_frequency(Msecs),
-        From ! {ok, self(), Resp},
-        loop();
-    {done, From} ->
-        From ! {ok, self(), ok}
-    end.
-
-call(Pid, Command) ->
-    Pid ! {Command, self()},
-    wait(Pid).
-
-call(Pid, Command, Arg) ->
-    Pid ! {Command, Arg, self()},
-    wait(Pid).
-
-wait(Pid) ->
-    receive
-        {ok, Pid, Msg} -> Msg
-    after 1000 ->
-        throw(timeout_error)
-    end.
-
-test() ->
-    {ok, TaskStatusPid} = couch_task_status:start_link(),
-
-    TaskUpdater = fun() -> loop() end,
-    % create three updaters
-    Pid1 = spawn(TaskUpdater),
-    Pid2 = spawn(TaskUpdater),
-    Pid3 = spawn(TaskUpdater),
-
-    ok = call(Pid1, add, [{type, replication}, {progress, 0}]),
-    etap:is(
-        length(couch_task_status:all()),
-        1,
-        "Started a task"
-    ),
-    Task1StartTime = get_task_prop(Pid1, started_on),
-    etap:is(
-        is_integer(Task1StartTime),
-        true,
-        "Task start time is defined."
-    ),
-    etap:is(
-        get_task_prop(Pid1, updated_on),
-        Task1StartTime,
-        "Task's start time is the same as the update time before an update."
-    ),
-
-    etap:is(
-        call(Pid1, add, [{type, compaction}, {progress, 0}]),
-        {add_task_error, already_registered},
-        "Unable to register multiple tasks for a single Pid."
-    ),
-
-    etap:is(
-        get_task_prop(Pid1, type),
-        replication,
-        "Task type is 'replication'."
-    ),
-    etap:is(
-        get_task_prop(Pid1, progress),
-        0,
-        "Task progress is 0."
-    ),
-
-    ok = timer:sleep(1000),
-    call(Pid1, update, [{progress, 25}]),
-    etap:is(
-        get_task_prop(Pid1, progress),
-        25,
-        "Task progress is 25."
-    ),
-    etap:is(
-        get_task_prop(Pid1, updated_on) > Task1StartTime,
-        true,
-        "Task's last update time has increased after an update."
-    ),
-
-    call(Pid2, add, [{type, compaction}, {progress, 0}]),
-    etap:is(
-        length(couch_task_status:all()),
-        2,
-        "Started a second task."
-    ),
-    Task2StartTime = get_task_prop(Pid2, started_on),
-    etap:is(
-        is_integer(Task2StartTime),
-        true,
-        "Second task's start time is defined."
-    ),
-    etap:is(
-        get_task_prop(Pid2, updated_on),
-        Task2StartTime,
-        "Second task's start time is the same as the update time before an update."
-    ),
-
-    etap:is(
-        get_task_prop(Pid2, type),
-        compaction,
-        "Second task's type is 'compaction'."
-    ),
-    etap:is(
-        get_task_prop(Pid2, progress),
-        0,
-        "Second task's progress is 0."
-    ),
-
-    ok = timer:sleep(1000),
-    call(Pid2, update, [{progress, 33}]),
-    etap:is(
-        get_task_prop(Pid2, progress),
-        33,
-        "Second task's progress updated to 33."
-    ),
-    etap:is(
-        get_task_prop(Pid2, updated_on) > Task2StartTime,
-        true,
-        "Second task's last update time has increased after an update."
-    ),
-
-    call(Pid3, add, [{type, indexer}, {progress, 0}]),
-    etap:is(
-        length(couch_task_status:all()),
-        3,
-        "Registered a third task."
-    ),
-    Task3StartTime = get_task_prop(Pid3, started_on),
-    etap:is(
-        is_integer(Task3StartTime),
-        true,
-        "Third task's start time is defined."
-    ),
-    etap:is(
-        get_task_prop(Pid3, updated_on),
-        Task3StartTime,
-        "Third task's start time is the same as the update time before an update."
-    ),
-
-    etap:is(
-        get_task_prop(Pid3, type),
-        indexer,
-        "Third task's type is 'indexer'."
-    ),
-    etap:is(
-        get_task_prop(Pid3, progress),
-        0,
-        "Third task's progress is 0."
-    ),
-
-    ok = timer:sleep(1000),
-    call(Pid3, update, [{progress, 50}]),
-    etap:is(
-        get_task_prop(Pid3, progress),
-        50,
-        "Third task's progress updated to 50."
-    ),
-    etap:is(
-        get_task_prop(Pid3, updated_on) > Task3StartTime,
-        true,
-        "Third task's last update time has increased after an update."
-    ),
-
-    call(Pid3, update_frequency, 500),
-    call(Pid3, update, [{progress, 66}]),
-    etap:is(
-        get_task_prop(Pid3, progress),
-        66,
-        "Third task's progress updated to 66."
-    ),
-
-    call(Pid3, update, [{progress, 67}]),
-    etap:is(
-        get_task_prop(Pid3, progress),
-        66,
-        "Task update dropped because of frequency limit."
-    ),
-
-    call(Pid3, update_frequency, 0),
-    call(Pid3, update, [{progress, 77}]),
-    etap:is(
-        get_task_prop(Pid3, progress),
-        77,
-        "Task updated after reseting frequency limit."
-    ),
-
-
-    call(Pid1, done),
-    etap:is(
-        length(couch_task_status:all()),
-        2,
-        "First task finished."
-    ),
-
-    call(Pid2, done),
-    etap:is(
-        length(couch_task_status:all()),
-        1,
-        "Second task finished."
-    ),
-
-    call(Pid3, done),
-    etap:is(
-        length(couch_task_status:all()),
-        0,
-        "Third task finished."
-    ),
-
-    erlang:monitor(process, TaskStatusPid),
-    couch_task_status:stop(),
-    receive
-        {'DOWN', _, _, TaskStatusPid, _} ->
-            ok
-    after
-        1000 ->
-            throw(timeout_error)
-    end,
-
-    ok.
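
For context, the test above drives couch_task_status through the small loop/0 shim so it can assert on other processes' tasks. The API itself is simpler; a minimal sketch using only calls that appear in the test (the task properties are the same illustrative ones):

    %% A worker registers one task (at most one per process), throttles
    %% updates to one per 500 ms, reports progress, and is removed from
    %% couch_task_status:all() when it exits.
    {ok, _StatusPid} = couch_task_status:start_link(),
    spawn(fun() ->
        couch_task_status:add_task([{type, indexer}, {progress, 0}]),
        couch_task_status:set_update_frequency(500),
        lists:foreach(fun(P) ->
            couch_task_status:update([{progress, P}]),
            timer:sleep(100)
        end, lists:seq(1, 100))
    end),
    couch_task_status:all().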

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/test/etap/100-ref-counter.t
----------------------------------------------------------------------
diff --git a/test/etap/100-ref-counter.t b/test/etap/100-ref-counter.t
deleted file mode 100755
index 8f996d0..0000000
--- a/test/etap/100-ref-counter.t
+++ /dev/null
@@ -1,114 +0,0 @@
-#!/usr/bin/env escript
-%% -*- erlang -*-
-
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
-main(_) ->
-    test_util:init_code_path(),
-    etap:plan(8),
-    case (catch test()) of
-        ok ->
-            etap:end_tests();
-        Other ->
-            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
-            etap:bail(Other)
-    end,
-    ok.
-
-loop() ->
-    receive
-        close -> ok
-    end.
-
-wait() ->
-    receive
-        {'DOWN', _, _, _, _} -> ok
-    after 1000 ->
-        throw(timeout_error)
-    end.
-
-test() ->
-    {ok, RefCtr} = couch_ref_counter:start([]),
-
-    etap:is(
-        couch_ref_counter:count(RefCtr),
-        1,
-        "A ref_counter is initialized with the calling process as a referer."
-    ),
-
-    ChildPid1 = spawn(fun() -> loop() end),
-
-    % This is largely implicit in that nothing else breaks
-    % as ok is just returned from gen_server:cast()
-    etap:is(
-        couch_ref_counter:drop(RefCtr, ChildPid1),
-        ok,
-        "Dropping an unknown Pid is ignored."
-    ),
-
-    couch_ref_counter:add(RefCtr, ChildPid1),
-    etap:is(
-        couch_ref_counter:count(RefCtr),
-        2,
-        "Adding a Pid to the ref_counter increases it's count."
-    ),
-
-    couch_ref_counter:add(RefCtr, ChildPid1),
-    etap:is(
-        couch_ref_counter:count(RefCtr),
-        2,
-        "Readding the same Pid maintains the count but increments it's refs."
-    ),
-
-    couch_ref_counter:drop(RefCtr, ChildPid1),
-    etap:is(
-        couch_ref_counter:count(RefCtr),
-        2,
-        "Droping the doubly added Pid only removes a ref, not a referer."
-    ),
-
-    couch_ref_counter:drop(RefCtr, ChildPid1),
-    etap:is(
-        couch_ref_counter:count(RefCtr),
-        1,
-        "Dropping the second ref drops the referer."
-    ),
-
-    couch_ref_counter:add(RefCtr, ChildPid1),
-    etap:is(
-        couch_ref_counter:count(RefCtr),
-        2,
-        "Sanity checking that the Pid was re-added."
-    ),
-
-    erlang:monitor(process, ChildPid1),
-    ChildPid1 ! close,
-    wait(),
-    
-    CheckFun = fun
-        (Iter, nil) ->
-            case couch_ref_counter:count(RefCtr) of
-                1 -> Iter;
-                _ -> nil
-            end;
-        (_, Acc) ->
-            Acc
-    end,
-    Result = lists:foldl(CheckFun, nil, lists:seq(1, 10000)),
-    etap:isnt(
-        Result,
-        nil,
-        "The referer count was decremented automatically on process exit."
-    ),
-
-    ok.
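
Restating the semantics this test pins down, as a minimal shell sketch with the same calls and no etap plumbing:

    {ok, RefCtr} = couch_ref_counter:start([]),  % caller is the first referer
    1 = couch_ref_counter:count(RefCtr),
    Pid = spawn(fun() -> receive close -> ok end end),
    couch_ref_counter:add(RefCtr, Pid),          % new referer    -> count 2
    couch_ref_counter:add(RefCtr, Pid),          % same Pid again -> still 2, two refs
    couch_ref_counter:drop(RefCtr, Pid),         % drops one ref  -> still 2
    couch_ref_counter:drop(RefCtr, Pid),         % drops the Pid  -> count 1
    couch_ref_counter:add(RefCtr, Pid),          % re-added       -> count 2
    Pid ! close.                                 % on exit the Pid is dropped again, asynchronously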

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/test/etap/120-stats-collect.t
----------------------------------------------------------------------
diff --git a/test/etap/120-stats-collect.t b/test/etap/120-stats-collect.t
deleted file mode 100755
index a30f9ac..0000000
--- a/test/etap/120-stats-collect.t
+++ /dev/null
@@ -1,150 +0,0 @@
-#!/usr/bin/env escript
-%% -*- erlang -*-
-
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
-main(_) ->
-    test_util:init_code_path(),
-    etap:plan(11),
-    case (catch test()) of
-        ok ->
-            etap:end_tests();
-        Other ->
-            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
-            etap:bail()
-    end,
-    ok.
-
-test() ->
-    couch_stats_collector:start(),
-    ok = test_counters(),
-    ok = test_abs_values(),
-    ok = test_proc_counting(),
-    ok = test_all(),
-    ok.
-
-test_counters() ->
-    AddCount = fun() -> couch_stats_collector:increment(foo) end,
-    RemCount = fun() -> couch_stats_collector:decrement(foo) end,
-    repeat(AddCount, 100),
-    repeat(RemCount, 25),
-    repeat(AddCount, 10),
-    repeat(RemCount, 5),
-    etap:is(
-        couch_stats_collector:get(foo),
-        80,
-        "Incrememnt tracks correctly."
-    ),
-
-    repeat(RemCount, 80),
-    etap:is(
-        couch_stats_collector:get(foo),
-        0,
-        "Decremented to zaro."
-    ),
-    ok.
-
-test_abs_values() ->
-    lists:map(fun(Val) ->
-        couch_stats_collector:record(bar, Val)
-    end, lists:seq(1, 15)),
-    etap:is(
-        couch_stats_collector:get(bar),
-        lists:seq(1, 15),
-        "Absolute values are recorded correctly."
-    ),
-    
-    couch_stats_collector:clear(bar),
-    etap:is(
-        couch_stats_collector:get(bar),
-        nil,
-        "Absolute values are cleared correctly."
-    ),
-    ok.
-
-test_proc_counting() ->
-    Self = self(),
-    OnePid = spawn(fun() ->
-        couch_stats_collector:track_process_count(hoopla),
-        Self ! reporting,
-        receive sepuku -> ok end
-    end),
-    R1 = erlang:monitor(process, OnePid),
-    receive reporting -> ok end,
-    etap:is(
-        couch_stats_collector:get(hoopla),
-        1,
-        "track_process_count increments the counter."
-    ),
-    
-    TwicePid = spawn(fun() ->
-        couch_stats_collector:track_process_count(hoopla),
-        couch_stats_collector:track_process_count(hoopla),
-        Self ! reporting,
-        receive sepuku -> ok end
-    end),
-    R2 = erlang:monitor(process, TwicePid),
-    receive reporting -> ok end,
-    etap:is(
-        couch_stats_collector:get(hoopla),
-        3,
-        "track_process_count allows more than one incrememnt per Pid"
-    ),
-    
-    OnePid ! sepuku,
-    receive {'DOWN', R1, _, _, _} -> ok end,
-    timer:sleep(250),
-    etap:is(
-        couch_stats_collector:get(hoopla),
-        2,
-        "Process count is decremented when process exits."
-    ),
-    
-    TwicePid ! sepuku,
-    receive {'DOWN', R2, _, _, _} -> ok end,
-    timer:sleep(250),
-    etap:is(
-        couch_stats_collector:get(hoopla),
-        0,
-        "Process count is decremented for each call to track_process_count."
-    ),
-    ok.
-
-test_all() ->
-    couch_stats_collector:record(bar, 0.0),
-    couch_stats_collector:record(bar, 1.0),
-    etap:is(
-        lists:sort(couch_stats_collector:all()),
-        [ {bar,[1.0,0.0]}, {foo,0}, {hoopla,0} ],
-        "all/0 returns all counters and absolute values."
-    ),
-    
-    etap:is(
-        lists:sort(couch_stats_collector:all(incremental)),
-        [ {foo, 0}, {hoopla, 0} ],
-        "all/1 returns only the specified type."
-    ),
-    
-    couch_stats_collector:record(zing, 90),
-    etap:is(
-        lists:sort(couch_stats_collector:all(absolute)),
-        [ {bar,[1.0,0.0]}, {zing,"Z"} ],
-        "all/1 returns only the specified type."
-    ),
-    ok.
-
-repeat(_, 0) ->
-    ok;
-repeat(Fun, Count) ->
-    Fun(),
-    repeat(Fun, Count-1).
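
The collector API exercised above distinguishes counters (increment/decrement) from absolute values (record/clear); a minimal sketch — the stat names below are only examples, the tests themselves use plain atoms and tuples as keys:

    couch_stats_collector:start(),
    couch_stats_collector:increment(open_databases),   % counter-style stat
    couch_stats_collector:decrement(open_databases),
    0 = couch_stats_collector:get(open_databases),
    couch_stats_collector:record(request_time, 42),    % absolute-value stat
    couch_stats_collector:clear(request_time),
    %% track_process_count/1 bumps a counter that is decremented again when
    %% the calling process exits (tested above with short-lived spawns).
    couch_stats_collector:all(incremental).            % counters only; all(absolute) for recorded values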

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/test/etap/121-stats-aggregates.cfg
----------------------------------------------------------------------
diff --git a/test/etap/121-stats-aggregates.cfg b/test/etap/121-stats-aggregates.cfg
deleted file mode 100644
index 30e475d..0000000
--- a/test/etap/121-stats-aggregates.cfg
+++ /dev/null
@@ -1,19 +0,0 @@
-% Licensed to the Apache Software Foundation (ASF) under one
-% or more contributor license agreements.  See the NOTICE file
-% distributed with this work for additional information
-% regarding copyright ownership.  The ASF licenses this file
-% to you under the Apache License, Version 2.0 (the
-% "License"); you may not use this file except in compliance
-% with the License.  You may obtain a copy of the License at
-% 
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing,
-% software distributed under the License is distributed on an
-% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-% KIND, either express or implied.  See the License for the
-% specific language governing permissions and limitations
-% under the License.
-
-{testing, stuff, "yay description"}.
-{number, '11', "randomosity"}.

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/test/etap/121-stats-aggregates.ini
----------------------------------------------------------------------
diff --git a/test/etap/121-stats-aggregates.ini b/test/etap/121-stats-aggregates.ini
deleted file mode 100644
index cc5cd21..0000000
--- a/test/etap/121-stats-aggregates.ini
+++ /dev/null
@@ -1,20 +0,0 @@
-; Licensed to the Apache Software Foundation (ASF) under one
-; or more contributor license agreements.  See the NOTICE file
-; distributed with this work for additional information
-; regarding copyright ownership.  The ASF licenses this file
-; to you under the Apache License, Version 2.0 (the
-; "License"); you may not use this file except in compliance
-; with the License.  You may obtain a copy of the License at
-; 
-;   http://www.apache.org/licenses/LICENSE-2.0
-;
-; Unless required by applicable law or agreed to in writing,
-; software distributed under the License is distributed on an
-; "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-; KIND, either express or implied.  See the License for the
-; specific language governing permissions and limitations
-; under the License.
-
-[stats]
-rate = 10000000 ; We call collect_sample in testing
-samples = [0, 1]

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/test/etap/121-stats-aggregates.t
----------------------------------------------------------------------
diff --git a/test/etap/121-stats-aggregates.t b/test/etap/121-stats-aggregates.t
deleted file mode 100755
index d678aa9..0000000
--- a/test/etap/121-stats-aggregates.t
+++ /dev/null
@@ -1,171 +0,0 @@
-#!/usr/bin/env escript
-%% -*- erlang -*-
-
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
-ini_file() ->
-    test_util:source_file("test/etap/121-stats-aggregates.ini").
-
-cfg_file() ->
-    test_util:source_file("test/etap/121-stats-aggregates.cfg").
-
-main(_) ->
-    test_util:init_code_path(),
-    etap:plan(17),
-    case (catch test()) of
-        ok ->
-            etap:end_tests();
-        Other ->
-            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
-            etap:bail()
-    end,
-    ok.
-
-test() ->
-    couch_config:start_link([ini_file()]),
-    couch_stats_collector:start(),
-    couch_stats_aggregator:start(cfg_file()),
-    ok = test_all_empty(),
-    ok = test_get_empty(),
-    ok = test_count_stats(),
-    ok = test_abs_stats(),
-    ok.
-
-test_all_empty() ->
-    {Aggs} = couch_stats_aggregator:all(),
-
-    etap:is(length(Aggs), 2, "There are only two aggregate types in testing."),
-    etap:is(
-        couch_util:get_value(testing, Aggs),
-        {[{stuff, make_agg(<<"yay description">>,
-            null, null, null, null, null)}]},
-        "{testing, stuff} is empty at start."
-    ),
-    etap:is(
-        couch_util:get_value(number, Aggs),
-        {[{'11', make_agg(<<"randomosity">>,
-            null, null, null, null, null)}]},
-        "{number, '11'} is empty at start."
-    ),
-    ok.
-    
-test_get_empty() ->
-    etap:is(
-        couch_stats_aggregator:get_json({testing, stuff}),
-        make_agg(<<"yay description">>, null, null, null, null, null),
-        "Getting {testing, stuff} returns an empty aggregate."
-    ),
-    etap:is(
-        couch_stats_aggregator:get_json({number, '11'}),
-        make_agg(<<"randomosity">>, null, null, null, null, null),
-        "Getting {number, '11'} returns an empty aggregate."
-    ),
-    ok.
-
-test_count_stats() ->
-    lists:foreach(fun(_) ->
-        couch_stats_collector:increment({testing, stuff})
-    end, lists:seq(1, 100)),
-    couch_stats_aggregator:collect_sample(),
-    etap:is(
-        couch_stats_aggregator:get_json({testing, stuff}),
-        make_agg(<<"yay description">>, 100, 100, null, 100, 100),
-        "COUNT: Adding values changes the stats."
-    ),
-    etap:is(
-        couch_stats_aggregator:get_json({testing, stuff}, 1),
-        make_agg(<<"yay description">>, 100, 100, null, 100, 100),
-        "COUNT: Adding values changes stats for all times."
-    ),
-
-    timer:sleep(500),
-    couch_stats_aggregator:collect_sample(),
-    etap:is(
-        couch_stats_aggregator:get_json({testing, stuff}),
-        make_agg(<<"yay description">>, 100, 50, 70.711, 0, 100),
-        "COUNT: Removing values changes stats."
-    ),
-    etap:is(
-        couch_stats_aggregator:get_json({testing, stuff}, 1),
-        make_agg(<<"yay description">>, 100, 50, 70.711, 0, 100),
-        "COUNT: Removing values changes stats for all times."
-    ),
-
-    timer:sleep(600),
-    couch_stats_aggregator:collect_sample(),
-    etap:is(
-        couch_stats_aggregator:get_json({testing, stuff}),
-        make_agg(<<"yay description">>, 100, 33.333, 57.735, 0, 100),
-        "COUNT: Letting time passes doesn't remove data from time 0 aggregates"
-    ),
-    etap:is(
-        couch_stats_aggregator:get_json({testing, stuff}, 1),
-        make_agg(<<"yay description">>, 0, 0, 0, 0, 0),
-        "COUNT: Letting time pass removes data from other time aggregates."
-    ),
-    ok.
-
-test_abs_stats() ->
-    lists:foreach(fun(X) ->
-        couch_stats_collector:record({number, 11}, X)
-    end, lists:seq(0, 10)),
-    couch_stats_aggregator:collect_sample(),
-    etap:is(
-        couch_stats_aggregator:get_json({number, 11}),
-        make_agg(<<"randomosity">>, 5, 5, null, 5, 5),
-        "ABS: Adding values changes the stats."
-    ),
-    etap:is(
-        couch_stats_aggregator:get_json({number, 11}, 1),
-        make_agg(<<"randomosity">>, 5, 5, null, 5, 5),
-        "ABS: Adding values changes stats for all times."
-    ),
-
-    timer:sleep(500),
-    couch_stats_collector:record({number, 11}, 15),
-    couch_stats_aggregator:collect_sample(),
-    etap:is(
-        couch_stats_aggregator:get_json({number, 11}),
-        make_agg(<<"randomosity">>, 20, 10, 7.071, 5, 15),
-        "ABS: New values changes stats"
-    ),
-    etap:is(
-        couch_stats_aggregator:get_json({number, 11}, 1),
-        make_agg(<<"randomosity">>, 20, 10, 7.071, 5, 15),
-        "ABS: Removing values changes stats for all times."
-    ),
-
-    timer:sleep(600),
-    couch_stats_aggregator:collect_sample(),
-    etap:is(
-        couch_stats_aggregator:get_json({number, 11}),
-        make_agg(<<"randomosity">>, 20, 10, 7.071, 5, 15),
-        "ABS: Letting time passes doesn't remove data from time 0 aggregates"
-    ),
-    etap:is(
-        couch_stats_aggregator:get_json({number, 11}, 1),
-        make_agg(<<"randomosity">>, 15, 15, null, 15, 15),
-        "ABS: Letting time pass removes data from other time aggregates."
-    ),
-    ok.
-
-make_agg(Desc, Sum, Mean, StdDev, Min, Max) ->
-    {[
-        {description, Desc},
-        {current, Sum},
-        {sum, Sum},
-        {mean, Mean},
-        {stddev, StdDev},
-        {min, Min},
-        {max, Max}
-    ]}.
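
For reference, the aggregator sits on top of the collector and produces the {description, current, sum, mean, stddev, min, max} shape built by make_agg/6 above. A minimal sketch of driving it by hand, using the two fixture files from this directory (paths as in this pre-move layout):

    %% Sampling is normally timer-driven via the [stats] rate setting in the
    %% .ini fixture; collect_sample/0 forces one sample, as the test does.
    couch_config:start_link(
        [test_util:source_file("test/etap/121-stats-aggregates.ini")]),
    couch_stats_collector:start(),
    couch_stats_aggregator:start(
        test_util:source_file("test/etap/121-stats-aggregates.cfg")),
    couch_stats_collector:increment({testing, stuff}),
    couch_stats_aggregator:collect_sample(),
    couch_stats_aggregator:get_json({testing, stuff}).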

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/test/etap/130-attachments-md5.t
----------------------------------------------------------------------
diff --git a/test/etap/130-attachments-md5.t b/test/etap/130-attachments-md5.t
deleted file mode 100755
index a91c9bf..0000000
--- a/test/etap/130-attachments-md5.t
+++ /dev/null
@@ -1,248 +0,0 @@
-#!/usr/bin/env escript
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
-test_db_name() ->
-    <<"etap-test-db">>.
-
-docid() ->
-    case get(docid) of
-        undefined ->
-            put(docid, 1),
-            "1";
-        Count ->
-            put(docid, Count+1),
-            integer_to_list(Count+1)
-    end.
-
-main(_) ->
-    test_util:init_code_path(),
-    
-    etap:plan(16),
-    case (catch test()) of
-        ok ->
-            etap:end_tests();
-        Other ->
-            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
-            etap:bail(Other)
-    end,
-    ok.
-
-test() ->
-    couch_server_sup:start_link(test_util:config_files()),
-    Addr = couch_config:get("httpd", "bind_address", any),
-    put(addr, Addr),
-    put(port, mochiweb_socket_server:get(couch_httpd, port)),
-    timer:sleep(1000),
-
-    couch_server:delete(test_db_name(), []),
-    couch_db:create(test_db_name(), []),
-
-    test_identity_without_md5(),
-    test_chunked_without_md5(),
-
-    test_identity_with_valid_md5(),
-    test_chunked_with_valid_md5_header(),
-    test_chunked_with_valid_md5_trailer(),
-
-    test_identity_with_invalid_md5(),
-    test_chunked_with_invalid_md5_header(),
-    test_chunked_with_invalid_md5_trailer(),
-
-    couch_server:delete(test_db_name(), []),
-    couch_server_sup:stop(),
-    ok.
-
-test_identity_without_md5() ->
-    Data = [
-        "PUT /", test_db_name(), "/", docid(), "/readme.txt HTTP/1.1\r\n",
-        "Content-Type: text/plain\r\n",
-        "Content-Length: 34\r\n",
-        "\r\n",
-        "We all live in a yellow submarine!"],
-
-    {Code, Json} = do_request(Data),
-    etap:is(Code, 201, "Stored with identity encoding and no MD5"),
-    etap:is(get_json(Json, [<<"ok">>]), true, "Body indicates success.").
-
-test_chunked_without_md5() ->
-    AttData = <<"We all live in a yellow submarine!">>,
-    <<Part1:21/binary, Part2:13/binary>> = AttData,
-    Data = [
-        "PUT /", test_db_name(), "/", docid(), "/readme.txt HTTP/1.1\r\n",
-        "Content-Type: text/plain\r\n",
-        "Transfer-Encoding: chunked\r\n",
-        "\r\n",
-        to_hex(size(Part1)), "\r\n",
-        Part1, "\r\n",
-        to_hex(size(Part2)), "\r\n",
-        Part2, "\r\n"
-        "0\r\n"
-        "\r\n"],
-
-    {Code, Json} = do_request(Data),
-    etap:is(Code, 201, "Stored with chunked encoding and no MD5"),
-    etap:is(get_json(Json, [<<"ok">>]), true, "Body indicates success.").
-
-test_identity_with_valid_md5() ->
-    AttData = "We all live in a yellow submarine!",
-    Data = [
-        "PUT /", test_db_name(), "/", docid(), "/readme.txt HTTP/1.1\r\n",
-        "Content-Type: text/plain\r\n",
-        "Content-Length: 34\r\n",
-        "Content-MD5: ", base64:encode(couch_util:md5(AttData)), "\r\n",
-        "\r\n",
-        AttData],
-
-    {Code, Json} = do_request(Data),
-    etap:is(Code, 201, "Stored with identity encoding and valid MD5"),
-    etap:is(get_json(Json, [<<"ok">>]), true, "Body indicates success.").
-
-test_chunked_with_valid_md5_header() ->
-    AttData = <<"We all live in a yellow submarine!">>,
-    <<Part1:21/binary, Part2:13/binary>> = AttData,
-    Data = [
-        "PUT /", test_db_name(), "/", docid(), "/readme.txt HTTP/1.1\r\n",
-        "Content-Type: text/plain\r\n",
-        "Transfer-Encoding: chunked\r\n",
-        "Content-MD5: ", base64:encode(couch_util:md5(AttData)), "\r\n",
-        "\r\n",
-        to_hex(size(Part1)), "\r\n",
-        Part1, "\r\n",
-        to_hex(size(Part2)), "\r\n",
-        Part2, "\r\n",
-        "0\r\n",
-        "\r\n"],
-
-    {Code, Json} = do_request(Data),
-    etap:is(Code, 201, "Stored with chunked encoding and valid MD5 header."),
-    etap:is(get_json(Json, [<<"ok">>]), true, "Body indicates success.").
-
-test_chunked_with_valid_md5_trailer() ->
-    AttData = <<"We all live in a yellow submarine!">>,
-    <<Part1:21/binary, Part2:13/binary>> = AttData,
-    Data = [
-        "PUT /", test_db_name(), "/", docid(), "/readme.txt HTTP/1.1\r\n",
-        "Content-Type: text/plain\r\n",
-        "Transfer-Encoding: chunked\r\n",
-        "Trailer: Content-MD5\r\n",
-        "\r\n",
-        to_hex(size(Part1)), "\r\n",
-        Part1, "\r\n",
-        to_hex(size(Part2)), "\r\n",
-        Part2, "\r\n",
-        "0\r\n",
-        "Content-MD5: ", base64:encode(couch_util:md5(AttData)), "\r\n",
-        "\r\n"],
-
-    {Code, Json} = do_request(Data),
-    etap:is(Code, 201, "Stored with chunked encoding and valid MD5 trailer."),
-    etap:is(get_json(Json, [<<"ok">>]), true, "Body indicates success.").
-
-test_identity_with_invalid_md5() ->
-    Data = [
-        "PUT /", test_db_name(), "/", docid(), "/readme.txt HTTP/1.1\r\n",
-        "Content-Type: text/plain\r\n",
-        "Content-Length: 34\r\n",
-        "Content-MD5: ", base64:encode(<<"foobar!">>), "\r\n",
-        "\r\n",
-        "We all live in a yellow submarine!"],
-
-    {Code, Json} = do_request(Data),
-    etap:is(Code, 400, "Invalid MD5 header causes an error: identity"),
-    etap:is(
-        get_json(Json, [<<"error">>]),
-        <<"content_md5_mismatch">>,
-        "Body indicates reason for failure."
-    ).
-
-test_chunked_with_invalid_md5_header() ->
-    AttData = <<"We all live in a yellow submarine!">>,
-    <<Part1:21/binary, Part2:13/binary>> = AttData,
-    Data = [
-        "PUT /", test_db_name(), "/", docid(), "/readme.txt HTTP/1.1\r\n",
-        "Content-Type: text/plain\r\n",
-        "Transfer-Encoding: chunked\r\n",
-        "Content-MD5: ", base64:encode(<<"so sneaky...">>), "\r\n",
-        "\r\n",
-        to_hex(size(Part1)), "\r\n",
-        Part1, "\r\n",
-        to_hex(size(Part2)), "\r\n",
-        Part2, "\r\n",
-        "0\r\n",
-        "\r\n"],
-
-    {Code, Json} = do_request(Data),
-    etap:is(Code, 400, "Invalid MD5 header causes an error: chunked"),
-    etap:is(
-        get_json(Json, [<<"error">>]),
-        <<"content_md5_mismatch">>,
-        "Body indicates reason for failure."
-    ).
-
-test_chunked_with_invalid_md5_trailer() ->
-    AttData = <<"We all live in a yellow submarine!">>,
-    <<Part1:21/binary, Part2:13/binary>> = AttData,
-    Data = [
-        "PUT /", test_db_name(), "/", docid(), "/readme.txt HTTP/1.1\r\n",
-        "Content-Type: text/plain\r\n",
-        "Transfer-Encoding: chunked\r\n",
-        "Trailer: Content-MD5\r\n",
-        "\r\n",
-        to_hex(size(Part1)), "\r\n",
-        Part1, "\r\n",
-        to_hex(size(Part2)), "\r\n",
-        Part2, "\r\n",
-        "0\r\n",
-        "Content-MD5: ", base64:encode(<<"Kool-Aid Fountain!">>), "\r\n",
-        "\r\n"],
-
-    {Code, Json} = do_request(Data),
-    etap:is(Code, 400, "Invalid MD5 Trailer causes an error"),
-    etap:is(
-        get_json(Json, [<<"error">>]),
-        <<"content_md5_mismatch">>,
-        "Body indicates reason for failure."
-    ).
-
-
-get_socket() ->
-    Options = [binary, {packet, 0}, {active, false}],
-    {ok, Sock} = gen_tcp:connect(get(addr), get(port), Options),
-    Sock.
-
-do_request(Request) ->
-    Sock = get_socket(),
-    gen_tcp:send(Sock, list_to_binary(lists:flatten(Request))),
-    timer:sleep(1000),
-    {ok, R} = gen_tcp:recv(Sock, 0),
-    gen_tcp:close(Sock),
-    [Header, Body] = re:split(R, "\r\n\r\n", [{return, binary}]),
-    {ok, {http_response, _, Code, _}, _} =
-        erlang:decode_packet(http, Header, []),
-    Json = ejson:decode(Body),
-    {Code, Json}.
-
-get_json(Json, Path) ->
-    couch_util:get_nested_json_value(Json, Path).
-
-to_hex(Val) ->
-    to_hex(Val, []).
-
-to_hex(0, Acc) ->
-    Acc;
-to_hex(Val, Acc) ->
-    to_hex(Val div 16, [hex_char(Val rem 16) | Acc]).
-
-hex_char(V) when V < 10 -> $0 + V;
-hex_char(V) -> $A + V - 10.
-
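
What this test is really asserting: the Content-MD5 header (or chunked-transfer trailer) must carry the base64 of the raw MD5 digest of the attachment body. A minimal sketch of producing both forms with the same helpers used above (AttData is the test's illustrative body):

    AttData = <<"We all live in a yellow submarine!">>,
    ContentMD5 = base64:encode(couch_util:md5(AttData)),
    %% identity upload: send it as a plain header
    IdentityHdr = ["Content-MD5: ", ContentMD5, "\r\n"],
    %% chunked upload: announce "Trailer: Content-MD5", then send the same
    %% value after the terminating "0\r\n" chunk
    <<Part1:21/binary, Part2:13/binary>> = AttData,
    ChunkedBody = [to_hex(size(Part1)), "\r\n", Part1, "\r\n",
                   to_hex(size(Part2)), "\r\n", Part2, "\r\n",
                   "0\r\n",
                   "Content-MD5: ", ContentMD5, "\r\n",
                   "\r\n"].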

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/test/etap/140-attachment-comp.t
----------------------------------------------------------------------
diff --git a/test/etap/140-attachment-comp.t b/test/etap/140-attachment-comp.t
deleted file mode 100755
index 6f075ce..0000000
--- a/test/etap/140-attachment-comp.t
+++ /dev/null
@@ -1,728 +0,0 @@
-#!/usr/bin/env escript
-%% -*- erlang -*-
-
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
-test_db_name() ->
-    <<"couch_test_atts_compression">>.
-
-main(_) ->
-    test_util:init_code_path(),
-
-    etap:plan(85),
-    case (catch test()) of
-        ok ->
-            etap:end_tests();
-        Other ->
-            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
-            etap:bail(Other)
-    end,
-    ok.
-
-test() ->
-    couch_server_sup:start_link(test_util:config_files()),
-    put(addr, couch_config:get("httpd", "bind_address", "127.0.0.1")),
-    put(port, integer_to_list(mochiweb_socket_server:get(couch_httpd, port))),
-    timer:sleep(1000),
-    couch_server:delete(test_db_name(), []),
-    couch_db:create(test_db_name(), []),
-
-    couch_config:set("attachments", "compression_level", "8", false),
-    couch_config:set("attachments", "compressible_types", "text/*", false),
-
-    create_1st_text_att(),
-    create_1st_png_att(),
-    create_2nd_text_att(),
-    create_2nd_png_att(),
-
-    tests_for_1st_text_att(),
-    tests_for_1st_png_att(),
-    tests_for_2nd_text_att(),
-    tests_for_2nd_png_att(),
-
-    create_already_compressed_att(db_url() ++ "/doc_comp_att", "readme.txt"),
-    test_already_compressed_att(db_url() ++ "/doc_comp_att", "readme.txt"),
-
-    test_create_already_compressed_att_with_invalid_content_encoding(
-        db_url() ++ "/doc_att_deflate",
-        "readme.txt",
-        zlib:compress(test_text_data()),
-        "deflate"
-    ),
-
-    % COUCHDB-1711 - avoid a weird timing/scheduling/request handling issue
-    timer:sleep(100),
-
-    test_create_already_compressed_att_with_invalid_content_encoding(
-        db_url() ++ "/doc_att_compress",
-        "readme.txt",
-        % Note: As of OTP R13B04, it seems there's no LZW compression
-        % (i.e. UNIX compress utility implementation) lib in OTP.
-        % However there's a simple working Erlang implementation at:
-        % http://scienceblogs.com/goodmath/2008/01/simple_lempelziv_compression_i.php
-        test_text_data(),
-        "compress"
-    ),
-
-    test_compressible_type_with_parameters(),
-
-    timer:sleep(3000), % to avoid mochiweb socket closed exceptions
-    couch_server:delete(test_db_name(), []),
-    couch_server_sup:stop(),
-    ok.
-
-db_url() ->
-    "http://" ++ get(addr) ++ ":" ++ get(port) ++ "/" ++
-    binary_to_list(test_db_name()).
-
-create_1st_text_att() ->
-    {ok, Code, _Headers, _Body} = test_util:request(
-        db_url() ++ "/testdoc1/readme.txt",
-        [{"Content-Type", "text/plain"}],
-        put,
-        test_text_data()),
-    etap:is(Code, 201, "Created text attachment using the standalone api"),
-    ok.
-
-create_1st_png_att() ->
-    {ok, Code, _Headers, _Body} = test_util:request(
-        db_url() ++ "/testdoc2/icon.png",
-        [{"Content-Type", "image/png"}],
-        put,
-        test_png_data()),
-    etap:is(Code, 201, "Created png attachment using the standalone api"),
-    ok.
-
-% create a text attachment using the non-standalone attachment api
-create_2nd_text_att() ->
-    DocJson = {[
-        {<<"_attachments">>, {[
-            {<<"readme.txt">>, {[
-                {<<"content_type">>, <<"text/plain">>},
-                {<<"data">>, base64:encode(test_text_data())}
-            ]}
-        }]}}
-    ]},
-    {ok, Code, _Headers, _Body} = test_util:request(
-        db_url() ++ "/testdoc3",
-        [{"Content-Type", "application/json"}],
-        put,
-        ejson:encode(DocJson)),
-    etap:is(Code, 201, "Created text attachment using the non-standalone api"),
-    ok.
-
-% create a png attachment using the non-standalone attachment api
-create_2nd_png_att() ->
-    DocJson = {[
-        {<<"_attachments">>, {[
-            {<<"icon.png">>, {[
-                {<<"content_type">>, <<"image/png">>},
-                {<<"data">>, base64:encode(test_png_data())}
-            ]}
-        }]}}
-    ]},
-    {ok, Code, _Headers, _Body} = test_util:request(
-        db_url() ++ "/testdoc4",
-        [{"Content-Type", "application/json"}],
-        put,
-        ejson:encode(DocJson)),
-    etap:is(Code, 201, "Created png attachment using the non-standalone api"),
-    ok.
-
-create_already_compressed_att(DocUri, AttName) ->
-    {ok, Code, _Headers, _Body} = test_util:request(
-        DocUri ++ "/" ++ AttName,
-        [{"Content-Type", "text/plain"}, {"Content-Encoding", "gzip"}],
-        put,
-        zlib:gzip(test_text_data())),
-    etap:is(
-        Code,
-        201,
-        "Created already compressed attachment using the standalone api"
-    ),
-    ok.
-
-tests_for_1st_text_att() ->
-    test_get_1st_text_att_with_accept_encoding_gzip(),
-    test_get_1st_text_att_without_accept_encoding_header(),
-    test_get_1st_text_att_with_accept_encoding_deflate(),
-    test_get_1st_text_att_with_accept_encoding_deflate_only(),
-    test_get_doc_with_1st_text_att(),
-    test_1st_text_att_stub().
-
-tests_for_1st_png_att() ->
-    test_get_1st_png_att_without_accept_encoding_header(),
-    test_get_1st_png_att_with_accept_encoding_gzip(),
-    test_get_1st_png_att_with_accept_encoding_deflate(),
-    test_get_doc_with_1st_png_att(),
-    test_1st_png_att_stub().
-
-tests_for_2nd_text_att() ->
-    test_get_2nd_text_att_with_accept_encoding_gzip(),
-    test_get_2nd_text_att_without_accept_encoding_header(),
-    test_get_doc_with_2nd_text_att(),
-    test_2nd_text_att_stub().
-
-tests_for_2nd_png_att() ->
-    test_get_2nd_png_att_without_accept_encoding_header(),
-    test_get_2nd_png_att_with_accept_encoding_gzip(),
-    test_get_doc_with_2nd_png_att(),
-    test_2nd_png_att_stub().
-
-test_get_1st_text_att_with_accept_encoding_gzip() ->
-    {ok, Code, Headers, Body} = test_util:request(
-        db_url() ++ "/testdoc1/readme.txt",
-        [{"Accept-Encoding", "gzip"}],
-        get),
-    etap:is(Code, 200, "HTTP response code is 200"),
-    Gziped = lists:member({"Content-Encoding", "gzip"}, Headers),
-    etap:is(Gziped, true, "received body is gzipped"),
-    Uncompressed = zlib:gunzip(iolist_to_binary(Body)),
-    etap:is(
-        Uncompressed,
-        test_text_data(),
-        "received data for the 1st text attachment is ok"
-    ),
-    ok.
-
-test_get_1st_text_att_without_accept_encoding_header() ->
-    {ok, Code, Headers, Body} = test_util:request(
-        db_url() ++ "/testdoc1/readme.txt",
-        [],
-        get),
-    etap:is(Code, 200, "HTTP response code is 200"),
-    Gziped = lists:member({"Content-Encoding", "gzip"}, Headers),
-    etap:is(Gziped, false, "received body is not gzipped"),
-    etap:is(
-        iolist_to_binary(Body),
-        test_text_data(),
-        "received data for the 1st text attachment is ok"
-    ),
-    ok.
-
-test_get_1st_text_att_with_accept_encoding_deflate() ->
-    {ok, Code, Headers, Body} = test_util:request(
-        db_url() ++ "/testdoc1/readme.txt",
-        [{"Accept-Encoding", "deflate"}],
-        get),
-    etap:is(Code, 200, "HTTP response code is 200"),
-    Gziped = lists:member({"Content-Encoding", "gzip"}, Headers),
-    etap:is(Gziped, false, "received body is not gzipped"),
-    Deflated = lists:member({"Content-Encoding", "deflate"}, Headers),
-    etap:is(Deflated, false, "received body is not deflated"),
-    etap:is(
-        iolist_to_binary(Body),
-        test_text_data(),
-        "received data for the 1st text attachment is ok"
-    ),
-    ok.
-
-test_get_1st_text_att_with_accept_encoding_deflate_only() ->
-    {ok, Code, _Headers, _Body} = test_util:request(
-        db_url() ++ "/testdoc1/readme.txt",
-        [{"Accept-Encoding", "deflate, *;q=0"}],
-        get),
-    etap:is(
-        Code,
-        406,
-        "HTTP response code is 406 for an unsupported content encoding request"
-    ),
-    ok.
-
-test_get_1st_png_att_without_accept_encoding_header() ->
-    {ok, Code, Headers, Body} = test_util:request(
-        db_url() ++ "/testdoc2/icon.png",
-        [],
-        get),
-    etap:is(Code, 200, "HTTP response code is 200"),
-    Encoding = couch_util:get_value("Content-Encoding", Headers),
-    etap:is(Encoding, undefined, "received body is not gzipped"),
-    etap:is(
-        iolist_to_binary(Body),
-        test_png_data(),
-        "received data for the 1st png attachment is ok"
-    ),
-    ok.
-
-test_get_1st_png_att_with_accept_encoding_gzip() ->
-    {ok, Code, Headers, Body} = test_util:request(
-        db_url() ++ "/testdoc2/icon.png",
-        [{"Accept-Encoding", "gzip"}],
-        get),
-    etap:is(Code, 200, "HTTP response code is 200"),
-    Encoding = couch_util:get_value("Content-Encoding", Headers),
-    etap:is(Encoding, undefined, "received body is not gzipped"),
-    etap:is(
-        iolist_to_binary(Body),
-        test_png_data(),
-        "received data for the 1st png attachment is ok"
-    ),
-    ok.
-
-test_get_1st_png_att_with_accept_encoding_deflate() ->
-    {ok, Code, Headers, Body} = test_util:request(
-        db_url() ++ "/testdoc2/icon.png",
-        [{"Accept-Encoding", "deflate"}],
-        get),
-    etap:is(Code, 200, "HTTP response code is 200"),
-    Encoding = couch_util:get_value("Content-Encoding", Headers),
-    etap:is(Encoding, undefined, "received body is in identity form"),
-    etap:is(
-        iolist_to_binary(Body),
-        test_png_data(),
-        "received data for the 1st png attachment is ok"
-    ),
-    ok.
-
-test_get_doc_with_1st_text_att() ->
-    {ok, Code, _Headers, Body} = test_util:request(
-        db_url() ++ "/testdoc1?attachments=true",
-        [{"Accept", "application/json"}],
-        get),
-    etap:is(Code, 200, "HTTP response code is 200"),
-    Json = ejson:decode(Body),
-    TextAttJson = couch_util:get_nested_json_value(
-        Json,
-        [<<"_attachments">>, <<"readme.txt">>]
-    ),
-    TextAttType = couch_util:get_nested_json_value(
-        TextAttJson,
-        [<<"content_type">>]
-    ),
-    TextAttData = couch_util:get_nested_json_value(
-        TextAttJson,
-        [<<"data">>]
-    ),
-    etap:is(
-        TextAttType,
-        <<"text/plain">>,
-        "1st text attachment has type text/plain"
-    ),
-    %% check the attachment's data is the base64 encoding of the plain text
-    %% and not the base64 encoding of the gzipped plain text
-    etap:is(
-        TextAttData,
-        base64:encode(test_text_data()),
-        "1st text attachment data is properly base64 encoded"
-    ),
-    ok.
-
-test_1st_text_att_stub() ->
-    {ok, Code, _Headers, Body} = test_util:request(
-        db_url() ++ "/testdoc1?att_encoding_info=true",
-        [],
-        get),
-    etap:is(Code, 200, "HTTP response code is 200"),
-    Json = ejson:decode(Body),
-    {TextAttJson} = couch_util:get_nested_json_value(
-        Json,
-        [<<"_attachments">>, <<"readme.txt">>]
-    ),
-    TextAttLength = couch_util:get_value(<<"length">>, TextAttJson),
-    etap:is(
-        TextAttLength,
-        byte_size(test_text_data()),
-        "1st text attachment stub length matches the uncompressed length"
-    ),
-    TextAttEncoding = couch_util:get_value(<<"encoding">>, TextAttJson),
-    etap:is(
-        TextAttEncoding,
-        <<"gzip">>,
-        "1st text attachment stub has the encoding field set to gzip"
-    ),
-    TextAttEncLength = couch_util:get_value(<<"encoded_length">>, TextAttJson),
-    etap:is(
-        TextAttEncLength,
-        iolist_size(zlib:gzip(test_text_data())),
-        "1st text attachment stub encoded_length matches the compressed length"
-    ),
-    ok.
-
-test_get_doc_with_1st_png_att() ->
-    {ok, Code, _Headers, Body} = test_util:request(
-        db_url() ++ "/testdoc2?attachments=true",
-        [{"Accept", "application/json"}],
-        get),
-    etap:is(Code, 200, "HTTP response code is 200"),
-    Json = ejson:decode(Body),
-    PngAttJson = couch_util:get_nested_json_value(
-        Json,
-        [<<"_attachments">>, <<"icon.png">>]
-    ),
-    PngAttType = couch_util:get_nested_json_value(
-        PngAttJson,
-        [<<"content_type">>]
-    ),
-    PngAttData = couch_util:get_nested_json_value(
-        PngAttJson,
-        [<<"data">>]
-    ),
-    etap:is(PngAttType, <<"image/png">>, "attachment has type image/png"),
-    etap:is(
-        PngAttData,
-        base64:encode(test_png_data()),
-        "1st png attachment data is properly base64 encoded"
-    ),
-    ok.
-
-test_1st_png_att_stub() ->
-    {ok, Code, _Headers, Body} = test_util:request(
-        db_url() ++ "/testdoc2?att_encoding_info=true",
-        [{"Accept", "application/json"}],
-        get),
-    etap:is(Code, 200, "HTTP response code is 200"),
-    Json = ejson:decode(Body),
-    {PngAttJson} = couch_util:get_nested_json_value(
-        Json,
-        [<<"_attachments">>, <<"icon.png">>]
-    ),
-    PngAttLength = couch_util:get_value(<<"length">>, PngAttJson),
-    etap:is(
-        PngAttLength,
-        byte_size(test_png_data()),
-        "1st png attachment stub length matches the uncompressed length"
-    ),
-    PngEncoding = couch_util:get_value(<<"encoding">>, PngAttJson),
-    etap:is(
-        PngEncoding,
-        undefined,
-        "1st png attachment stub doesn't have an encoding field"
-    ),
-    PngEncLength = couch_util:get_value(<<"encoded_length">>, PngAttJson),
-    etap:is(
-        PngEncLength,
-        undefined,
-        "1st png attachment stub doesn't have an encoded_length field"
-    ),
-    ok.
-
-test_get_2nd_text_att_with_accept_encoding_gzip() ->
-    {ok, Code, Headers, Body} = test_util:request(
-        db_url() ++ "/testdoc3/readme.txt",
-        [{"Accept-Encoding", "gzip"}],
-        get),
-    etap:is(Code, 200, "HTTP response code is 200"),
-    Gziped = lists:member({"Content-Encoding", "gzip"}, Headers),
-    etap:is(Gziped, true, "received body is gzipped"),
-    Uncompressed = zlib:gunzip(iolist_to_binary(Body)),
-    etap:is(
-        Uncompressed,
-        test_text_data(),
-        "received data for the 2nd text attachment is ok"
-    ),
-    ok.
-
-test_get_2nd_text_att_without_accept_encoding_header() ->
-    {ok, Code, Headers, Body} = test_util:request(
-        db_url() ++ "/testdoc3/readme.txt",
-        [],
-        get),
-    etap:is(Code, 200, "HTTP response code is 200"),
-    Gziped = lists:member({"Content-Encoding", "gzip"}, Headers),
-    etap:is(Gziped, false, "received body is not gzipped"),
-    etap:is(
-        Body,
-        test_text_data(),
-        "received data for the 2nd text attachment is ok"
-    ),
-    ok.
-
-test_get_2nd_png_att_without_accept_encoding_header() ->
-    {ok, Code, Headers, Body} = test_util:request(
-        db_url() ++ "/testdoc4/icon.png",
-        [],
-        get),
-    etap:is(Code, 200, "HTTP response code is 200"),
-    Gziped = lists:member({"Content-Encoding", "gzip"}, Headers),
-    etap:is(Gziped, false, "received body is not gzipped"),
-    etap:is(
-        Body,
-        test_png_data(),
-        "received data for the 2nd png attachment is ok"
-    ),
-    ok.
-
-test_get_2nd_png_att_with_accept_encoding_gzip() ->
-    {ok, Code, Headers, Body} = test_util:request(
-        db_url() ++ "/testdoc4/icon.png",
-        [{"Accept-Encoding", "gzip"}],
-        get),
-    etap:is(Code, 200, "HTTP response code is 200"),
-    Gziped = lists:member({"Content-Encoding", "gzip"}, Headers),
-    etap:is(Gziped, false, "received body is not gzipped"),
-    etap:is(
-        Body,
-        test_png_data(),
-        "received data for the 2nd png attachment is ok"
-    ),
-    ok.
-
-test_get_doc_with_2nd_text_att() ->
-    {ok, Code, _Headers, Body} = test_util:request(
-        db_url() ++ "/testdoc3?attachments=true",
-        [{"Accept", "application/json"}],
-        get),
-    etap:is(Code, 200, "HTTP response code is 200"),
-    Json = ejson:decode(Body),
-    TextAttJson = couch_util:get_nested_json_value(
-        Json,
-        [<<"_attachments">>, <<"readme.txt">>]
-    ),
-    TextAttType = couch_util:get_nested_json_value(
-        TextAttJson,
-        [<<"content_type">>]
-    ),
-    TextAttData = couch_util:get_nested_json_value(
-        TextAttJson,
-        [<<"data">>]
-    ),
-    etap:is(TextAttType, <<"text/plain">>, "attachment has type text/plain"),
-    %% check the attachment's data is the base64 encoding of the plain text
-    %% and not the base64 encoding of the gzipped plain text
-    etap:is(
-        TextAttData,
-        base64:encode(test_text_data()),
-        "2nd text attachment data is properly base64 encoded"
-    ),
-    ok.
-
-test_2nd_text_att_stub() ->
-    {ok, Code, _Headers, Body} = test_util:request(
-        db_url() ++ "/testdoc3?att_encoding_info=true",
-        [],
-        get),
-    etap:is(Code, 200, "HTTP response code is 200"),
-    Json = ejson:decode(Body),
-    {TextAttJson} = couch_util:get_nested_json_value(
-        Json,
-        [<<"_attachments">>, <<"readme.txt">>]
-    ),
-    TextAttLength = couch_util:get_value(<<"length">>, TextAttJson),
-    etap:is(
-        TextAttLength,
-        byte_size(test_text_data()),
-        "2nd text attachment stub length matches the uncompressed length"
-    ),
-    TextAttEncoding = couch_util:get_value(<<"encoding">>, TextAttJson),
-    etap:is(
-        TextAttEncoding,
-        <<"gzip">>,
-        "2nd text attachment stub has the encoding field set to gzip"
-    ),
-    TextAttEncLength = couch_util:get_value(<<"encoded_length">>, TextAttJson),
-    etap:is(
-        TextAttEncLength,
-        iolist_size(zlib:gzip(test_text_data())),
-        "2nd text attachment stub encoded_length matches the compressed length"
-    ),
-    ok.
-
-test_get_doc_with_2nd_png_att() ->
-    {ok, Code, _Headers, Body} = test_util:request(
-        db_url() ++ "/testdoc4?attachments=true",
-        [{"Accept", "application/json"}],
-        get),
-    etap:is(Code, 200, "HTTP response code is 200"),
-    Json = ejson:decode(Body),
-    PngAttJson = couch_util:get_nested_json_value(
-        Json,
-        [<<"_attachments">>, <<"icon.png">>]
-    ),
-    PngAttType = couch_util:get_nested_json_value(
-        PngAttJson,
-        [<<"content_type">>]
-    ),
-    PngAttData = couch_util:get_nested_json_value(
-        PngAttJson,
-        [<<"data">>]
-    ),
-    etap:is(PngAttType, <<"image/png">>, "attachment has type image/png"),
-    etap:is(
-        PngAttData,
-        base64:encode(test_png_data()),
-        "2nd png attachment data is properly base64 encoded"
-    ),
-    ok.
-
-test_2nd_png_att_stub() ->
-    {ok, Code, _Headers, Body} = test_util:request(
-        db_url() ++ "/testdoc4?att_encoding_info=true",
-        [],
-        get),
-    etap:is(Code, 200, "HTTP response code is 200"),
-    Json = ejson:decode(Body),
-    {PngAttJson} = couch_util:get_nested_json_value(
-        Json,
-        [<<"_attachments">>, <<"icon.png">>]
-    ),
-    PngAttLength = couch_util:get_value(<<"length">>, PngAttJson),
-    etap:is(
-        PngAttLength,
-        byte_size(test_png_data()),
-        "2nd png attachment stub length matches the uncompressed length"
-    ),
-    PngEncoding = couch_util:get_value(<<"encoding">>, PngAttJson),
-    etap:is(
-        PngEncoding,
-        undefined,
-        "2nd png attachment stub doesn't have an encoding field"
-    ),
-    PngEncLength = couch_util:get_value(<<"encoded_length">>, PngAttJson),
-    etap:is(
-        PngEncLength,
-        undefined,
-        "2nd png attachment stub doesn't have an encoded_length field"
-    ),
-    ok.
-
-test_already_compressed_att(DocUri, AttName) ->
-    test_get_already_compressed_att_with_accept_gzip(DocUri, AttName),
-    test_get_already_compressed_att_without_accept(DocUri, AttName),
-    test_get_already_compressed_att_stub(DocUri, AttName).
-
-test_get_already_compressed_att_with_accept_gzip(DocUri, AttName) ->
-    {ok, Code, Headers, Body} = test_util:request(
-        DocUri ++ "/" ++ AttName,
-        [{"Accept-Encoding", "gzip"}],
-        get),
-    etap:is(Code, 200, "HTTP response code is 200"),
-    Gziped = lists:member({"Content-Encoding", "gzip"}, Headers),
-    etap:is(Gziped, true, "received body is gzipped"),
-    etap:is(
-        Body,
-        zlib:gzip(test_text_data()),
-        "received data for the already compressed attachment is ok"
-    ),
-    ok.
-
-test_get_already_compressed_att_without_accept(DocUri, AttName) ->
-    {ok, Code, Headers, Body} = test_util:request(
-        DocUri ++ "/" ++ AttName,
-        [],
-        get),
-    etap:is(Code, 200, "HTTP response code is 200"),
-    Gziped = lists:member({"Content-Encoding", "gzip"}, Headers),
-    etap:is(Gziped, false, "received body is not gzipped"),
-    etap:is(
-        Body,
-        test_text_data(),
-        "received data for the already compressed attachment is ok"
-    ),
-    ok.
-
-test_get_already_compressed_att_stub(DocUri, AttName) ->
-    {ok, Code, _Headers, Body} = test_util:request(
-        DocUri ++ "?att_encoding_info=true",
-        [],
-        get),
-    etap:is(Code, 200, "HTTP response code is 200"),
-    Json = ejson:decode(Body),
-    {AttJson} = couch_util:get_nested_json_value(
-        Json,
-        [<<"_attachments">>, iolist_to_binary(AttName)]
-    ),
-    AttLength = couch_util:get_value(<<"length">>, AttJson),
-    etap:is(
-        AttLength,
-        iolist_size((zlib:gzip(test_text_data()))),
-        "Already compressed attachment stub length matches the "
-        "compressed length"
-    ),
-    Encoding = couch_util:get_value(<<"encoding">>, AttJson),
-    etap:is(
-        Encoding,
-        <<"gzip">>,
-        "Already compressed attachment stub has the encoding field set to gzip"
-    ),
-    EncLength = couch_util:get_value(<<"encoded_length">>, AttJson),
-    etap:is(
-        EncLength,
-        AttLength,
-        "Already compressed attachment stub encoded_length matches the "
-        "length field value"
-    ),
-    ok.
-
-test_create_already_compressed_att_with_invalid_content_encoding(
-    DocUri, AttName, AttData, Encoding) ->
-    {ok, Code, _Headers, _Body} = test_util:request(
-        DocUri ++ "/" ++ AttName,
-        [{"Content-Encoding", Encoding}, {"Content-Type", "text/plain"}],
-        put,
-        AttData),
-    etap:is(
-        Code,
-        415,
-        "Couldn't create an already compressed attachment using the "
-        "unsupported encoding '" ++ Encoding ++ "'"
-    ),
-    ok.
-
-test_compressible_type_with_parameters() ->
-    {ok, Code, _Headers, _Body} = test_util:request(
-        db_url() ++ "/testdoc5/readme.txt",
-        [{"Content-Type", "text/plain; charset=UTF-8"}],
-        put,
-        test_text_data()),
-    etap:is(Code, 201, "Created text attachment with MIME type "
-        "'text/plain; charset=UTF-8' using the standalone api"),
-    {ok, Code2, Headers2, Body} = test_util:request(
-        db_url() ++ "/testdoc5/readme.txt",
-        [{"Accept-Encoding", "gzip"}],
-        get),
-    etap:is(Code2, 200, "HTTP response code is 200"),
-    Gziped = lists:member({"Content-Encoding", "gzip"}, Headers2),
-    etap:is(Gziped, true, "received body is gzipped"),
-    Uncompressed = zlib:gunzip(iolist_to_binary(Body)),
-    etap:is(Uncompressed, test_text_data(), "received data is gzipped"),
-    {ok, Code3, _Headers3, Body3} = test_util:request(
-        db_url() ++ "/testdoc5?att_encoding_info=true",
-        [],
-        get),
-    etap:is(Code3, 200, "HTTP response code is 200"),
-    Json = ejson:decode(Body3),
-    {TextAttJson} = couch_util:get_nested_json_value(
-        Json,
-        [<<"_attachments">>, <<"readme.txt">>]
-    ),
-    TextAttLength = couch_util:get_value(<<"length">>, TextAttJson),
-    etap:is(
-        TextAttLength,
-        byte_size(test_text_data()),
-        "text attachment stub length matches the uncompressed length"
-    ),
-    TextAttEncoding = couch_util:get_value(<<"encoding">>, TextAttJson),
-    etap:is(
-        TextAttEncoding,
-        <<"gzip">>,
-        "text attachment stub has the encoding field set to gzip"
-    ),
-    TextAttEncLength = couch_util:get_value(<<"encoded_length">>, TextAttJson),
-    etap:is(
-        TextAttEncLength,
-        iolist_size(zlib:gzip(test_text_data())),
-        "text attachment stub encoded_length matches the compressed length"
-    ),
-    ok.
-
-test_png_data() ->
-    {ok, Data} = file:read_file(
-        test_util:source_file("share/www/image/logo.png")
-    ),
-    Data.
-
-test_text_data() ->
-    {ok, Data} = file:read_file(
-        test_util:source_file("README.rst")
-    ),
-    Data.
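
The behaviour this file pins down, in short: with the [attachments] compression_level and compressible_types settings used in test/0 above, text attachments are stored gzipped, and the wire encoding then follows the request's Accept-Encoding header. A minimal sketch of checking one case with the same test_util:request helper (AttUrl is assumed to be a full attachment URL, as built from db_url/0 above):

    %% These settings must be in effect when the attachment is written.
    ok = couch_config:set("attachments", "compression_level", "8", false),
    ok = couch_config:set("attachments", "compressible_types", "text/*", false),
    {ok, 200, Headers, Body} =
        test_util:request(AttUrl, [{"Accept-Encoding", "gzip"}], get),
    true = lists:member({"Content-Encoding", "gzip"}, Headers),
    PlainText = zlib:gunzip(iolist_to_binary(Body)).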


[09/12] move test -> src/test

Posted by be...@apache.org.
http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/src/test/etap/076-file-compression.t
----------------------------------------------------------------------
diff --git a/src/test/etap/076-file-compression.t b/src/test/etap/076-file-compression.t
new file mode 100755
index 0000000..2929230
--- /dev/null
+++ b/src/test/etap/076-file-compression.t
@@ -0,0 +1,186 @@
+#!/usr/bin/env escript
+%% -*- erlang -*-
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+%   http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+-record(user_ctx, {
+    name = null,
+    roles = [],
+    handler
+}).
+
+test_db_name() -> <<"couch_test_file_compression">>.
+ddoc_id() -> <<"_design/test">>.
+num_docs() -> 5000.
+
+
+main(_) ->
+    test_util:init_code_path(),
+
+    etap:plan(10),
+    case (catch test()) of
+        ok ->
+            etap:end_tests();
+        Other ->
+            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
+            etap:bail(Other)
+    end,
+    ok.
+
+
+test() ->
+    couch_server_sup:start_link(test_util:config_files()),
+    couch_config:set("couchdb", "file_compression", "none", false),
+
+    create_database(),
+    compact_db(),
+    compact_view(),
+    DbDiskSize1 = db_disk_size(),
+    ViewDiskSize1 = view_disk_size(),
+
+    couch_config:set("couchdb", "file_compression", "snappy", false),
+    compact_db(),
+    compact_view(),
+    DbDiskSize2 = db_disk_size(),
+    ViewDiskSize2 = view_disk_size(),
+
+    etap:is(DbDiskSize2 < DbDiskSize1, true, "Database disk size decreased"),
+    etap:is(ViewDiskSize2 < ViewDiskSize1, true, "Index disk size decreased"),
+
+    couch_config:set("couchdb", "file_compression", "deflate_9", false),
+    compact_db(),
+    compact_view(),
+    DbDiskSize3 = db_disk_size(),
+    ViewDiskSize3 = view_disk_size(),
+
+    etap:is(DbDiskSize3 < DbDiskSize2, true, "Database disk size decreased again"),
+    etap:is(ViewDiskSize3 < ViewDiskSize2, true, "Index disk size decreased again"),
+
+    couch_config:set("couchdb", "file_compression", "deflate_1", false),
+    compact_db(),
+    compact_view(),
+    DbDiskSize4 = db_disk_size(),
+    ViewDiskSize4 = view_disk_size(),
+
+    etap:is(DbDiskSize4 > DbDiskSize3, true, "Database disk size increased"),
+    etap:is(ViewDiskSize4 > ViewDiskSize3, true, "Index disk size increased"),
+
+    couch_config:set("couchdb", "file_compression", "snappy", false),
+    compact_db(),
+    compact_view(),
+    DbDiskSize5 = db_disk_size(),
+    ViewDiskSize5 = view_disk_size(),
+
+    etap:is(DbDiskSize5 > DbDiskSize4, true, "Database disk size increased again"),
+    etap:is(ViewDiskSize5 > ViewDiskSize4, true, "Index disk size increased again"),
+
+    couch_config:set("couchdb", "file_compression", "none", false),
+    compact_db(),
+    compact_view(),
+    DbDiskSize6 = db_disk_size(),
+    ViewDiskSize6 = view_disk_size(),
+
+    etap:is(DbDiskSize6 > DbDiskSize5, true, "Database disk size increased again"),
+    etap:is(ViewDiskSize6 > ViewDiskSize5, true, "Index disk size increased again"),
+
+    delete_db(),
+    couch_server_sup:stop(),
+    ok.
+
+
+create_database() ->
+    {ok, Db} = couch_db:create(
+        test_db_name(),
+        [{user_ctx, #user_ctx{roles = [<<"_admin">>]}}, overwrite]),
+    ok = populate_db(Db, num_docs()),
+    DDoc = couch_doc:from_json_obj({[
+        {<<"_id">>, ddoc_id()},
+        {<<"language">>, <<"javascript">>},
+        {<<"views">>, {[
+                {<<"view1">>, {[
+                    {<<"map">>, <<"function(doc) { emit(doc._id, doc.string); }">>}
+                ]}}
+            ]}
+        }
+    ]}),
+    {ok, _} = couch_db:update_doc(Db, DDoc, []),
+    refresh_index(),
+    ok = couch_db:close(Db).
+
+
+populate_db(_Db, NumDocs) when NumDocs =< 0 ->
+    ok;
+populate_db(Db, NumDocs) ->
+    Docs = lists:map(
+        fun(_) ->
+            couch_doc:from_json_obj({[
+                {<<"_id">>, couch_uuids:random()},
+                {<<"string">>, list_to_binary(lists:duplicate(1000, $X))}
+            ]})
+        end,
+        lists:seq(1, 500)),
+    {ok, _} = couch_db:update_docs(Db, Docs, []),
+    populate_db(Db, NumDocs - 500).
+
+
+refresh_index() ->
+    {ok, Db} = couch_db:open_int(test_db_name(), []),
+    {ok, DDoc} = couch_db:open_doc(Db, ddoc_id(), [ejson_body]),
+    couch_mrview:query_view(Db, DDoc, <<"view1">>, [{stale, false}]),
+    ok = couch_db:close(Db).
+
+
+compact_db() ->
+    {ok, Db} = couch_db:open_int(test_db_name(), []),
+    {ok, CompactPid} = couch_db:start_compact(Db),
+    MonRef = erlang:monitor(process, CompactPid),
+    receive
+    {'DOWN', MonRef, process, CompactPid, normal} ->
+        ok;
+    {'DOWN', MonRef, process, CompactPid, Reason} ->
+        etap:bail("Error compacting database: " ++ couch_util:to_list(Reason))
+    after 120000 ->
+        etap:bail("Timeout waiting for database compaction")
+    end,
+    ok = couch_db:close(Db).
+
+
+compact_view() ->
+    {ok, MonRef} = couch_mrview:compact(test_db_name(), ddoc_id(), [monitor]),
+    receive
+    {'DOWN', MonRef, process, _CompactPid, normal} ->
+        ok;
+    {'DOWN', MonRef, process, _CompactPid, Reason} ->
+        etap:bail("Error compacting view group: " ++ couch_util:to_list(Reason))
+    after 120000 ->
+        etap:bail("Timeout waiting for view group compaction")
+    end.
+
+
+db_disk_size() ->
+    {ok, Db} = couch_db:open_int(test_db_name(), []),
+    {ok, Info} = couch_db:get_db_info(Db),
+    ok = couch_db:close(Db),
+    couch_util:get_value(disk_size, Info).
+
+
+view_disk_size() ->
+    {ok, Db} = couch_db:open_int(test_db_name(), []),
+    {ok, DDoc} = couch_db:open_doc(Db, ddoc_id(), [ejson_body]),
+    {ok, Info} = couch_mrview:get_info(Db, DDoc),
+    ok = couch_db:close(Db),
+    couch_util:get_value(disk_size, Info).
+
+
+delete_db() ->
+    ok = couch_server:delete(
+        test_db_name(), [{user_ctx, #user_ctx{roles = [<<"_admin">>]}}]).
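
Taken together, the repeated set-compact-measure steps above reduce to one helper.
The following is an illustrative sketch only (it assumes the same running
couch_server_sup and code path as the test, plus a placeholder database name), built
from calls already exercised by this test:

    % Sketch: report the database disk size obtained under a given
    % file_compression setting (e.g. "none", "snappy", "deflate_1" as used above).
    disk_size_with_compression(Setting) ->
        couch_config:set("couchdb", "file_compression", Setting, false),
        {ok, Db} = couch_db:open_int(<<"my_test_db">>, []),
        {ok, CompactPid} = couch_db:start_compact(Db),
        MonRef = erlang:monitor(process, CompactPid),
        receive
            {'DOWN', MonRef, process, CompactPid, normal} -> ok
        after 120000 ->
            throw(compaction_timeout)
        end,
        ok = couch_db:close(Db),
        {ok, Db2} = couch_db:open_int(<<"my_test_db">>, []),
        {ok, Info} = couch_db:get_db_info(Db2),
        ok = couch_db:close(Db2),
        couch_util:get_value(disk_size, Info).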

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/src/test/etap/077-couch-db-fast-db-delete-create.t
----------------------------------------------------------------------
diff --git a/src/test/etap/077-couch-db-fast-db-delete-create.t b/src/test/etap/077-couch-db-fast-db-delete-create.t
new file mode 100644
index 0000000..2026698
--- /dev/null
+++ b/src/test/etap/077-couch-db-fast-db-delete-create.t
@@ -0,0 +1,61 @@
+#!/usr/bin/env escript
+%% -*- erlang -*-
+
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+%   http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+main(_) ->
+
+    test_util:init_code_path(),
+
+    etap:plan(unknown),
+    case (catch test()) of
+        ok ->
+            etap:end_tests();
+        Other ->
+            Msg = io_lib:format("Test died abnormally: ~p", [Other]),
+            etap:diag(Msg),
+            etap:bail(Msg)
+        end,
+    ok.
+
+loop(0) ->
+    ok;
+loop(N) ->
+    ok = cycle(),
+    loop(N - 1).
+
+cycle() ->
+    ok = couch_server:delete(<<"etap-test-db">>, []),
+    {ok, _Db} = couch_db:create(<<"etap-test-db">>, []),
+    ok.
+
+test() ->
+    couch_server_sup:start_link(test_util:config_files()),
+
+    {ok, _Db} = couch_db:create(<<"etap-test-db">>, []),
+
+    ok = loop(1),
+    ok = loop(10),
+    ok = loop(100),
+    ok = loop(1000),
+
+    % for more thorough testing:
+    % ok = loop(10000),
+    % ok = loop(100000),
+    % ok = loop(1000000),
+    % ok = loop(10000000),
+
+    ok = couch_server:delete(<<"etap-test-db">>, []),
+
+    etap:is(true, true, "lots of creating and deleting of a database"),
+    ok.

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/src/test/etap/080-config-get-set.t
----------------------------------------------------------------------
diff --git a/src/test/etap/080-config-get-set.t b/src/test/etap/080-config-get-set.t
new file mode 100755
index 0000000..94a9cba
--- /dev/null
+++ b/src/test/etap/080-config-get-set.t
@@ -0,0 +1,128 @@
+#!/usr/bin/env escript
+%% -*- erlang -*-
+
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+%   http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+default_config() ->
+    test_util:build_file("etc/couchdb/default_dev.ini").
+
+main(_) ->
+    test_util:init_code_path(),
+    etap:plan(12),
+    case (catch test()) of
+        ok ->
+            etap:end_tests();
+        Other ->
+            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
+            etap:bail(Other)
+    end,
+    ok.
+
+test() ->
+    % start couch_config with default
+    couch_config:start_link([default_config()]),
+
+
+    % Check that we can get values
+
+
+    etap:fun_is(
+        fun(List) -> length(List) > 0 end,
+        couch_config:all(),
+        "Data was loaded from the INI file."
+    ),
+
+    etap:fun_is(
+        fun(List) -> length(List) > 0 end,
+        couch_config:get("daemons"),
+        "There are settings in the [daemons] section of the INI file."
+    ),
+
+    etap:is(
+        couch_config:get("httpd_design_handlers", "_view"),
+        "{couch_mrview_http, handle_view_req}",
+        "The {httpd_design_handlers, view} is the expected default."
+    ),
+
+    etap:is(
+        couch_config:get("httpd", "foo", "bar"),
+        "bar",
+        "Returns the default when key doesn't exist in config."
+    ),
+
+    etap:is(
+        couch_config:get("httpd", "foo"),
+        undefined,
+        "The default default is the atom 'undefined'."
+    ),
+
+    etap:is(
+        couch_config:get("httpd", "port", "bar"),
+        "5984",
+        "Only returns the default when the config setting does not exist."
+    ),
+
+
+    % Check that setting values works.
+
+
+    ok = couch_config:set("log", "level", "severe", false),
+
+    etap:is(
+        couch_config:get("log", "level"),
+        "severe",
+        "Non persisted changes take effect."
+    ),
+
+    etap:is(
+        couch_config:get("new_section", "bizzle"),
+        undefined,
+        "Section 'new_section' does not exist."
+    ),
+
+    ok = couch_config:set("new_section", "bizzle", "bang", false),
+
+    etap:is(
+        couch_config:get("new_section", "bizzle"),
+        "bang",
+        "New section 'new_section' was created for a new key/value pair."
+    ),
+
+
+    % Check that deleting works
+
+
+    ok = couch_config:delete("new_section", "bizzle", false),
+    etap:is(
+        couch_config:get("new_section", "bizzle"),
+        undefined,
+        "Deleting sets the value to \"\""
+    ),
+
+
+    % Check get/set/delete with binary strings
+
+    ok = couch_config:set(<<"foo">>, <<"bar">>, <<"baz">>, false),
+    etap:is(
+        couch_config:get(<<"foo">>, <<"bar">>),
+        <<"baz">>,
+        "Can get and set with binary section and key values."
+    ),
+    ok = couch_config:delete(<<"foo">>, <<"bar">>, false),
+    etap:is(
+        couch_config:get(<<"foo">>, <<"bar">>),
+        undefined,
+        "Deleting with binary section/key pairs sets the value to \"\""
+    ),
+
+    ok.
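
For reference, the get/set/delete calls exercised above form a simple round trip.
A minimal sketch, assuming couch_config has already been started with some INI file
and using a made-up section and key:

    % Sketch only: typical read/modify/delete cycle against couch_config.
    % "my_section" and "my_key" are illustrative names, not part of CouchDB.
    config_round_trip() ->
        undefined = couch_config:get("my_section", "my_key"),
        "fallback" = couch_config:get("my_section", "my_key", "fallback"),
        ok = couch_config:set("my_section", "my_key", "42", false),  % false = don't persist
        "42" = couch_config:get("my_section", "my_key"),
        ok = couch_config:delete("my_section", "my_key", false),
        undefined = couch_config:get("my_section", "my_key"),
        ok.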

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/src/test/etap/081-config-override.1.ini
----------------------------------------------------------------------
diff --git a/src/test/etap/081-config-override.1.ini b/src/test/etap/081-config-override.1.ini
new file mode 100644
index 0000000..55451da
--- /dev/null
+++ b/src/test/etap/081-config-override.1.ini
@@ -0,0 +1,22 @@
+; Licensed to the Apache Software Foundation (ASF) under one
+; or more contributor license agreements.  See the NOTICE file
+; distributed with this work for additional information
+; regarding copyright ownership.  The ASF licenses this file
+; to you under the Apache License, Version 2.0 (the
+; "License"); you may not use this file except in compliance
+; with the License.  You may obtain a copy of the License at
+; 
+;   http://www.apache.org/licenses/LICENSE-2.0
+;
+; Unless required by applicable law or agreed to in writing,
+; software distributed under the License is distributed on an
+; "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+; KIND, either express or implied.  See the License for the
+; specific language governing permissions and limitations
+; under the License.
+
+[couchdb]
+max_dbs_open=10
+
+[httpd]
+port=4895

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/src/test/etap/081-config-override.2.ini
----------------------------------------------------------------------
diff --git a/src/test/etap/081-config-override.2.ini b/src/test/etap/081-config-override.2.ini
new file mode 100644
index 0000000..5f46357
--- /dev/null
+++ b/src/test/etap/081-config-override.2.ini
@@ -0,0 +1,22 @@
+; Licensed to the Apache Software Foundation (ASF) under one
+; or more contributor license agreements.  See the NOTICE file
+; distributed with this work for additional information
+; regarding copyright ownership.  The ASF licenses this file
+; to you under the Apache License, Version 2.0 (the
+; "License"); you may not use this file except in compliance
+; with the License.  You may obtain a copy of the License at
+; 
+;   http://www.apache.org/licenses/LICENSE-2.0
+;
+; Unless required by applicable law or agreed to in writing,
+; software distributed under the License is distributed on an
+; "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+; KIND, either express or implied.  See the License for the
+; specific language governing permissions and limitations
+; under the License.
+
+[httpd]
+port = 80
+
+[fizbang]
+unicode = normalized

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/src/test/etap/081-config-override.t
----------------------------------------------------------------------
diff --git a/src/test/etap/081-config-override.t b/src/test/etap/081-config-override.t
new file mode 100755
index 0000000..01f8b4c
--- /dev/null
+++ b/src/test/etap/081-config-override.t
@@ -0,0 +1,212 @@
+#!/usr/bin/env escript
+%% -*- erlang -*-
+
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+%   http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+default_config() ->
+    test_util:build_file("etc/couchdb/default_dev.ini").
+
+local_config_1() ->
+    test_util:source_file("test/etap/081-config-override.1.ini").
+
+local_config_2() ->
+    test_util:source_file("test/etap/081-config-override.2.ini").
+
+local_config_write() ->
+    test_util:build_file("test/etap/temp.081").
+
+% Run tests and wait for the config gen_server to shutdown.
+run_tests(IniFiles, Tests) ->
+    {ok, Pid} = couch_config:start_link(IniFiles),
+    erlang:monitor(process, Pid),
+    Tests(),
+    couch_config:stop(),
+    receive
+        {'DOWN', _, _, Pid, _} -> ok;
+        _Other -> etap:diag("OTHER: ~p~n", [_Other])
+    after
+        1000 -> throw({timeout_error, config_stop})
+    end.
+
+main(_) ->
+    test_util:init_code_path(),
+    etap:plan(17),
+
+    case (catch test()) of
+        ok ->
+            etap:end_tests();
+        Other ->
+            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
+            etap:bail(Other)
+    end,
+    ok.
+
+test() ->
+
+    CheckStartStop = fun() -> ok end,
+    run_tests([default_config()], CheckStartStop),
+
+    CheckDefaults = fun() ->
+        etap:is(
+            couch_config:get("couchdb", "max_dbs_open"),
+            "100",
+            "{couchdb, max_dbs_open} is 100 by defualt."
+        ),
+
+        etap:is(
+            couch_config:get("httpd","port"),
+            "5984",
+            "{httpd, port} is 5984 by default"
+        ),
+
+        etap:is(
+            couch_config:get("fizbang", "unicode"),
+            undefined,
+            "{fizbang, unicode} is undefined by default"
+        )
+    end,
+
+    run_tests([default_config()], CheckDefaults),
+
+
+    % Check that subsequent files override values appropriately
+
+    CheckOverride = fun() ->
+        etap:is(
+            couch_config:get("couchdb", "max_dbs_open"),
+            "10",
+            "{couchdb, max_dbs_open} was overriden with the value 10"
+        ),
+
+        etap:is(
+            couch_config:get("httpd", "port"),
+            "4895",
+            "{httpd, port} was overriden with the value 4895"
+        )
+    end,
+
+    run_tests([default_config(), local_config_1()], CheckOverride),
+
+
+    % Check that overrides can create new sections
+
+    CheckOverride2 = fun() ->
+        etap:is(
+            couch_config:get("httpd", "port"),
+            "80",
+            "{httpd, port} is overriden with the value 80"
+        ),
+
+        etap:is(
+            couch_config:get("fizbang", "unicode"),
+            "normalized",
+            "{fizbang, unicode} was created by override INI file"
+        )
+    end,
+
+    run_tests([default_config(), local_config_2()], CheckOverride2),
+
+
+    % Check that values can be overriden multiple times
+
+    CheckOverride3 = fun() ->
+        etap:is(
+            couch_config:get("httpd", "port"),
+            "80",
+            "{httpd, port} value was taken from the last specified INI file."
+        )
+    end,
+
+    run_tests(
+        [default_config(), local_config_1(), local_config_2()],
+        CheckOverride3
+    ),
+
+    % Check persistence to last file.
+
+    % Empty the file in case it exists.
+    {ok, Fd} = file:open(local_config_write(), write),
+    ok = file:truncate(Fd),
+    ok = file:close(Fd),
+
+    % Open and write a value
+    CheckCanWrite = fun() ->
+        etap:is(
+            couch_config:get("httpd", "port"),
+            "5984",
+            "{httpd, port} is still 5984 by default"
+        ),
+
+        etap:is(
+            couch_config:set("httpd", "port", "8080"),
+            ok,
+            "Writing {httpd, port} is kosher."
+        ),
+
+        etap:is(
+            couch_config:get("httpd", "port"),
+            "8080",
+            "{httpd, port} was updated to 8080 successfully."
+        ),
+
+        etap:is(
+            couch_config:delete("httpd", "bind_address"),
+            ok,
+            "Deleting {httpd, bind_address} succeeds"
+        ),
+
+        etap:is(
+            couch_config:get("httpd", "bind_address"),
+            undefined,
+            "{httpd, bind_address} was actually deleted."
+        )
+    end,
+
+    run_tests([default_config(), local_config_write()], CheckCanWrite),
+
+    % Open and check where we don't expect persistence.
+
+    CheckDidntWrite = fun() ->
+        etap:is(
+            couch_config:get("httpd", "port"),
+            "5984",
+            "{httpd, port} was not persisted to the primary INI file."
+        ),
+
+        etap:is(
+            couch_config:get("httpd", "bind_address"),
+            "127.0.0.1",
+            "{httpd, bind_address} was not deleted form the primary INI file."
+        )
+    end,
+
+    run_tests([default_config()], CheckDidntWrite),
+
+    % Open and check we have only the persistence we expect.
+    CheckDidWrite = fun() ->
+        etap:is(
+            couch_config:get("httpd", "port"),
+            "8080",
+            "{httpd, port} is still 8080 after reopening the config."
+        ),
+
+        etap:is(
+            couch_config:get("httpd", "bind_address"),
+            undefined,
+            "{httpd, bind_address} is still \"\" after reopening."
+        )
+    end,
+
+    run_tests([local_config_write()], CheckDidWrite),
+
+    ok.
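
What the assertions above pin down is that later INI files shadow earlier ones and
that persisted writes land in the last file of the list. A compressed sketch of that
behaviour, with placeholder file names:

    % Sketch: later INI files override earlier ones; a persisted set (set/3)
    % is written back to the last file passed to start_link/1.
    % "default.ini" and "local.ini" are placeholders, not real paths.
    override_example() ->
        {ok, _Pid} = couch_config:start_link(["default.ini", "local.ini"]),
        Port = couch_config:get("httpd", "port"),        % local.ini wins if it sets a port
        ok = couch_config:set("httpd", "port", "8080"),  % persisted into local.ini
        couch_config:stop(),
        Port.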

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/src/test/etap/082-config-register.t
----------------------------------------------------------------------
diff --git a/src/test/etap/082-config-register.t b/src/test/etap/082-config-register.t
new file mode 100755
index 0000000..191ba8f
--- /dev/null
+++ b/src/test/etap/082-config-register.t
@@ -0,0 +1,94 @@
+#!/usr/bin/env escript
+%% -*- erlang -*-
+
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+%   http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+default_config() ->
+    test_util:build_file("etc/couchdb/default_dev.ini").
+
+main(_) ->
+    test_util:init_code_path(),
+    etap:plan(5),
+    case (catch test()) of
+        ok ->
+            etap:end_tests();
+        Other ->
+            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
+            etap:bail(Other)
+    end,
+    ok.
+
+test() ->
+    couch_config:start_link([default_config()]),
+
+    etap:is(
+        couch_config:get("httpd", "port"),
+        "5984",
+        "{httpd, port} is 5984 by default."
+    ),
+
+    ok = couch_config:set("httpd", "port", "4895", false),
+
+    etap:is(
+        couch_config:get("httpd", "port"),
+        "4895",
+        "{httpd, port} changed to 4895"
+    ),
+
+    SentinelFunc = fun() ->
+        % Ping/Pong to make sure we wait for this
+        % process to die
+        receive {ping, From} -> From ! pong end
+    end,
+    SentinelPid = spawn(SentinelFunc),
+
+    couch_config:register(
+        fun("httpd", "port", Value) ->
+            etap:is(Value, "8080", "Registered function got notification.")
+        end,
+        SentinelPid
+    ),
+
+    ok = couch_config:set("httpd", "port", "8080", false),
+
+    % Implicitly checking that we *don't* call the function
+    etap:is(
+        couch_config:get("httpd", "bind_address"),
+        "127.0.0.1",
+        "{httpd, bind_address} is not '0.0.0.0'"
+    ),
+    ok = couch_config:set("httpd", "bind_address", "0.0.0.0", false),
+
+    % Ping-Pong kill process
+    SentinelPid ! {ping, self()},
+    receive
+        _Any -> ok
+    after 1000 ->
+        throw({timeout_error, registered_pid})
+    end,
+
+    ok = couch_config:set("httpd", "port", "80", false),
+    etap:is(
+        couch_config:get("httpd", "port"),
+        "80",
+        "Implicitly test that the function got de-registered"
+    ),
+
+    % test passing of Persist flag
+    couch_config:register(
+        fun("httpd", _, _, Persist) ->
+            etap:is(Persist, false)
+        end),
+    ok = couch_config:set("httpd", "port", "80", false),
+
+    ok.
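
The registration semantics covered above boil down to: register a callback fun
together with an owner pid, and the fun fires on matching changes until that owner
exits. A hedged sketch (the printed message is purely illustrative):

    % Sketch: subscribe to changes of {httpd, port}; the subscription lives
    % only as long as the owner process passed as the second argument.
    watch_httpd_port() ->
        Owner = spawn(fun() -> receive stop -> ok end end),
        couch_config:register(
            fun("httpd", "port", NewValue) ->
                io:format("httpd port changed to ~s~n", [NewValue])
            end,
            Owner),
        ok = couch_config:set("httpd", "port", "8080", false),  % triggers the callback
        Owner ! stop,                                           % implicitly de-registers
        ok.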

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/src/test/etap/083-config-no-files.t
----------------------------------------------------------------------
diff --git a/src/test/etap/083-config-no-files.t b/src/test/etap/083-config-no-files.t
new file mode 100755
index 0000000..0ce38e6
--- /dev/null
+++ b/src/test/etap/083-config-no-files.t
@@ -0,0 +1,53 @@
+#!/usr/bin/env escript
+%% -*- erlang -*-
+
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+%   http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+
+main(_) ->
+    test_util:init_code_path(),
+    etap:plan(3),
+    case (catch test()) of
+        ok ->
+            etap:end_tests();
+        Other ->
+            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
+            etap:bail(Other)
+    end,
+    ok.
+
+test() ->
+    couch_config:start_link([]),
+
+    etap:fun_is(
+        fun(KVPairs) -> length(KVPairs) == 0 end,
+        couch_config:all(),
+        "No INI files specified returns 0 key/value pairs."
+    ),
+
+    ok = couch_config:set("httpd", "port", "80", false),
+
+    etap:is(
+        couch_config:get("httpd", "port"),
+        "80",
+        "Created a new non-persisted k/v pair."
+    ),
+
+    ok = couch_config:set("httpd", "bind_address", "127.0.0.1"),
+    etap:is(
+        couch_config:get("httpd", "bind_address"),
+        "127.0.0.1",
+        "Asking for a persistent key/value pair doesn't choke."
+    ),
+
+    ok.

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/src/test/etap/090-task-status.t
----------------------------------------------------------------------
diff --git a/src/test/etap/090-task-status.t b/src/test/etap/090-task-status.t
new file mode 100755
index 0000000..23115bd
--- /dev/null
+++ b/src/test/etap/090-task-status.t
@@ -0,0 +1,279 @@
+#!/usr/bin/env escript
+%% -*- erlang -*-
+
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+%   http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+main(_) ->
+    test_util:init_code_path(),
+    etap:plan(28),
+    case (catch test()) of
+        ok ->
+            etap:end_tests();
+        Other ->
+            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
+            etap:bail(Other)
+    end,
+    ok.
+
+get_task_prop(Pid, Prop) ->
+    From = list_to_binary(pid_to_list(Pid)),
+    Element = lists:foldl(
+        fun(PropList,Acc) ->
+            case couch_util:get_value(pid,PropList) of
+                From ->
+                    [PropList | Acc];
+                _ ->
+                    []
+            end
+        end,
+        [], couch_task_status:all()
+    ),
+    case couch_util:get_value(Prop, hd(Element), nil) of
+    nil ->
+        etap:bail("Could not get property '" ++ couch_util:to_list(Prop) ++
+            "' for task " ++ pid_to_list(Pid));
+    Value ->
+        Value
+    end.
+
+
+loop() ->
+    receive
+    {add, Props, From} ->
+        Resp = couch_task_status:add_task(Props),
+        From ! {ok, self(), Resp},
+        loop();
+    {update, Props, From} ->
+        Resp = couch_task_status:update(Props),
+        From ! {ok, self(), Resp},
+        loop();
+    {update_frequency, Msecs, From} ->
+        Resp = couch_task_status:set_update_frequency(Msecs),
+        From ! {ok, self(), Resp},
+        loop();
+    {done, From} ->
+        From ! {ok, self(), ok}
+    end.
+
+call(Pid, Command) ->
+    Pid ! {Command, self()},
+    wait(Pid).
+
+call(Pid, Command, Arg) ->
+    Pid ! {Command, Arg, self()},
+    wait(Pid).
+
+wait(Pid) ->
+    receive
+        {ok, Pid, Msg} -> Msg
+    after 1000 ->
+        throw(timeout_error)
+    end.
+
+test() ->
+    {ok, TaskStatusPid} = couch_task_status:start_link(),
+
+    TaskUpdater = fun() -> loop() end,
+    % create three updaters
+    Pid1 = spawn(TaskUpdater),
+    Pid2 = spawn(TaskUpdater),
+    Pid3 = spawn(TaskUpdater),
+
+    ok = call(Pid1, add, [{type, replication}, {progress, 0}]),
+    etap:is(
+        length(couch_task_status:all()),
+        1,
+        "Started a task"
+    ),
+    Task1StartTime = get_task_prop(Pid1, started_on),
+    etap:is(
+        is_integer(Task1StartTime),
+        true,
+        "Task start time is defined."
+    ),
+    etap:is(
+        get_task_prop(Pid1, updated_on),
+        Task1StartTime,
+        "Task's start time is the same as the update time before an update."
+    ),
+
+    etap:is(
+        call(Pid1, add, [{type, compaction}, {progress, 0}]),
+        {add_task_error, already_registered},
+        "Unable to register multiple tasks for a single Pid."
+    ),
+
+    etap:is(
+        get_task_prop(Pid1, type),
+        replication,
+        "Task type is 'replication'."
+    ),
+    etap:is(
+        get_task_prop(Pid1, progress),
+        0,
+        "Task progress is 0."
+    ),
+
+    ok = timer:sleep(1000),
+    call(Pid1, update, [{progress, 25}]),
+    etap:is(
+        get_task_prop(Pid1, progress),
+        25,
+        "Task progress is 25."
+    ),
+    etap:is(
+        get_task_prop(Pid1, updated_on) > Task1StartTime,
+        true,
+        "Task's last update time has increased after an update."
+    ),
+
+    call(Pid2, add, [{type, compaction}, {progress, 0}]),
+    etap:is(
+        length(couch_task_status:all()),
+        2,
+        "Started a second task."
+    ),
+    Task2StartTime = get_task_prop(Pid2, started_on),
+    etap:is(
+        is_integer(Task2StartTime),
+        true,
+        "Second task's start time is defined."
+    ),
+    etap:is(
+        get_task_prop(Pid2, updated_on),
+        Task2StartTime,
+        "Second task's start time is the same as the update time before an update."
+    ),
+
+    etap:is(
+        get_task_prop(Pid2, type),
+        compaction,
+        "Second task's type is 'compaction'."
+    ),
+    etap:is(
+        get_task_prop(Pid2, progress),
+        0,
+        "Second task's progress is 0."
+    ),
+
+    ok = timer:sleep(1000),
+    call(Pid2, update, [{progress, 33}]),
+    etap:is(
+        get_task_prop(Pid2, progress),
+        33,
+        "Second task's progress updated to 33."
+    ),
+    etap:is(
+        get_task_prop(Pid2, updated_on) > Task2StartTime,
+        true,
+        "Second task's last update time has increased after an update."
+    ),
+
+    call(Pid3, add, [{type, indexer}, {progress, 0}]),
+    etap:is(
+        length(couch_task_status:all()),
+        3,
+        "Registered a third task."
+    ),
+    Task3StartTime = get_task_prop(Pid3, started_on),
+    etap:is(
+        is_integer(Task3StartTime),
+        true,
+        "Third task's start time is defined."
+    ),
+    etap:is(
+        get_task_prop(Pid3, updated_on),
+        Task3StartTime,
+        "Third task's start time is the same as the update time before an update."
+    ),
+
+    etap:is(
+        get_task_prop(Pid3, type),
+        indexer,
+        "Third task's type is 'indexer'."
+    ),
+    etap:is(
+        get_task_prop(Pid3, progress),
+        0,
+        "Third task's progress is 0."
+    ),
+
+    ok = timer:sleep(1000),
+    call(Pid3, update, [{progress, 50}]),
+    etap:is(
+        get_task_prop(Pid3, progress),
+        50,
+        "Third task's progress updated to 50."
+    ),
+    etap:is(
+        get_task_prop(Pid3, updated_on) > Task3StartTime,
+        true,
+        "Third task's last update time has increased after an update."
+    ),
+
+    call(Pid3, update_frequency, 500),
+    call(Pid3, update, [{progress, 66}]),
+    etap:is(
+        get_task_prop(Pid3, progress),
+        66,
+        "Third task's progress updated to 66."
+    ),
+
+    call(Pid3, update, [{progress, 67}]),
+    etap:is(
+        get_task_prop(Pid3, progress),
+        66,
+        "Task update dropped because of frequency limit."
+    ),
+
+    call(Pid3, update_frequency, 0),
+    call(Pid3, update, [{progress, 77}]),
+    etap:is(
+        get_task_prop(Pid3, progress),
+        77,
+        "Task updated after reseting frequency limit."
+    ),
+
+
+    call(Pid1, done),
+    etap:is(
+        length(couch_task_status:all()),
+        2,
+        "First task finished."
+    ),
+
+    call(Pid2, done),
+    etap:is(
+        length(couch_task_status:all()),
+        1,
+        "Second task finished."
+    ),
+
+    call(Pid3, done),
+    etap:is(
+        length(couch_task_status:all()),
+        0,
+        "Third task finished."
+    ),
+
+    erlang:monitor(process, TaskStatusPid),
+    couch_task_status:stop(),
+    receive
+        {'DOWN', _, _, TaskStatusPid, _} ->
+            ok
+    after
+        1000 ->
+            throw(timeout_error)
+    end,
+
+    ok.
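
In normal use the API above is driven from inside the worker process itself:
add_task/1 once, update/1 as progress is made, and set_update_frequency/1 to
rate-limit noisy updates. A minimal sketch with an invented unit of work:

    % Sketch: a worker that reports its progress through couch_task_status.
    % The task type and do_one_item/1 are invented for illustration.
    run_with_status(TotalItems) ->
        couch_task_status:add_task([{type, indexer}, {progress, 0}]),
        couch_task_status:set_update_frequency(500),  % at most one update per 500 ms
        lists:foreach(
            fun(N) ->
                do_one_item(N),
                couch_task_status:update([{progress, (N * 100) div TotalItems}])
            end,
            lists:seq(1, TotalItems)),
        ok.

    do_one_item(_N) ->
        timer:sleep(10).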

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/src/test/etap/100-ref-counter.t
----------------------------------------------------------------------
diff --git a/src/test/etap/100-ref-counter.t b/src/test/etap/100-ref-counter.t
new file mode 100755
index 0000000..8f996d0
--- /dev/null
+++ b/src/test/etap/100-ref-counter.t
@@ -0,0 +1,114 @@
+#!/usr/bin/env escript
+%% -*- erlang -*-
+
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+%   http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+main(_) ->
+    test_util:init_code_path(),
+    etap:plan(8),
+    case (catch test()) of
+        ok ->
+            etap:end_tests();
+        Other ->
+            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
+            etap:bail(Other)
+    end,
+    ok.
+
+loop() ->
+    receive
+        close -> ok
+    end.
+
+wait() ->
+    receive
+        {'DOWN', _, _, _, _} -> ok
+    after 1000 ->
+        throw(timeout_error)
+    end.
+
+test() ->
+    {ok, RefCtr} = couch_ref_counter:start([]),
+
+    etap:is(
+        couch_ref_counter:count(RefCtr),
+        1,
+        "A ref_counter is initialized with the calling process as a referer."
+    ),
+
+    ChildPid1 = spawn(fun() -> loop() end),
+
+    % This is largely implicit in that nothing else breaks
+    % as ok is just returned from gen_server:cast()
+    etap:is(
+        couch_ref_counter:drop(RefCtr, ChildPid1),
+        ok,
+        "Dropping an unknown Pid is ignored."
+    ),
+
+    couch_ref_counter:add(RefCtr, ChildPid1),
+    etap:is(
+        couch_ref_counter:count(RefCtr),
+        2,
+        "Adding a Pid to the ref_counter increases it's count."
+    ),
+
+    couch_ref_counter:add(RefCtr, ChildPid1),
+    etap:is(
+        couch_ref_counter:count(RefCtr),
+        2,
+        "Readding the same Pid maintains the count but increments it's refs."
+    ),
+
+    couch_ref_counter:drop(RefCtr, ChildPid1),
+    etap:is(
+        couch_ref_counter:count(RefCtr),
+        2,
+        "Droping the doubly added Pid only removes a ref, not a referer."
+    ),
+
+    couch_ref_counter:drop(RefCtr, ChildPid1),
+    etap:is(
+        couch_ref_counter:count(RefCtr),
+        1,
+        "Dropping the second ref drops the referer."
+    ),
+
+    couch_ref_counter:add(RefCtr, ChildPid1),
+    etap:is(
+        couch_ref_counter:count(RefCtr),
+        2,
+        "Sanity checking that the Pid was re-added."
+    ),
+
+    erlang:monitor(process, ChildPid1),
+    ChildPid1 ! close,
+    wait(),
+    
+    CheckFun = fun
+        (Iter, nil) ->
+            case couch_ref_counter:count(RefCtr) of
+                1 -> Iter;
+                _ -> nil
+            end;
+        (_, Acc) ->
+            Acc
+    end,
+    Result = lists:foldl(CheckFun, nil, lists:seq(1, 10000)),
+    etap:isnt(
+        Result,
+        nil,
+        "The referer count was decremented automatically on process exit."
+    ),
+
+    ok.
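
The add/drop pairs above suggest the usual usage pattern: bracket each client's use
of a shared resource, relying on the counter's automatic cleanup if the client dies.
A small sketch of such a wrapper (the wrapped work is arbitrary):

    % Sketch: reference-count one client's use of a shared resource.
    % couch_ref_counter monitors the caller, so an abnormal exit also releases it.
    with_ref(RefCtr, Fun) ->
        couch_ref_counter:add(RefCtr, self()),
        try
            Fun()
        after
            couch_ref_counter:drop(RefCtr, self())
        end.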

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/src/test/etap/120-stats-collect.t
----------------------------------------------------------------------
diff --git a/src/test/etap/120-stats-collect.t b/src/test/etap/120-stats-collect.t
new file mode 100755
index 0000000..a30f9ac
--- /dev/null
+++ b/src/test/etap/120-stats-collect.t
@@ -0,0 +1,150 @@
+#!/usr/bin/env escript
+%% -*- erlang -*-
+
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+%   http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+main(_) ->
+    test_util:init_code_path(),
+    etap:plan(11),
+    case (catch test()) of
+        ok ->
+            etap:end_tests();
+        Other ->
+            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
+            etap:bail()
+    end,
+    ok.
+
+test() ->
+    couch_stats_collector:start(),
+    ok = test_counters(),
+    ok = test_abs_values(),
+    ok = test_proc_counting(),
+    ok = test_all(),
+    ok.
+
+test_counters() ->
+    AddCount = fun() -> couch_stats_collector:increment(foo) end,
+    RemCount = fun() -> couch_stats_collector:decrement(foo) end,
+    repeat(AddCount, 100),
+    repeat(RemCount, 25),
+    repeat(AddCount, 10),
+    repeat(RemCount, 5),
+    etap:is(
+        couch_stats_collector:get(foo),
+        80,
+        "Incrememnt tracks correctly."
+    ),
+
+    repeat(RemCount, 80),
+    etap:is(
+        couch_stats_collector:get(foo),
+        0,
+        "Decremented to zaro."
+    ),
+    ok.
+
+test_abs_values() ->
+    lists:map(fun(Val) ->
+        couch_stats_collector:record(bar, Val)
+    end, lists:seq(1, 15)),
+    etap:is(
+        couch_stats_collector:get(bar),
+        lists:seq(1, 15),
+        "Absolute values are recorded correctly."
+    ),
+    
+    couch_stats_collector:clear(bar),
+    etap:is(
+        couch_stats_collector:get(bar),
+        nil,
+        "Absolute values are cleared correctly."
+    ),
+    ok.
+
+test_proc_counting() ->
+    Self = self(),
+    OnePid = spawn(fun() ->
+        couch_stats_collector:track_process_count(hoopla),
+        Self ! reporting,
+        receive sepuku -> ok end
+    end),
+    R1 = erlang:monitor(process, OnePid),
+    receive reporting -> ok end,
+    etap:is(
+        couch_stats_collector:get(hoopla),
+        1,
+        "track_process_count increments the counter."
+    ),
+    
+    TwicePid = spawn(fun() ->
+        couch_stats_collector:track_process_count(hoopla),
+        couch_stats_collector:track_process_count(hoopla),
+        Self ! reporting,
+        receive sepuku -> ok end
+    end),
+    R2 = erlang:monitor(process, TwicePid),
+    receive reporting -> ok end,
+    etap:is(
+        couch_stats_collector:get(hoopla),
+        3,
+        "track_process_count allows more than one incrememnt per Pid"
+    ),
+    
+    OnePid ! sepuku,
+    receive {'DOWN', R1, _, _, _} -> ok end,
+    timer:sleep(250),
+    etap:is(
+        couch_stats_collector:get(hoopla),
+        2,
+        "Process count is decremented when process exits."
+    ),
+    
+    TwicePid ! sepuku,
+    receive {'DOWN', R2, _, _, _} -> ok end,
+    timer:sleep(250),
+    etap:is(
+        couch_stats_collector:get(hoopla),
+        0,
+        "Process count is decremented for each call to track_process_count."
+    ),
+    ok.
+
+test_all() ->
+    couch_stats_collector:record(bar, 0.0),
+    couch_stats_collector:record(bar, 1.0),
+    etap:is(
+        lists:sort(couch_stats_collector:all()),
+        [ {bar,[1.0,0.0]}, {foo,0}, { hoopla,0} ],
+        "all/0 returns all counters and absolute values."
+    ),
+    
+    etap:is(
+        lists:sort(couch_stats_collector:all(incremental)),
+        [ {foo, 0}, {hoopla, 0} ],
+        "all/1 returns only the specified type."
+    ),
+    
+    couch_stats_collector:record(zing, 90),
+    etap:is(
+        lists:sort(couch_stats_collector:all(absolute)),
+        [ {bar,[1.0,0.0]}, {zing,"Z"} ],
+        "all/1 returns only the specified type."
+    ),
+    ok.
+
+repeat(_, 0) ->
+    ok;
+repeat(Fun, Count) ->
+    Fun(),
+    repeat(Fun, Count-1).
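
The collector distinguishes incremental counters (increment/decrement/get) from
absolute samples (record/clear); a small hedged sketch with made-up stat names:

    % Sketch: the two kinds of statistics kept by couch_stats_collector.
    % The keys open_requests and request_time are illustrative only.
    collector_example() ->
        couch_stats_collector:increment(open_requests),  % counter -> 1
        couch_stats_collector:decrement(open_requests),  % counter -> 0
        Counter = couch_stats_collector:get(open_requests),
        couch_stats_collector:record(request_time, 12),  % absolute samples accumulate
        couch_stats_collector:record(request_time, 25),
        Samples = couch_stats_collector:get(request_time),
        couch_stats_collector:clear(request_time),
        {Counter, Samples}.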

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/src/test/etap/121-stats-aggregates.cfg
----------------------------------------------------------------------
diff --git a/src/test/etap/121-stats-aggregates.cfg b/src/test/etap/121-stats-aggregates.cfg
new file mode 100644
index 0000000..30e475d
--- /dev/null
+++ b/src/test/etap/121-stats-aggregates.cfg
@@ -0,0 +1,19 @@
+% Licensed to the Apache Software Foundation (ASF) under one
+% or more contributor license agreements.  See the NOTICE file
+% distributed with this work for additional information
+% regarding copyright ownership.  The ASF licenses this file
+% to you under the Apache License, Version 2.0 (the
+% "License"); you may not use this file except in compliance
+% with the License.  You may obtain a copy of the License at
+% 
+%   http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing,
+% software distributed under the License is distributed on an
+% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+% KIND, either express or implied.  See the License for the
+% specific language governing permissions and limitations
+% under the License.
+
+{testing, stuff, "yay description"}.
+{number, '11', "randomosity"}.

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/src/test/etap/121-stats-aggregates.ini
----------------------------------------------------------------------
diff --git a/src/test/etap/121-stats-aggregates.ini b/src/test/etap/121-stats-aggregates.ini
new file mode 100644
index 0000000..cc5cd21
--- /dev/null
+++ b/src/test/etap/121-stats-aggregates.ini
@@ -0,0 +1,20 @@
+; Licensed to the Apache Software Foundation (ASF) under one
+; or more contributor license agreements.  See the NOTICE file
+; distributed with this work for additional information
+; regarding copyright ownership.  The ASF licenses this file
+; to you under the Apache License, Version 2.0 (the
+; "License"); you may not use this file except in compliance
+; with the License.  You may obtain a copy of the License at
+; 
+;   http://www.apache.org/licenses/LICENSE-2.0
+;
+; Unless required by applicable law or agreed to in writing,
+; software distributed under the License is distributed on an
+; "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+; KIND, either express or implied.  See the License for the
+; specific language governing permissions and limitations
+; under the License.
+
+[stats]
+rate = 10000000 ; We call collect_sample in testing
+samples = [0, 1]

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/src/test/etap/121-stats-aggregates.t
----------------------------------------------------------------------
diff --git a/src/test/etap/121-stats-aggregates.t b/src/test/etap/121-stats-aggregates.t
new file mode 100755
index 0000000..d678aa9
--- /dev/null
+++ b/src/test/etap/121-stats-aggregates.t
@@ -0,0 +1,171 @@
+#!/usr/bin/env escript
+%% -*- erlang -*-
+
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+%   http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+ini_file() ->
+    test_util:source_file("test/etap/121-stats-aggregates.ini").
+
+cfg_file() ->
+    test_util:source_file("test/etap/121-stats-aggregates.cfg").
+
+main(_) ->
+    test_util:init_code_path(),
+    etap:plan(17),
+    case (catch test()) of
+        ok ->
+            etap:end_tests();
+        Other ->
+            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
+            etap:bail()
+    end,
+    ok.
+
+test() ->
+    couch_config:start_link([ini_file()]),
+    couch_stats_collector:start(),
+    couch_stats_aggregator:start(cfg_file()),
+    ok = test_all_empty(),
+    ok = test_get_empty(),
+    ok = test_count_stats(),
+    ok = test_abs_stats(),
+    ok.
+
+test_all_empty() ->
+    {Aggs} = couch_stats_aggregator:all(),
+
+    etap:is(length(Aggs), 2, "There are only two aggregate types in testing."),
+    etap:is(
+        couch_util:get_value(testing, Aggs),
+        {[{stuff, make_agg(<<"yay description">>,
+            null, null, null, null, null)}]},
+        "{testing, stuff} is empty at start."
+    ),
+    etap:is(
+        couch_util:get_value(number, Aggs),
+        {[{'11', make_agg(<<"randomosity">>,
+            null, null, null, null, null)}]},
+        "{number, '11'} is empty at start."
+    ),
+    ok.
+    
+test_get_empty() ->
+    etap:is(
+        couch_stats_aggregator:get_json({testing, stuff}),
+        make_agg(<<"yay description">>, null, null, null, null, null),
+        "Getting {testing, stuff} returns an empty aggregate."
+    ),
+    etap:is(
+        couch_stats_aggregator:get_json({number, '11'}),
+        make_agg(<<"randomosity">>, null, null, null, null, null),
+        "Getting {number, '11'} returns an empty aggregate."
+    ),
+    ok.
+
+test_count_stats() ->
+    lists:foreach(fun(_) ->
+        couch_stats_collector:increment({testing, stuff})
+    end, lists:seq(1, 100)),
+    couch_stats_aggregator:collect_sample(),
+    etap:is(
+        couch_stats_aggregator:get_json({testing, stuff}),
+        make_agg(<<"yay description">>, 100, 100, null, 100, 100),
+        "COUNT: Adding values changes the stats."
+    ),
+    etap:is(
+        couch_stats_aggregator:get_json({testing, stuff}, 1),
+        make_agg(<<"yay description">>, 100, 100, null, 100, 100),
+        "COUNT: Adding values changes stats for all times."
+    ),
+
+    timer:sleep(500),
+    couch_stats_aggregator:collect_sample(),
+    etap:is(
+        couch_stats_aggregator:get_json({testing, stuff}),
+        make_agg(<<"yay description">>, 100, 50, 70.711, 0, 100),
+        "COUNT: Removing values changes stats."
+    ),
+    etap:is(
+        couch_stats_aggregator:get_json({testing, stuff}, 1),
+        make_agg(<<"yay description">>, 100, 50, 70.711, 0, 100),
+        "COUNT: Removing values changes stats for all times."
+    ),
+
+    timer:sleep(600),
+    couch_stats_aggregator:collect_sample(),
+    etap:is(
+        couch_stats_aggregator:get_json({testing, stuff}),
+        make_agg(<<"yay description">>, 100, 33.333, 57.735, 0, 100),
+        "COUNT: Letting time passes doesn't remove data from time 0 aggregates"
+    ),
+    etap:is(
+        couch_stats_aggregator:get_json({testing, stuff}, 1),
+        make_agg(<<"yay description">>, 0, 0, 0, 0, 0),
+        "COUNT: Letting time pass removes data from other time aggregates."
+    ),
+    ok.
+
+test_abs_stats() ->
+    lists:foreach(fun(X) ->
+        couch_stats_collector:record({number, 11}, X)
+    end, lists:seq(0, 10)),
+    couch_stats_aggregator:collect_sample(),
+    etap:is(
+        couch_stats_aggregator:get_json({number, 11}),
+        make_agg(<<"randomosity">>, 5, 5, null, 5, 5),
+        "ABS: Adding values changes the stats."
+    ),
+    etap:is(
+        couch_stats_aggregator:get_json({number, 11}, 1),
+        make_agg(<<"randomosity">>, 5, 5, null, 5, 5),
+        "ABS: Adding values changes stats for all times."
+    ),
+
+    timer:sleep(500),
+    couch_stats_collector:record({number, 11}, 15),
+    couch_stats_aggregator:collect_sample(),
+    etap:is(
+        couch_stats_aggregator:get_json({number, 11}),
+        make_agg(<<"randomosity">>, 20, 10, 7.071, 5, 15),
+        "ABS: New values changes stats"
+    ),
+    etap:is(
+        couch_stats_aggregator:get_json({number, 11}, 1),
+        make_agg(<<"randomosity">>, 20, 10, 7.071, 5, 15),
+        "ABS: Removing values changes stats for all times."
+    ),
+
+    timer:sleep(600),
+    couch_stats_aggregator:collect_sample(),
+    etap:is(
+        couch_stats_aggregator:get_json({number, 11}),
+        make_agg(<<"randomosity">>, 20, 10, 7.071, 5, 15),
+        "ABS: Letting time passes doesn't remove data from time 0 aggregates"
+    ),
+    etap:is(
+        couch_stats_aggregator:get_json({number, 11}, 1),
+        make_agg(<<"randomosity">>, 15, 15, null, 15, 15),
+        "ABS: Letting time pass removes data from other time aggregates."
+    ),
+    ok.
+
+make_agg(Desc, Sum, Mean, StdDev, Min, Max) ->
+    {[
+        {description, Desc},
+        {current, Sum},
+        {sum, Sum},
+        {mean, Mean},
+        {stddev, StdDev},
+        {min, Min},
+        {max, Max}
+    ]}.
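
The aggregator layers on top of the collector: raw samples are folded in whenever
collect_sample/0 runs, and get_json/1 (or get_json/2 with a time window) returns the
description plus current, sum, mean, stddev, min and max. A short sketch using a key
defined in the .cfg file above, assuming the collector and aggregator are started as
in test/0:

    % Sketch: feed the collector, fold a sample into the aggregator, read it back.
    aggregate_once() ->
        couch_stats_collector:increment({testing, stuff}),
        couch_stats_aggregator:collect_sample(),
        couch_stats_aggregator:get_json({testing, stuff}).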

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/src/test/etap/130-attachments-md5.t
----------------------------------------------------------------------
diff --git a/src/test/etap/130-attachments-md5.t b/src/test/etap/130-attachments-md5.t
new file mode 100755
index 0000000..a91c9bf
--- /dev/null
+++ b/src/test/etap/130-attachments-md5.t
@@ -0,0 +1,248 @@
+#!/usr/bin/env escript
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+%   http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+test_db_name() ->
+    <<"etap-test-db">>.
+
+docid() ->
+    case get(docid) of
+        undefined ->
+            put(docid, 1),
+            "1";
+        Count ->
+            put(docid, Count+1),
+            integer_to_list(Count+1)
+    end.
+
+main(_) ->
+    test_util:init_code_path(),
+    
+    etap:plan(16),
+    case (catch test()) of
+        ok ->
+            etap:end_tests();
+        Other ->
+            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
+            etap:bail(Other)
+    end,
+    ok.
+
+test() ->
+    couch_server_sup:start_link(test_util:config_files()),
+    Addr = couch_config:get("httpd", "bind_address", any),
+    put(addr, Addr),
+    put(port, mochiweb_socket_server:get(couch_httpd, port)),
+    timer:sleep(1000),
+
+    couch_server:delete(test_db_name(), []),
+    couch_db:create(test_db_name(), []),
+
+    test_identity_without_md5(),
+    test_chunked_without_md5(),
+
+    test_identity_with_valid_md5(),
+    test_chunked_with_valid_md5_header(),
+    test_chunked_with_valid_md5_trailer(),
+
+    test_identity_with_invalid_md5(),
+    test_chunked_with_invalid_md5_header(),
+    test_chunked_with_invalid_md5_trailer(),
+
+    couch_server:delete(test_db_name(), []),
+    couch_server_sup:stop(),
+    ok.
+
+test_identity_without_md5() ->
+    Data = [
+        "PUT /", test_db_name(), "/", docid(), "/readme.txt HTTP/1.1\r\n",
+        "Content-Type: text/plain\r\n",
+        "Content-Length: 34\r\n",
+        "\r\n",
+        "We all live in a yellow submarine!"],
+
+    {Code, Json} = do_request(Data),
+    etap:is(Code, 201, "Stored with identity encoding and no MD5"),
+    etap:is(get_json(Json, [<<"ok">>]), true, "Body indicates success.").
+
+test_chunked_without_md5() ->
+    AttData = <<"We all live in a yellow submarine!">>,
+    <<Part1:21/binary, Part2:13/binary>> = AttData,
+    Data = [
+        "PUT /", test_db_name(), "/", docid(), "/readme.txt HTTP/1.1\r\n",
+        "Content-Type: text/plain\r\n",
+        "Transfer-Encoding: chunked\r\n",
+        "\r\n",
+        to_hex(size(Part1)), "\r\n",
+        Part1, "\r\n",
+        to_hex(size(Part2)), "\r\n",
+        Part2, "\r\n"
+        "0\r\n"
+        "\r\n"],
+
+    {Code, Json} = do_request(Data),
+    etap:is(Code, 201, "Stored with chunked encoding and no MD5"),
+    etap:is(get_json(Json, [<<"ok">>]), true, "Body indicates success.").
+
+test_identity_with_valid_md5() ->
+    AttData = "We all live in a yellow submarine!",
+    Data = [
+        "PUT /", test_db_name(), "/", docid(), "/readme.txt HTTP/1.1\r\n",
+        "Content-Type: text/plain\r\n",
+        "Content-Length: 34\r\n",
+        "Content-MD5: ", base64:encode(couch_util:md5(AttData)), "\r\n",
+        "\r\n",
+        AttData],
+
+    {Code, Json} = do_request(Data),
+    etap:is(Code, 201, "Stored with identity encoding and valid MD5"),
+    etap:is(get_json(Json, [<<"ok">>]), true, "Body indicates success.").
+
+test_chunked_with_valid_md5_header() ->
+    AttData = <<"We all live in a yellow submarine!">>,
+    <<Part1:21/binary, Part2:13/binary>> = AttData,
+    Data = [
+        "PUT /", test_db_name(), "/", docid(), "/readme.txt HTTP/1.1\r\n",
+        "Content-Type: text/plain\r\n",
+        "Transfer-Encoding: chunked\r\n",
+        "Content-MD5: ", base64:encode(couch_util:md5(AttData)), "\r\n",
+        "\r\n",
+        to_hex(size(Part1)), "\r\n",
+        Part1, "\r\n",
+        to_hex(size(Part2)), "\r\n",
+        Part2, "\r\n",
+        "0\r\n",
+        "\r\n"],
+
+    {Code, Json} = do_request(Data),
+    etap:is(Code, 201, "Stored with chunked encoding and valid MD5 header."),
+    etap:is(get_json(Json, [<<"ok">>]), true, "Body indicates success.").
+
+test_chunked_with_valid_md5_trailer() ->
+    AttData = <<"We all live in a yellow submarine!">>,
+    <<Part1:21/binary, Part2:13/binary>> = AttData,
+    Data = [
+        "PUT /", test_db_name(), "/", docid(), "/readme.txt HTTP/1.1\r\n",
+        "Content-Type: text/plain\r\n",
+        "Transfer-Encoding: chunked\r\n",
+        "Trailer: Content-MD5\r\n",
+        "\r\n",
+        to_hex(size(Part1)), "\r\n",
+        Part1, "\r\n",
+        to_hex(size(Part2)), "\r\n",
+        Part2, "\r\n",
+        "0\r\n",
+        "Content-MD5: ", base64:encode(couch_util:md5(AttData)), "\r\n",
+        "\r\n"],
+
+    {Code, Json} = do_request(Data),
+    etap:is(Code, 201, "Stored with chunked encoding and valid MD5 trailer."),
+    etap:is(get_json(Json, [<<"ok">>]), true, "Body indicates success.").
+
+test_identity_with_invalid_md5() ->
+    Data = [
+        "PUT /", test_db_name(), "/", docid(), "/readme.txt HTTP/1.1\r\n",
+        "Content-Type: text/plain\r\n",
+        "Content-Length: 34\r\n",
+        "Content-MD5: ", base64:encode(<<"foobar!">>), "\r\n",
+        "\r\n",
+        "We all live in a yellow submarine!"],
+
+    {Code, Json} = do_request(Data),
+    etap:is(Code, 400, "Invalid MD5 header causes an error: identity"),
+    etap:is(
+        get_json(Json, [<<"error">>]),
+        <<"content_md5_mismatch">>,
+        "Body indicates reason for failure."
+    ).
+
+test_chunked_with_invalid_md5_header() ->
+    AttData = <<"We all live in a yellow submarine!">>,
+    <<Part1:21/binary, Part2:13/binary>> = AttData,
+    Data = [
+        "PUT /", test_db_name(), "/", docid(), "/readme.txt HTTP/1.1\r\n",
+        "Content-Type: text/plain\r\n",
+        "Transfer-Encoding: chunked\r\n",
+        "Content-MD5: ", base64:encode(<<"so sneaky...">>), "\r\n",
+        "\r\n",
+        to_hex(size(Part1)), "\r\n",
+        Part1, "\r\n",
+        to_hex(size(Part2)), "\r\n",
+        Part2, "\r\n",
+        "0\r\n",
+        "\r\n"],
+
+    {Code, Json} = do_request(Data),
+    etap:is(Code, 400, "Invalid MD5 header causes an error: chunked"),
+    etap:is(
+        get_json(Json, [<<"error">>]),
+        <<"content_md5_mismatch">>,
+        "Body indicates reason for failure."
+    ).
+
+test_chunked_with_invalid_md5_trailer() ->
+    AttData = <<"We all live in a yellow submarine!">>,
+    <<Part1:21/binary, Part2:13/binary>> = AttData,
+    Data = [
+        "PUT /", test_db_name(), "/", docid(), "/readme.txt HTTP/1.1\r\n",
+        "Content-Type: text/plain\r\n",
+        "Transfer-Encoding: chunked\r\n",
+        "Trailer: Content-MD5\r\n",
+        "\r\n",
+        to_hex(size(Part1)), "\r\n",
+        Part1, "\r\n",
+        to_hex(size(Part2)), "\r\n",
+        Part2, "\r\n",
+        "0\r\n",
+        "Content-MD5: ", base64:encode(<<"Kool-Aid Fountain!">>), "\r\n",
+        "\r\n"],
+
+    {Code, Json} = do_request(Data),
+    etap:is(Code, 400, "Invalid MD5 Trailer causes an error"),
+    etap:is(
+        get_json(Json, [<<"error">>]),
+        <<"content_md5_mismatch">>,
+        "Body indicates reason for failure."
+    ).
+
+
+get_socket() ->
+    Options = [binary, {packet, 0}, {active, false}],
+    {ok, Sock} = gen_tcp:connect(get(addr), get(port), Options),
+    Sock.
+
+do_request(Request) ->
+    Sock = get_socket(),
+    gen_tcp:send(Sock, list_to_binary(lists:flatten(Request))),
+    timer:sleep(1000),
+    {ok, R} = gen_tcp:recv(Sock, 0),
+    gen_tcp:close(Sock),
+    [Header, Body] = re:split(R, "\r\n\r\n", [{return, binary}]),
+    {ok, {http_response, _, Code, _}, _} =
+        erlang:decode_packet(http, Header, []),
+    Json = ejson:decode(Body),
+    {Code, Json}.
+
+get_json(Json, Path) ->
+    couch_util:get_nested_json_value(Json, Path).
+
+to_hex(Val) ->
+    to_hex(Val, []).
+
+to_hex(0, Acc) ->
+    Acc;
+to_hex(Val, Acc) ->
+    to_hex(Val div 16, [hex_char(Val rem 16) | Acc]).
+
+hex_char(V) when V < 10 -> $0 + V;
+hex_char(V) -> $A + V - 10.
+

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/src/test/etap/140-attachment-comp.t
----------------------------------------------------------------------
diff --git a/src/test/etap/140-attachment-comp.t b/src/test/etap/140-attachment-comp.t
new file mode 100755
index 0000000..6f075ce
--- /dev/null
+++ b/src/test/etap/140-attachment-comp.t
@@ -0,0 +1,728 @@
+#!/usr/bin/env escript
+%% -*- erlang -*-
+
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+%   http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+test_db_name() ->
+    <<"couch_test_atts_compression">>.
+
+main(_) ->
+    test_util:init_code_path(),
+
+    etap:plan(85),
+    case (catch test()) of
+        ok ->
+            etap:end_tests();
+        Other ->
+            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
+            etap:bail(Other)
+    end,
+    ok.
+
+test() ->
+    couch_server_sup:start_link(test_util:config_files()),
+    put(addr, couch_config:get("httpd", "bind_address", "127.0.0.1")),
+    put(port, integer_to_list(mochiweb_socket_server:get(couch_httpd, port))),
+    timer:sleep(1000),
+    couch_server:delete(test_db_name(), []),
+    couch_db:create(test_db_name(), []),
+
+    couch_config:set("attachments", "compression_level", "8", false),
+    couch_config:set("attachments", "compressible_types", "text/*", false),
+
+    create_1st_text_att(),
+    create_1st_png_att(),
+    create_2nd_text_att(),
+    create_2nd_png_att(),
+
+    tests_for_1st_text_att(),
+    tests_for_1st_png_att(),
+    tests_for_2nd_text_att(),
+    tests_for_2nd_png_att(),
+
+    create_already_compressed_att(db_url() ++ "/doc_comp_att", "readme.txt"),
+    test_already_compressed_att(db_url() ++ "/doc_comp_att", "readme.txt"),
+
+    test_create_already_compressed_att_with_invalid_content_encoding(
+        db_url() ++ "/doc_att_deflate",
+        "readme.txt",
+        zlib:compress(test_text_data()),
+        "deflate"
+    ),
+
+    % COUCHDB-1711 - avoid weird timing/scheduling/request handling issue
+    timer:sleep(100),
+
+    test_create_already_compressed_att_with_invalid_content_encoding(
+        db_url() ++ "/doc_att_compress",
+        "readme.txt",
+        % Note: As of OTP R13B04, it seems there's no LZW compression
+        % (i.e. UNIX compress utility implementation) lib in OTP.
+        % However there's a simple working Erlang implementation at:
+        % http://scienceblogs.com/goodmath/2008/01/simple_lempelziv_compression_i.php
+        test_text_data(),
+        "compress"
+    ),
+
+    test_compressible_type_with_parameters(),
+
+    timer:sleep(3000), % to avoid mochiweb socket closed exceptions
+    couch_server:delete(test_db_name(), []),
+    couch_server_sup:stop(),
+    ok.
+
+db_url() ->
+    "http://" ++ get(addr) ++ ":" ++ get(port) ++ "/" ++
+    binary_to_list(test_db_name()).
+
+create_1st_text_att() ->
+    {ok, Code, _Headers, _Body} = test_util:request(
+        db_url() ++ "/testdoc1/readme.txt",
+        [{"Content-Type", "text/plain"}],
+        put,
+        test_text_data()),
+    etap:is(Code, 201, "Created text attachment using the standalone api"),
+    ok.
+
+create_1st_png_att() ->
+    {ok, Code, _Headers, _Body} = test_util:request(
+        db_url() ++ "/testdoc2/icon.png",
+        [{"Content-Type", "image/png"}],
+        put,
+        test_png_data()),
+    etap:is(Code, 201, "Created png attachment using the standalone api"),
+    ok.
+
+% create a text attachment using the non-standalone attachment api
+create_2nd_text_att() ->
+    DocJson = {[
+        {<<"_attachments">>, {[
+            {<<"readme.txt">>, {[
+                {<<"content_type">>, <<"text/plain">>},
+                {<<"data">>, base64:encode(test_text_data())}
+            ]}
+        }]}}
+    ]},
+    {ok, Code, _Headers, _Body} = test_util:request(
+        db_url() ++ "/testdoc3",
+        [{"Content-Type", "application/json"}],
+        put,
+        ejson:encode(DocJson)),
+    etap:is(Code, 201, "Created text attachment using the non-standalone api"),
+    ok.
+
+% create a png attachment using the non-standalone attachment api
+create_2nd_png_att() ->
+    DocJson = {[
+        {<<"_attachments">>, {[
+            {<<"icon.png">>, {[
+                {<<"content_type">>, <<"image/png">>},
+                {<<"data">>, base64:encode(test_png_data())}
+            ]}
+        }]}}
+    ]},
+    {ok, Code, _Headers, _Body} = test_util:request(
+        db_url() ++ "/testdoc4",
+        [{"Content-Type", "application/json"}],
+        put,
+        ejson:encode(DocJson)),
+    etap:is(Code, 201, "Created png attachment using the non-standalone api"),
+    ok.
+
+create_already_compressed_att(DocUri, AttName) ->
+    {ok, Code, _Headers, _Body} = test_util:request(
+        DocUri ++ "/" ++ AttName,
+        [{"Content-Type", "text/plain"}, {"Content-Encoding", "gzip"}],
+        put,
+        zlib:gzip(test_text_data())),
+    etap:is(
+        Code,
+        201,
+        "Created already compressed attachment using the standalone api"
+    ),
+    ok.
+
+tests_for_1st_text_att() ->
+    test_get_1st_text_att_with_accept_encoding_gzip(),
+    test_get_1st_text_att_without_accept_encoding_header(),
+    test_get_1st_text_att_with_accept_encoding_deflate(),
+    test_get_1st_text_att_with_accept_encoding_deflate_only(),
+    test_get_doc_with_1st_text_att(),
+    test_1st_text_att_stub().
+
+tests_for_1st_png_att() ->
+    test_get_1st_png_att_without_accept_encoding_header(),
+    test_get_1st_png_att_with_accept_encoding_gzip(),
+    test_get_1st_png_att_with_accept_encoding_deflate(),
+    test_get_doc_with_1st_png_att(),
+    test_1st_png_att_stub().
+
+tests_for_2nd_text_att() ->
+    test_get_2nd_text_att_with_accept_encoding_gzip(),
+    test_get_2nd_text_att_without_accept_encoding_header(),
+    test_get_doc_with_2nd_text_att(),
+    test_2nd_text_att_stub().
+
+tests_for_2nd_png_att() ->
+    test_get_2nd_png_att_without_accept_encoding_header(),
+    test_get_2nd_png_att_with_accept_encoding_gzip(),
+    test_get_doc_with_2nd_png_att(),
+    test_2nd_png_att_stub().
+
+test_get_1st_text_att_with_accept_encoding_gzip() ->
+    {ok, Code, Headers, Body} = test_util:request(
+        db_url() ++ "/testdoc1/readme.txt",
+        [{"Accept-Encoding", "gzip"}],
+        get),
+    etap:is(Code, 200, "HTTP response code is 200"),
+    Gziped = lists:member({"Content-Encoding", "gzip"}, Headers),
+    etap:is(Gziped, true, "received body is gzipped"),
+    Uncompressed = zlib:gunzip(iolist_to_binary(Body)),
+    etap:is(
+        Uncompressed,
+        test_text_data(),
+        "received data for the 1st text attachment is ok"
+    ),
+    ok.
+
+test_get_1st_text_att_without_accept_encoding_header() ->
+    {ok, Code, Headers, Body} = test_util:request(
+        db_url() ++ "/testdoc1/readme.txt",
+        [],
+        get),
+    etap:is(Code, 200, "HTTP response code is 200"),
+    Gziped = lists:member({"Content-Encoding", "gzip"}, Headers),
+    etap:is(Gziped, false, "received body is not gzipped"),
+    etap:is(
+        iolist_to_binary(Body),
+        test_text_data(),
+        "received data for the 1st text attachment is ok"
+    ),
+    ok.
+
+test_get_1st_text_att_with_accept_encoding_deflate() ->
+    {ok, Code, Headers, Body} = test_util:request(
+        db_url() ++ "/testdoc1/readme.txt",
+        [{"Accept-Encoding", "deflate"}],
+        get),
+    etap:is(Code, 200, "HTTP response code is 200"),
+    Gziped = lists:member({"Content-Encoding", "gzip"}, Headers),
+    etap:is(Gziped, false, "received body is not gzipped"),
+    Deflated = lists:member({"Content-Encoding", "deflate"}, Headers),
+    etap:is(Deflated, false, "received body is not deflated"),
+    etap:is(
+        iolist_to_binary(Body),
+        test_text_data(),
+        "received data for the 1st text attachment is ok"
+    ),
+    ok.
+
+test_get_1st_text_att_with_accept_encoding_deflate_only() ->
+    {ok, Code, _Headers, _Body} = test_util:request(
+        db_url() ++ "/testdoc1/readme.txt",
+        [{"Accept-Encoding", "deflate, *;q=0"}],
+        get),
+    etap:is(
+        Code,
+        406,
+        "HTTP response code is 406 for an unsupported content encoding request"
+    ),
+    ok.
+
+test_get_1st_png_att_without_accept_encoding_header() ->
+    {ok, Code, Headers, Body} = test_util:request(
+        db_url() ++ "/testdoc2/icon.png",
+        [],
+        get),
+    etap:is(Code, 200, "HTTP response code is 200"),
+    Encoding = couch_util:get_value("Content-Encoding", Headers),
+    etap:is(Encoding, undefined, "received body is not gzipped"),
+    etap:is(
+        iolist_to_binary(Body),
+        test_png_data(),
+        "received data for the 1st png attachment is ok"
+    ),
+    ok.
+
+test_get_1st_png_att_with_accept_encoding_gzip() ->
+    {ok, Code, Headers, Body} = test_util:request(
+        db_url() ++ "/testdoc2/icon.png",
+        [{"Accept-Encoding", "gzip"}],
+        get),
+    etap:is(Code, 200, "HTTP response code is 200"),
+    Encoding = couch_util:get_value("Content-Encoding", Headers),
+    etap:is(Encoding, undefined, "received body is not gzipped"),
+    etap:is(
+        iolist_to_binary(Body),
+        test_png_data(),
+        "received data for the 1st png attachment is ok"
+    ),
+    ok.
+
+test_get_1st_png_att_with_accept_encoding_deflate() ->
+    {ok, Code, Headers, Body} = test_util:request(
+        db_url() ++ "/testdoc2/icon.png",
+        [{"Accept-Encoding", "deflate"}],
+        get),
+    etap:is(Code, 200, "HTTP response code is 200"),
+    Encoding = couch_util:get_value("Content-Encoding", Headers),
+    etap:is(Encoding, undefined, "received body is in identity form"),
+    etap:is(
+        iolist_to_binary(Body),
+        test_png_data(),
+        "received data for the 1st png attachment is ok"
+    ),
+    ok.
+
+test_get_doc_with_1st_text_att() ->
+    {ok, Code, _Headers, Body} = test_util:request(
+        db_url() ++ "/testdoc1?attachments=true",
+        [{"Accept", "application/json"}],
+        get),
+    etap:is(Code, 200, "HTTP response code is 200"),
+    Json = ejson:decode(Body),
+    TextAttJson = couch_util:get_nested_json_value(
+        Json,
+        [<<"_attachments">>, <<"readme.txt">>]
+    ),
+    TextAttType = couch_util:get_nested_json_value(
+        TextAttJson,
+        [<<"content_type">>]
+    ),
+    TextAttData = couch_util:get_nested_json_value(
+        TextAttJson,
+        [<<"data">>]
+    ),
+    etap:is(
+        TextAttType,
+        <<"text/plain">>,
+        "1st text attachment has type text/plain"
+    ),
+    %% check the attachment's data is the base64 encoding of the plain text
+    %% and not the base64 encoding of the gzipped plain text
+    etap:is(
+        TextAttData,
+        base64:encode(test_text_data()),
+        "1st text attachment data is properly base64 encoded"
+    ),
+    ok.
+
+test_1st_text_att_stub() ->
+    {ok, Code, _Headers, Body} = test_util:request(
+        db_url() ++ "/testdoc1?att_encoding_info=true",
+        [],
+        get),
+    etap:is(Code, 200, "HTTP response code is 200"),
+    Json = ejson:decode(Body),
+    {TextAttJson} = couch_util:get_nested_json_value(
+        Json,
+        [<<"_attachments">>, <<"readme.txt">>]
+    ),
+    TextAttLength = couch_util:get_value(<<"length">>, TextAttJson),
+    etap:is(
+        TextAttLength,
+        byte_size(test_text_data()),
+        "1st text attachment stub length matches the uncompressed length"
+    ),
+    TextAttEncoding = couch_util:get_value(<<"encoding">>, TextAttJson),
+    etap:is(
+        TextAttEncoding,
+        <<"gzip">>,
+        "1st text attachment stub has the encoding field set to gzip"
+    ),
+    TextAttEncLength = couch_util:get_value(<<"encoded_length">>, TextAttJson),
+    etap:is(
+        TextAttEncLength,
+        iolist_size(zlib:gzip(test_text_data())),
+        "1st text attachment stub encoded_length matches the compressed length"
+    ),
+    ok.
+
+test_get_doc_with_1st_png_att() ->
+    {ok, Code, _Headers, Body} = test_util:request(
+        db_url() ++ "/testdoc2?attachments=true",
+        [{"Accept", "application/json"}],
+        get),
+    etap:is(Code, 200, "HTTP response code is 200"),
+    Json = ejson:decode(Body),
+    PngAttJson = couch_util:get_nested_json_value(
+        Json,
+        [<<"_attachments">>, <<"icon.png">>]
+    ),
+    PngAttType = couch_util:get_nested_json_value(
+        PngAttJson,
+        [<<"content_type">>]
+    ),
+    PngAttData = couch_util:get_nested_json_value(
+        PngAttJson,
+        [<<"data">>]
+    ),
+    etap:is(PngAttType, <<"image/png">>, "attachment has type image/png"),
+    etap:is(
+        PngAttData,
+        base64:encode(test_png_data()),
+        "1st png attachment data is properly base64 encoded"
+    ),
+    ok.
+
+test_1st_png_att_stub() ->
+    {ok, Code, _Headers, Body} = test_util:request(
+        db_url() ++ "/testdoc2?att_encoding_info=true",
+        [{"Accept", "application/json"}],
+        get),
+    etap:is(Code, 200, "HTTP response code is 200"),
+    Json = ejson:decode(Body),
+    {PngAttJson} = couch_util:get_nested_json_value(
+        Json,
+        [<<"_attachments">>, <<"icon.png">>]
+    ),
+    PngAttLength = couch_util:get_value(<<"length">>, PngAttJson),
+    etap:is(
+        PngAttLength,
+        byte_size(test_png_data()),
+        "1st png attachment stub length matches the uncompressed length"
+    ),
+    PngEncoding = couch_util:get_value(<<"encoding">>, PngAttJson),
+    etap:is(
+        PngEncoding,
+        undefined,
+        "1st png attachment stub doesn't have an encoding field"
+    ),
+    PngEncLength = couch_util:get_value(<<"encoded_length">>, PngAttJson),
+    etap:is(
+        PngEncLength,
+        undefined,
+        "1st png attachment stub doesn't have an encoded_length field"
+    ),
+    ok.
+
+test_get_2nd_text_att_with_accept_encoding_gzip() ->
+    {ok, Code, Headers, Body} = test_util:request(
+        db_url() ++ "/testdoc3/readme.txt",
+        [{"Accept-Encoding", "gzip"}],
+        get),
+    etap:is(Code, 200, "HTTP response code is 200"),
+    Gziped = lists:member({"Content-Encoding", "gzip"}, Headers),
+    etap:is(Gziped, true, "received body is gzipped"),
+    Uncompressed = zlib:gunzip(iolist_to_binary(Body)),
+    etap:is(
+        Uncompressed,
+        test_text_data(),
+        "received data for the 2nd text attachment is ok"
+    ),
+    ok.
+
+test_get_2nd_text_att_without_accept_encoding_header() ->
+    {ok, Code, Headers, Body} = test_util:request(
+        db_url() ++ "/testdoc3/readme.txt",
+        [],
+        get),
+    etap:is(Code, 200, "HTTP response code is 200"),
+    Gziped = lists:member({"Content-Encoding", "gzip"}, Headers),
+    etap:is(Gziped, false, "received body is not gzipped"),
+    etap:is(
+        Body,
+        test_text_data(),
+        "received data for the 2nd text attachment is ok"
+    ),
+    ok.
+
+test_get_2nd_png_att_without_accept_encoding_header() ->
+    {ok, Code, Headers, Body} = test_util:request(
+        db_url() ++ "/testdoc4/icon.png",
+        [],
+        get),
+    etap:is(Code, 200, "HTTP response code is 200"),
+    Gziped = lists:member({"Content-Encoding", "gzip"}, Headers),
+    etap:is(Gziped, false, "received body is not gzipped"),
+    etap:is(
+        Body,
+        test_png_data(),
+        "received data for the 2nd png attachment is ok"
+    ),
+    ok.
+
+test_get_2nd_png_att_with_accept_encoding_gzip() ->
+    {ok, Code, Headers, Body} = test_util:request(
+        db_url() ++ "/testdoc4/icon.png",
+        [{"Accept-Encoding", "gzip"}],
+        get),
+    etap:is(Code, 200, "HTTP response code is 200"),
+    Gziped = lists:member({"Content-Encoding", "gzip"}, Headers),
+    etap:is(Gziped, false, "received body is not gzipped"),
+    etap:is(
+        Body,
+        test_png_data(),
+        "received data for the 2nd png attachment is ok"
+    ),
+    ok.
+
+test_get_doc_with_2nd_text_att() ->
+    {ok, Code, _Headers, Body} = test_util:request(
+        db_url() ++ "/testdoc3?attachments=true",
+        [{"Accept", "application/json"}],
+        get),
+    etap:is(Code, 200, "HTTP response code is 200"),
+    Json = ejson:decode(Body),
+    TextAttJson = couch_util:get_nested_json_value(
+        Json,
+        [<<"_attachments">>, <<"readme.txt">>]
+    ),
+    TextAttType = couch_util:get_nested_json_value(
+        TextAttJson,
+        [<<"content_type">>]
+    ),
+    TextAttData = couch_util:get_nested_json_value(
+        TextAttJson,
+        [<<"data">>]
+    ),
+    etap:is(TextAttType, <<"text/plain">>, "attachment has type text/plain"),
+    %% check the attachment's data is the base64 encoding of the plain text
+    %% and not the base64 encoding of the gziped plain text
+    etap:is(
+        TextAttData,
+        base64:encode(test_text_data()),
+        "2nd text attachment data is properly base64 encoded"
+    ),
+    ok.
+
+test_2nd_text_att_stub() ->
+    {ok, Code, _Headers, Body} = test_util:request(
+        db_url() ++ "/testdoc3?att_encoding_info=true",
+        [],
+        get),
+    etap:is(Code, 200, "HTTP response code is 200"),
+    Json = ejson:decode(Body),
+    {TextAttJson} = couch_util:get_nested_json_value(
+        Json,
+        [<<"_attachments">>, <<"readme.txt">>]
+    ),
+    TextAttLength = couch_util:get_value(<<"length">>, TextAttJson),
+    etap:is(
+        TextAttLength,
+        byte_size(test_text_data()),
+        "2nd text attachment stub length matches the uncompressed length"
+    ),
+    TextAttEncoding = couch_util:get_value(<<"encoding">>, TextAttJson),
+    etap:is(
+        TextAttEncoding,
+        <<"gzip">>,
+        "2nd text attachment stub has the encoding field set to gzip"
+    ),
+    TextAttEncLength = couch_util:get_value(<<"encoded_length">>, TextAttJson),
+    etap:is(
+        TextAttEncLength,
+        iolist_size(zlib:gzip(test_text_data())),
+        "2nd text attachment stub encoded_length matches the compressed length"
+    ),
+    ok.
+
+test_get_doc_with_2nd_png_att() ->
+    {ok, Code, _Headers, Body} = test_util:request(
+        db_url() ++ "/testdoc4?attachments=true",
+        [{"Accept", "application/json"}],
+        get),
+    etap:is(Code, 200, "HTTP response code is 200"),
+    Json = ejson:decode(Body),
+    PngAttJson = couch_util:get_nested_json_value(
+        Json,
+        [<<"_attachments">>, <<"icon.png">>]
+    ),
+    PngAttType = couch_util:get_nested_json_value(
+        PngAttJson,
+        [<<"content_type">>]
+    ),
+    PngAttData = couch_util:get_nested_json_value(
+        PngAttJson,
+        [<<"data">>]
+    ),
+    etap:is(PngAttType, <<"image/png">>, "attachment has type image/png"),
+    etap:is(
+        PngAttData,
+        base64:encode(test_png_data()),
+        "2nd png attachment data is properly base64 encoded"
+    ),
+    ok.
+
+test_2nd_png_att_stub() ->
+    {ok, Code, _Headers, Body} = test_util:request(
+        db_url() ++ "/testdoc4?att_encoding_info=true",
+        [],
+        get),
+    etap:is(Code, 200, "HTTP response code is 200"),
+    Json = ejson:decode(Body),
+    {PngAttJson} = couch_util:get_nested_json_value(
+        Json,
+        [<<"_attachments">>, <<"icon.png">>]
+    ),
+    PngAttLength = couch_util:get_value(<<"length">>, PngAttJson),
+    etap:is(
+        PngAttLength,
+        byte_size(test_png_data()),
+        "2nd png attachment stub length matches the uncompressed length"
+    ),
+    PngEncoding = couch_util:get_value(<<"encoding">>, PngAttJson),
+    etap:is(
+        PngEncoding,
+        undefined,
+        "2nd png attachment stub doesn't have an encoding field"
+    ),
+    PngEncLength = couch_util:get_value(<<"encoded_length">>, PngAttJson),
+    etap:is(
+        PngEncLength,
+        undefined,
+        "2nd png attachment stub doesn't have an encoded_length field"
+    ),
+    ok.
+
+test_already_compressed_att(DocUri, AttName) ->
+    test_get_already_compressed_att_with_accept_gzip(DocUri, AttName),
+    test_get_already_compressed_att_without_accept(DocUri, AttName),
+    test_get_already_compressed_att_stub(DocUri, AttName).
+
+test_get_already_compressed_att_with_accept_gzip(DocUri, AttName) ->
+    {ok, Code, Headers, Body} = test_util:request(
+        DocUri ++ "/" ++ AttName,
+        [{"Accept-Encoding", "gzip"}],
+        get),
+    etap:is(Code, 200, "HTTP response code is 200"),
+    Gziped = lists:member({"Content-Encoding", "gzip"}, Headers),
+    etap:is(Gziped, true, "received body is gzipped"),
+    etap:is(
+        Body,
+        zlib:gzip(test_text_data()),
+        "received data for the already compressed attachment is ok"
+    ),
+    ok.
+
+test_get_already_compressed_att_without_accept(DocUri, AttName) ->
+    {ok, Code, Headers, Body} = test_util:request(
+        DocUri ++ "/" ++ AttName,
+        [],
+        get),
+    etap:is(Code, 200, "HTTP response code is 200"),
+    Gziped = lists:member({"Content-Encoding", "gzip"}, Headers),
+    etap:is(Gziped, false, "received body is not gzipped"),
+    etap:is(
+        Body,
+        test_text_data(),
+        "received data for the already compressed attachment is ok"
+    ),
+    ok.
+
+test_get_already_compressed_att_stub(DocUri, AttName) ->
+    {ok, Code, _Headers, Body} = test_util:request(
+        DocUri ++ "?att_encoding_info=true",
+        [],
+        get),
+    etap:is(Code, 200, "HTTP response code is 200"),
+    Json = ejson:decode(Body),
+    {AttJson} = couch_util:get_nested_json_value(
+        Json,
+        [<<"_attachments">>, iolist_to_binary(AttName)]
+    ),
+    AttLength = couch_util:get_value(<<"length">>, AttJson),
+    etap:is(
+        AttLength,
+        iolist_size((zlib:gzip(test_text_data()))),
+        "Already compressed attachment stub length matches the "
+        "compressed length"
+    ),
+    Encoding = couch_util:get_value(<<"encoding">>, AttJson),
+    etap:is(
+        Encoding,
+        <<"gzip">>,
+        "Already compressed attachment stub has the encoding field set to gzip"
+    ),
+    EncLength = couch_util:get_value(<<"encoded_length">>, AttJson),
+    etap:is(
+        EncLength,
+        AttLength,
+        "Already compressed attachment stub encoded_length matches the "
+        "length field value"
+    ),
+    ok.
+
+test_create_already_compressed_att_with_invalid_content_encoding(
+    DocUri, AttName, AttData, Encoding) ->
+    {ok, Code, _Headers, _Body} = test_util:request(
+        DocUri ++ "/" ++ AttName,
+        [{"Content-Encoding", Encoding}, {"Content-Type", "text/plain"}],
+        put,
+        AttData),
+    etap:is(
+        Code,
+        415,
+        "Couldn't create an already compressed attachment using the "
+        "unsupported encoding '" ++ Encoding ++ "'"
+    ),
+    ok.
+
+test_compressible_type_with_parameters() ->
+    {ok, Code, _Headers, _Body} = test_util:request(
+        db_url() ++ "/testdoc5/readme.txt",
+        [{"Content-Type", "text/plain; charset=UTF-8"}],
+        put,
+        test_text_data()),
+    etap:is(Code, 201, "Created text attachment with MIME type "
+        "'text/plain; charset=UTF-8' using the standalone api"),
+    {ok, Code2, Headers2, Body} = test_util:request(
+        db_url() ++ "/testdoc5/readme.txt",
+        [{"Accept-Encoding", "gzip"}],
+        get),
+    etap:is(Code2, 200, "HTTP response code is 200"),
+    Gziped = lists:member({"Content-Encoding", "gzip"}, Headers2),
+    etap:is(Gziped, true, "received body is gzipped"),
+    Uncompressed = zlib:gunzip(iolist_to_binary(Body)),
+    etap:is(Uncompressed, test_text_data(), "received data, after gunzip, matches the original"),
+    {ok, Code3, _Headers3, Body3} = test_util:request(
+        db_url() ++ "/testdoc5?att_encoding_info=true",
+        [],
+        get),
+    etap:is(Code3, 200, "HTTP response code is 200"),
+    Json = ejson:decode(Body3),
+    {TextAttJson} = couch_util:get_nested_json_value(
+        Json,
+        [<<"_attachments">>, <<"readme.txt">>]
+    ),
+    TextAttLength = couch_util:get_value(<<"length">>, TextAttJson),
+    etap:is(
+        TextAttLength,
+        byte_size(test_text_data()),
+        "text attachment stub length matches the uncompressed length"
+    ),
+    TextAttEncoding = couch_util:get_value(<<"encoding">>, TextAttJson),
+    etap:is(
+        TextAttEncoding,
+        <<"gzip">>,
+        "text attachment stub has the encoding field set to gzip"
+    ),
+    TextAttEncLength = couch_util:get_value(<<"encoded_length">>, TextAttJson),
+    etap:is(
+        TextAttEncLength,
+        iolist_size(zlib:gzip(test_text_data())),
+        "text attachment stub encoded_length matches the compressed length"
+    ),
+    ok.
+
+test_png_data() ->
+    {ok, Data} = file:read_file(
+        test_util:source_file("share/www/image/logo.png")
+    ),
+    Data.
+
+test_text_data() ->
+    {ok, Data} = file:read_file(
+        test_util:source_file("README.rst")
+    ),
+    Data.
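
The att_encoding_info assertions above all boil down to the same zlib arithmetic: length is the uncompressed byte size, encoded_length is the size of the gzip form, and a body served with Content-Encoding: gzip gunzips back to the original. A tiny self-contained check of that arithmetic, with a made-up sample string standing in for README.rst:

    #!/usr/bin/env escript
    %% Illustrative sketch only: the length / encoded_length relationship the
    %% att_encoding_info stubs above assert, on a made-up sample string.
    main(_) ->
        Data = <<"Some compressible plain text. Some compressible plain text.">>,
        Gz = zlib:gzip(Data),
        true = (zlib:gunzip(Gz) =:= Data),   % the gzip round trip is lossless
        io:format("length=~p encoded_length=~p~n",
                  [byte_size(Data), iolist_size(Gz)]).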


[03/12] move test -> src/test

Posted by be...@apache.org.
http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/test/etap/150-invalid-view-seq.t
----------------------------------------------------------------------
diff --git a/test/etap/150-invalid-view-seq.t b/test/etap/150-invalid-view-seq.t
deleted file mode 100755
index 681875a..0000000
--- a/test/etap/150-invalid-view-seq.t
+++ /dev/null
@@ -1,183 +0,0 @@
-#!/usr/bin/env escript
-%% -*- erlang -*-
-
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
--record(user_ctx, {
-    name = null,
-    roles = [],
-    handler
-}).
-
-test_db_name() ->
-    <<"couch_test_invalid_view_seq">>.
-
-main(_) ->
-    test_util:init_code_path(),
-
-    etap:plan(10),
-    case (catch test()) of
-        ok ->
-            etap:end_tests();
-        Other ->
-            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
-            etap:bail(Other)
-    end,
-    ok.
-
-%% NOTE: since during the test we stop the server,
-%%       a huge and ugly but harmless stack trace is sent to stderr
-%%
-test() ->
-    couch_server_sup:start_link(test_util:config_files()),
-    timer:sleep(1000),
-    delete_db(),
-    create_db(),
-
-    create_docs(),
-    create_design_doc(),
-
-    % make DB file backup
-    backup_db_file(),
-
-    put(addr, couch_config:get("httpd", "bind_address", "127.0.0.1")),
-    put(port, integer_to_list(mochiweb_socket_server:get(couch_httpd, port))),
-
-    create_new_doc(),
-    query_view_before_restore_backup(),
-
-    % restore DB file backup after querying view
-    restore_backup_db_file(),
-
-    query_view_after_restore_backup(),
-
-    delete_db(),
-    couch_server_sup:stop(),
-    ok.
-
-admin_user_ctx() ->
-    {user_ctx, #user_ctx{roles=[<<"_admin">>]}}.
-
-create_db() ->
-    {ok, _} = couch_db:create(test_db_name(), [admin_user_ctx()]).
-
-delete_db() ->
-    couch_server:delete(test_db_name(), [admin_user_ctx()]).
-
-create_docs() ->
-    {ok, Db} = couch_db:open(test_db_name(), [admin_user_ctx()]),
-    Doc1 = couch_doc:from_json_obj({[
-        {<<"_id">>, <<"doc1">>},
-        {<<"value">>, 1}
-
-    ]}),
-    Doc2 = couch_doc:from_json_obj({[
-        {<<"_id">>, <<"doc2">>},
-        {<<"value">>, 2}
-
-    ]}),
-    Doc3 = couch_doc:from_json_obj({[
-        {<<"_id">>, <<"doc3">>},
-        {<<"value">>, 3}
-
-    ]}),
-    {ok, _} = couch_db:update_docs(Db, [Doc1, Doc2, Doc3]),
-    couch_db:ensure_full_commit(Db),
-    couch_db:close(Db).
-
-create_design_doc() ->
-    {ok, Db} = couch_db:open(test_db_name(), [admin_user_ctx()]),
-    DDoc = couch_doc:from_json_obj({[
-        {<<"_id">>, <<"_design/foo">>},
-        {<<"language">>, <<"javascript">>},
-        {<<"views">>, {[
-            {<<"bar">>, {[
-                {<<"map">>, <<"function(doc) { emit(doc.value, 1); }">>}
-            ]}}
-        ]}}
-    ]}),
-    {ok, _} = couch_db:update_docs(Db, [DDoc]),
-    couch_db:ensure_full_commit(Db),
-    couch_db:close(Db).
-
-backup_db_file() ->
-    DbFile = test_util:build_file("tmp/lib/" ++
-        binary_to_list(test_db_name()) ++ ".couch"),
-    {ok, _} = file:copy(DbFile, DbFile ++ ".backup"),
-    ok.
-
-create_new_doc() ->
-    {ok, Db} = couch_db:open(test_db_name(), [admin_user_ctx()]),
-    Doc666 = couch_doc:from_json_obj({[
-        {<<"_id">>, <<"doc666">>},
-        {<<"value">>, 999}
-
-    ]}),
-    {ok, _} = couch_db:update_docs(Db, [Doc666]),
-    couch_db:ensure_full_commit(Db),
-    couch_db:close(Db).
-
-db_url() ->
-    "http://" ++ get(addr) ++ ":" ++ get(port) ++ "/" ++
-    binary_to_list(test_db_name()).
-
-query_view_before_restore_backup() ->
-    {ok, Code, _Headers, Body} = test_util:request(
-        db_url() ++ "/_design/foo/_view/bar", [], get),
-    etap:is(Code, 200, "Got view response before restoring backup."),
-    ViewJson = ejson:decode(Body),
-    Rows = couch_util:get_nested_json_value(ViewJson, [<<"rows">>]),
-    HasDoc1 = has_doc("doc1", Rows),
-    HasDoc2 = has_doc("doc2", Rows),
-    HasDoc3 = has_doc("doc3", Rows),
-    HasDoc666 = has_doc("doc666", Rows),
-    etap:is(HasDoc1, true, "Before backup restore, view has doc1"),
-    etap:is(HasDoc2, true, "Before backup restore, view has doc2"),
-    etap:is(HasDoc3, true, "Before backup restore, view has doc3"),
-    etap:is(HasDoc666, true, "Before backup restore, view has doc666"),
-    ok.
-
-has_doc(DocId1, Rows) ->
-    DocId = iolist_to_binary(DocId1),
-    lists:any(
-        fun({R}) -> lists:member({<<"id">>, DocId}, R) end,
-        Rows
-    ).
-
-restore_backup_db_file() ->
-    couch_server_sup:stop(),
-    timer:sleep(3000),
-    DbFile = test_util:build_file("tmp/lib/" ++
-        binary_to_list(test_db_name()) ++ ".couch"),
-    ok = file:delete(DbFile),
-    ok = file:rename(DbFile ++ ".backup", DbFile),
-    couch_server_sup:start_link(test_util:config_files()),
-    timer:sleep(1000),
-    put(port, integer_to_list(mochiweb_socket_server:get(couch_httpd, port))),
-    ok.
-
-query_view_after_restore_backup() ->
-    {ok, Code, _Headers, Body} = test_util:request(
-        db_url() ++ "/_design/foo/_view/bar", [], get),
-    etap:is(Code, 200, "Got view response after restoring backup."),
-    ViewJson = ejson:decode(Body),
-    Rows = couch_util:get_nested_json_value(ViewJson, [<<"rows">>]),
-    HasDoc1 = has_doc("doc1", Rows),
-    HasDoc2 = has_doc("doc2", Rows),
-    HasDoc3 = has_doc("doc3", Rows),
-    HasDoc666 = has_doc("doc666", Rows),
-    etap:is(HasDoc1, true, "After backup restore, view has doc1"),
-    etap:is(HasDoc2, true, "After backup restore, view has doc2"),
-    etap:is(HasDoc3, true, "After backup restore, view has doc3"),
-    etap:is(HasDoc666, false, "After backup restore, view does not have doc666"),
-    ok.
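
The has_doc/2 helper in the file above relies on the ejson term layout, where a decoded JSON object is a one-element tuple wrapping a proplist. A small standalone sketch of the same membership check on hand-built terms (no HTTP or ejson round trip; the row values are invented):

    #!/usr/bin/env escript
    %% Illustrative sketch only: the {Proplist} object shape that has_doc/2
    %% matches on, with invented rows instead of a real view response.
    main(_) ->
        Rows = [
            {[{<<"id">>, <<"doc1">>}, {<<"key">>, 1}, {<<"value">>, 1}]},
            {[{<<"id">>, <<"doc666">>}, {<<"key">>, 999}, {<<"value">>, 1}]}
        ],
        HasDoc = fun(DocId) ->
            lists:any(fun({R}) -> lists:member({<<"id">>, DocId}, R) end, Rows)
        end,
        true = HasDoc(<<"doc1">>),
        false = HasDoc(<<"doc2">>),
        io:format("row membership checks behave as expected~n").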

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/test/etap/160-vhosts.t
----------------------------------------------------------------------
diff --git a/test/etap/160-vhosts.t b/test/etap/160-vhosts.t
deleted file mode 100755
index 46fdd73..0000000
--- a/test/etap/160-vhosts.t
+++ /dev/null
@@ -1,371 +0,0 @@
-#!/usr/bin/env escript
-%% -*- erlang -*-
-
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License.  You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
--record(user_ctx, {
-    name = null,
-    roles = [],
-    handler
-}).
-
-server() ->
-    lists:concat([
-        "http://127.0.0.1:", mochiweb_socket_server:get(couch_httpd, port), "/"
-    ]).
-
-dbname() -> "etap-test-db".
-admin_user_ctx() -> {user_ctx, #user_ctx{roles=[<<"_admin">>]}}.
-
-main(_) ->
-    test_util:init_code_path(),
-
-    etap:plan(20),
-    case (catch test()) of
-        ok ->
-            etap:end_tests();
-        Other ->
-            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
-            etap:bail(Other)
-    end,
-    ok.
-
-test() ->
-    couch_server_sup:start_link(test_util:config_files()),
-    ibrowse:start(),
-    crypto:start(),
-
-    timer:sleep(1000),
-    couch_server:delete(list_to_binary(dbname()), [admin_user_ctx()]),
-    {ok, Db} = couch_db:create(list_to_binary(dbname()), [admin_user_ctx()]),
-
-    Doc = couch_doc:from_json_obj({[
-        {<<"_id">>, <<"doc1">>},
-        {<<"value">>, 666}
-    ]}),
-
-    Doc1 = couch_doc:from_json_obj({[
-        {<<"_id">>, <<"_design/doc1">>},
-        {<<"shows">>, {[
-            {<<"test">>, <<"function(doc, req) {
-    return { json: {
-        requested_path: '/' + req.requested_path.join('/'),
-        path: '/' + req.path.join('/')
-    }};
-}">>}
-        ]}},
-        {<<"rewrites">>, [
-            {[
-                {<<"from">>, <<"/">>},
-                {<<"to">>, <<"_show/test">>}
-            ]}
-        ]}
-    ]}),
-
-    {ok, _} = couch_db:update_docs(Db, [Doc, Doc1]),
-
-    couch_db:ensure_full_commit(Db),
-
-    %% end boilerplate, start test
-
-    ok = couch_config:set("vhosts", "example.com", "/etap-test-db", false),
-    ok = couch_config:set("vhosts", "*.example.com",
-            "/etap-test-db/_design/doc1/_rewrite", false),
-    ok = couch_config:set("vhosts", "example.com/test", "/etap-test-db", false),
-    ok = couch_config:set("vhosts", "example1.com",
-            "/etap-test-db/_design/doc1/_rewrite/", false),
-    ok = couch_config:set("vhosts",":appname.:dbname.example1.com",
-            "/:dbname/_design/:appname/_rewrite/", false),
-    ok = couch_config:set("vhosts", ":dbname.example1.com", "/:dbname", false),
-
-    ok = couch_config:set("vhosts", "*.example2.com", "/*", false),
-    ok = couch_config:set("vhosts", "*.example2.com/test", "/*", false),
-    ok = couch_config:set("vhosts", "*/test", "/etap-test-db", false),
-    ok = couch_config:set("vhosts", "*/test1",
-            "/etap-test-db/_design/doc1/_show/test", false),
-    ok = couch_config:set("vhosts", "example3.com", "/", false),
-
-    %% reload rules
-    couch_httpd_vhost:reload(),
-
-    test_regular_request(),
-    test_vhost_request(),
-    test_vhost_request_with_qs(),
-    test_vhost_request_with_global(),
-    test_vhost_requested_path(),
-    test_vhost_requested_path_path(),
-    test_vhost_request_wildcard(),
-    test_vhost_request_replace_var(),
-    test_vhost_request_replace_var1(),
-    test_vhost_request_replace_wildcard(),
-    test_vhost_request_path(),
-    test_vhost_request_path1(),
-    test_vhost_request_path2(),
-    test_vhost_request_path3(),
-    test_vhost_request_to_root(),
-    test_vhost_request_with_oauth(Db),
-
-    %% restart boilerplate
-    couch_db:close(Db),
-    ok = couch_server:delete(couch_db:name(Db), [admin_user_ctx()]),
-    timer:sleep(3000),
-    couch_server_sup:stop(),
-
-    ok.
-
-test_regular_request() ->
-    case ibrowse:send_req(server(), [], get, []) of
-        {ok, _, _, Body} ->
-            {Props} = ejson:decode(Body),
-            Couchdb = couch_util:get_value(<<"couchdb">>, Props),
-            Version = couch_util:get_value(<<"version">>, Props),
-            Vendor = couch_util:get_value(<<"vendor">>, Props),
-            etap:isnt(Couchdb, undefined, "Found couchdb property"),
-            etap:isnt(Version, undefined, "Found version property"),
-            etap:isnt(Vendor, undefined, "Found vendor property");
-        _Else ->
-            etap:bail("http GET / request failed")
-    end.
-
-test_vhost_request() ->
-    case ibrowse:send_req(server(), [], get, [], [{host_header, "example.com"}]) of
-        {ok, _, _, Body} ->
-            {JsonBody} = ejson:decode(Body),
-            HasDbNameInfo = proplists:is_defined(<<"db_name">>, JsonBody),
-            etap:is(HasDbNameInfo, true, "should return database info");
-        _Else ->
-           etap:is(false, true, <<"ibrowse fail">>)
-    end.
-
-test_vhost_request_with_qs() ->
-    Url = server() ++ "doc1?revs_info=true",
-    case ibrowse:send_req(Url, [], get, [], [{host_header, "example.com"}]) of
-        {ok, _, _, Body} ->
-            {JsonProps} = ejson:decode(Body),
-            HasRevsInfo = proplists:is_defined(<<"_revs_info">>, JsonProps),
-            etap:is(HasRevsInfo, true, "should return _revs_info");
-        _Else ->
-            etap:is(false, true, <<"ibrowse fail">>)
-    end.
-
-test_vhost_request_with_global() ->
-    Url2 = server() ++ "_utils/index.html",
-    case ibrowse:send_req(Url2, [], get, [], [{host_header, "example.com"}]) of
-        {ok, _, _, Body2} ->
-            "<!DOCTYPE" ++ _Foo = Body2,
-            etap:is(true, true, "should serve /_utils even inside vhosts");
-        _Else ->
-            etap:is(false, true, <<"ibrowse fail">>)
-    end.
-
-test_vhost_requested_path() ->
-    case ibrowse:send_req(server(), [], get, [], [{host_header, "example1.com"}]) of
-        {ok, _, _, Body} ->
-            {Json} = ejson:decode(Body),
-            etap:is(case proplists:get_value(<<"requested_path">>, Json) of
-                <<"/">> -> true;
-                _ -> false
-            end, true, <<"requested path in req ok">>);
-        _Else ->
-            etap:is(false, true, <<"ibrowse fail">>)
-    end.
-
-test_vhost_requested_path_path() ->
-    case ibrowse:send_req(server(), [], get, [], [{host_header, "example1.com"}]) of
-        {ok, _, _, Body} ->
-            {Json} = ejson:decode(Body),
-            etap:is(case proplists:get_value(<<"path">>, Json) of
-                <<"/etap-test-db/_design/doc1/_show/test">> -> true;
-                _ -> false
-            end, true, <<"path in req ok">>);
-        _Else ->
-            etap:is(false, true, <<"ibrowse fail">>)
-    end.
-
-test_vhost_request_wildcard()->
-    case ibrowse:send_req(server(), [], get, [], [{host_header, "test.example.com"}]) of
-        {ok, _, _, Body} ->
-            {Json} = ejson:decode(Body),
-            etap:is(case proplists:get_value(<<"path">>, Json) of
-                <<"/etap-test-db/_design/doc1/_show/test">> -> true;
-                _ -> false
-            end, true, <<"wildcard  ok">>);
-        _Else -> etap:is(false, true, <<"ibrowse fail">>)
-    end.
-
-
-test_vhost_request_replace_var() ->
-    case ibrowse:send_req(server(), [], get, [], [{host_header,"etap-test-db.example1.com"}]) of
-        {ok, _, _, Body} ->
-            {JsonBody} = ejson:decode(Body),
-            HasDbNameInfo = proplists:is_defined(<<"db_name">>, JsonBody),
-            etap:is(HasDbNameInfo, true, "should return database info");
-        _Else -> etap:is(false, true, <<"ibrowse fail">>)
-    end.
-
-test_vhost_request_replace_var1() ->
-    case ibrowse:send_req(server(), [], get, [], [{host_header, "doc1.etap-test-db.example1.com"}]) of
-        {ok, _, _, Body} ->
-            {Json} = ejson:decode(Body),
-            etap:is(case proplists:get_value(<<"path">>, Json) of
-                <<"/etap-test-db/_design/doc1/_show/test">> -> true;
-                _ -> false
-            end, true, <<"wildcard  ok">>);
-        _Else -> etap:is(false, true, <<"ibrowse fail">>)
-    end.
-
-test_vhost_request_replace_wildcard() ->
-    case ibrowse:send_req(server(), [], get, [], [{host_header,"etap-test-db.example2.com"}]) of
-        {ok, _, _, Body} ->
-            {JsonBody} = ejson:decode(Body),
-            HasDbNameInfo = proplists:is_defined(<<"db_name">>, JsonBody),
-            etap:is(HasDbNameInfo, true, "should return database info");
-        _Else -> etap:is(false, true, <<"ibrowse fail">>)
-    end.
-
-test_vhost_request_path() ->
-    Uri = server() ++ "test",
-    case ibrowse:send_req(Uri, [], get, [], [{host_header, "example.com"}]) of
-        {ok, _, _, Body} ->
-            {JsonBody} = ejson:decode(Body),
-            HasDbNameInfo = proplists:is_defined(<<"db_name">>, JsonBody),
-            etap:is(HasDbNameInfo, true, "should return database info");
-        _Else -> etap:is(false, true, <<"ibrowse fail">>)
-    end.
-
-test_vhost_request_path1() ->
-    Url = server() ++ "test/doc1?revs_info=true",
-    case ibrowse:send_req(Url, [], get, [], []) of
-        {ok, _, _, Body} ->
-            {JsonProps} = ejson:decode(Body),
-            HasRevsInfo = proplists:is_defined(<<"_revs_info">>, JsonProps),
-            etap:is(HasRevsInfo, true, "should return _revs_info");
-        _Else -> etap:is(false, true, <<"ibrowse fail">>)
-    end.
-
-test_vhost_request_path2() ->
-    Uri = server() ++ "test",
-    case ibrowse:send_req(Uri, [], get, [], [{host_header,"etap-test-db.example2.com"}]) of
-        {ok, _, _, Body} ->
-            {JsonBody} = ejson:decode(Body),
-            HasDbNameInfo = proplists:is_defined(<<"db_name">>, JsonBody),
-            etap:is(HasDbNameInfo, true, "should return database info");
-        _Else -> etap:is(false, true, <<"ibrowse fail">>)
-    end.
-
-test_vhost_request_path3() ->
-    Uri = server() ++ "test1",
-    case ibrowse:send_req(Uri, [], get, [], []) of
-        {ok, _, _, Body} ->
-            {Json} = ejson:decode(Body),
-            etap:is(case proplists:get_value(<<"path">>, Json) of
-                <<"/etap-test-db/_design/doc1/_show/test">> -> true;
-                _ -> false
-            end, true, <<"path in req ok">>);
-        _Else -> etap:is(false, true, <<"ibrowse fail">>)
-    end.
-
-test_vhost_request_to_root() ->
-    Uri = server(),
-    case ibrowse:send_req(Uri, [], get, [], []) of
-        {ok, _, _, Body} ->
-            {JsonBody} = ejson:decode(Body),
-            HasCouchDBWelcome = proplists:is_defined(<<"couchdb">>, JsonBody),
-            etap:is(HasCouchDBWelcome, true, "should allow redirect to /");
-        _Else -> etap:is(false, true, <<"ibrowse fail">>)
-    end.
-
-test_vhost_request_with_oauth(Db) ->
-    {ok, AuthDb} = couch_db:create(
-        <<"tap_test_sec_db">>, [admin_user_ctx(), overwrite]),
-    PrevAuthDbName = couch_config:get("couch_httpd_auth", "authentication_db"),
-    couch_config:set("couch_httpd_auth", "authentication_db", "tap_test_sec_db", false),
-    couch_config:set("oauth_token_users", "otoksec1", "joe", false),
-    couch_config:set("oauth_consumer_secrets", "consec1", "foo", false),
-    couch_config:set("oauth_token_secrets", "otoksec1", "foobar", false),
-    couch_config:set("couch_httpd_auth", "require_valid_user", "true", false),
-
-    DDoc = couch_doc:from_json_obj({[
-        {<<"_id">>, <<"_design/test">>},
-        {<<"language">>, <<"javascript">>},
-        {<<"rewrites">>, [
-            {[
-                {<<"from">>, <<"foobar">>},
-                {<<"to">>, <<"_info">>}
-            ]}
-        ]}
-    ]}),
-    {ok, _} = couch_db:update_doc(Db, DDoc, []),
-
-    RewritePath = "/etap-test-db/_design/test/_rewrite/foobar",
-    ok = couch_config:set("vhosts", "oauth-example.com", RewritePath, false),
-    couch_httpd_vhost:reload(),
-
-    case ibrowse:send_req(server(), [], get, [], [{host_header, "oauth-example.com"}]) of
-        {ok, "401", _, Body} ->
-            {JsonBody} = ejson:decode(Body),
-            etap:is(
-                couch_util:get_value(<<"error">>, JsonBody),
-                <<"unauthorized">>,
-                "Request without OAuth credentials failed");
-        Error ->
-           etap:bail("Request without OAuth credentials did not fail: " ++
-               couch_util:to_list(Error))
-    end,
-
-    JoeDoc = couch_doc:from_json_obj({[
-        {<<"_id">>, <<"org.couchdb.user:joe">>},
-        {<<"type">>, <<"user">>},
-        {<<"name">>, <<"joe">>},
-        {<<"roles">>, []},
-        {<<"password_sha">>, <<"fe95df1ca59a9b567bdca5cbaf8412abd6e06121">>},
-        {<<"salt">>, <<"4e170ffeb6f34daecfd814dfb4001a73">>}
-    ]}),
-    {ok, _} = couch_db:update_doc(AuthDb, JoeDoc, []),
-
-    Url = "http://oauth-example.com/",
-    Consumer = {"consec1", "foo", hmac_sha1},
-    SignedParams = oauth:sign(
-        "GET", Url, [], Consumer, "otoksec1", "foobar"),
-    OAuthUrl = oauth:uri(server(), SignedParams),
-
-    case ibrowse:send_req(OAuthUrl, [], get, [], [{host_header, "oauth-example.com"}]) of
-        {ok, "200", _, Body2} ->
-            {JsonBody2} = ejson:decode(Body2),
-            etap:is(couch_util:get_value(<<"name">>, JsonBody2), <<"test">>,
-                "should return ddoc info with OAuth credentials");
-        Error2 ->
-           etap:bail("Failed to access vhost with OAuth credentials: " ++
-               couch_util:to_list(Error2))
-    end,
-
-    Consumer2 = {"consec1", "bad_secret", hmac_sha1},
-    SignedParams2 = oauth:sign(
-        "GET", Url, [], Consumer2, "otoksec1", "foobar"),
-    OAuthUrl2 = oauth:uri(server(), SignedParams2),
-
-    case ibrowse:send_req(OAuthUrl2, [], get, [], [{host_header, "oauth-example.com"}]) of
-        {ok, "401", _, Body3} ->
-            {JsonBody3} = ejson:decode(Body3),
-            etap:is(
-                couch_util:get_value(<<"error">>, JsonBody3),
-                <<"unauthorized">>,
-                "Request with bad OAuth credentials failed");
-        Error3 ->
-           etap:bail("Failed to access vhost with bad OAuth credentials: " ++
-               couch_util:to_list(Error3))
-    end,
-
-    couch_config:set("couch_httpd_auth", "authentication_db", PrevAuthDbName, false),
-    couch_config:set("couch_httpd_auth", "require_valid_user", "false", false),
-    ok = couch_server:delete(couch_db:name(AuthDb), [admin_user_ctx()]).
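
The OAuth part of the vhost test builds its signed URL with the erlang-oauth library bundled with CouchDB. A stripped-down sketch of just that URL construction, reusing the same consumer and token values the test writes into the config (it assumes the oauth modules are on the code path, e.g. via ERL_LIBS, and the URL is only an example):

    #!/usr/bin/env escript
    %% Illustrative sketch only: the signed-URL construction used by
    %% test_vhost_request_with_oauth/1, with the same consumer and token
    %% values the test configures. Assumes the bundled erlang-oauth modules
    %% are on the code path; the vhost URL is just an example.
    main(_) ->
        crypto:start(),
        Url = "http://oauth-example.com/",
        Consumer = {"consec1", "foo", hmac_sha1},
        SignedParams = oauth:sign("GET", Url, [], Consumer, "otoksec1", "foobar"),
        io:format("~s~n", [oauth:uri(Url, SignedParams)]).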

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/test/etap/170-os-daemons.es
----------------------------------------------------------------------
diff --git a/test/etap/170-os-daemons.es b/test/etap/170-os-daemons.es
deleted file mode 100755
index 73974e9..0000000
--- a/test/etap/170-os-daemons.es
+++ /dev/null
@@ -1,26 +0,0 @@
-#! /usr/bin/env escript
-
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
-loop() ->
-    loop(io:read("")).
-
-loop({ok, _}) ->
-    loop(io:read(""));
-loop(eof) ->
-    stop;
-loop({error, Reason}) ->
-    throw({error, Reason}).
-
-main([]) ->
-    loop().

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/test/etap/170-os-daemons.t
----------------------------------------------------------------------
diff --git a/test/etap/170-os-daemons.t b/test/etap/170-os-daemons.t
deleted file mode 100755
index 6feaa1b..0000000
--- a/test/etap/170-os-daemons.t
+++ /dev/null
@@ -1,114 +0,0 @@
-#!/usr/bin/env escript
-%% -*- erlang -*-
-
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License.  You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
--record(daemon, {
-    port,
-    name,
-    cmd,
-    kill,
-    status=running,
-    cfg_patterns=[],
-    errors=[],
-    buf=[]
-}).
-
-config_files() ->
-    lists:map(fun test_util:build_file/1, [
-        "etc/couchdb/default_dev.ini"
-    ]).
-
-daemon_cmd() ->
-    test_util:source_file("test/etap/170-os-daemons.es").
-
-main(_) ->
-    test_util:init_code_path(),
-
-    etap:plan(49),
-    case (catch test()) of
-        ok ->
-            etap:end_tests();
-        Other ->
-            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
-            etap:bail(Other)
-    end,
-    ok.
-
-test() ->
-    couch_config:start_link(config_files()),
-    couch_os_daemons:start_link(),
-
-    etap:diag("Daemons boot after configuration added."),
-    couch_config:set("os_daemons", "foo", daemon_cmd(), false),
-    timer:sleep(1000),
-    
-    {ok, [D1]} = couch_os_daemons:info([table]),
-    check_daemon(D1, "foo"),
-
-    % Check table form
-    {ok, Tab1} = couch_os_daemons:info(),
-    [T1] = ets:tab2list(Tab1),
-    check_daemon(T1, "foo"),
-
-    etap:diag("Daemons stop after configuration removed."),
-    couch_config:delete("os_daemons", "foo", false),
-    timer:sleep(500),
-    
-    {ok, []} = couch_os_daemons:info([table]),
-    {ok, Tab2} = couch_os_daemons:info(),
-    etap:is(ets:tab2list(Tab2), [], "As table returns empty table."),
-    
-    etap:diag("Adding multiple daemons causes both to boot."),
-    couch_config:set("os_daemons", "bar", daemon_cmd(), false),
-    couch_config:set("os_daemons", "baz", daemon_cmd(), false),
-    timer:sleep(500),
-    {ok, Daemons} = couch_os_daemons:info([table]),
-    lists:foreach(fun(D) ->
-        check_daemon(D)
-    end, Daemons),
-
-    {ok, Tab3} = couch_os_daemons:info(),
-    lists:foreach(fun(D) ->
-        check_daemon(D)
-    end, ets:tab2list(Tab3)),
-    
-    etap:diag("Removing one daemon leaves the other alive."),
-    couch_config:delete("os_daemons", "bar", false),
-    timer:sleep(500),
-    
-    {ok, [D2]} = couch_os_daemons:info([table]),
-    check_daemon(D2, "baz"),
-    
-    % Check table version
-    {ok, Tab4} = couch_os_daemons:info(),
-    [T4] = ets:tab2list(Tab4),
-    check_daemon(T4, "baz"),
-    
-    ok.
-
-check_daemon(D) ->
-    check_daemon(D, D#daemon.name).
-
-check_daemon(D, Name) ->
-    BaseName = "170-os-daemons.es",
-    BaseLen = length(BaseName),
-    CmdLen = length(D#daemon.cmd),
-    CmdName = lists:sublist(D#daemon.cmd, CmdLen-BaseLen+1, BaseLen),
-
-    etap:is(is_port(D#daemon.port), true, "Daemon port is a port."),
-    etap:is(D#daemon.name, Name, "Daemon name was set correctly."),
-    etap:is(CmdName, BaseName, "Command name was set correctly."),
-    etap:isnt(D#daemon.kill, undefined, "Kill command was set."),
-    etap:is(D#daemon.errors, [], "No errors occurred while booting."),
-    etap:is(D#daemon.buf, [], "No extra data left in the buffer.").

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/test/etap/171-os-daemons-config.es
----------------------------------------------------------------------
diff --git a/test/etap/171-os-daemons-config.es b/test/etap/171-os-daemons-config.es
deleted file mode 100755
index b4a914e..0000000
--- a/test/etap/171-os-daemons-config.es
+++ /dev/null
@@ -1,85 +0,0 @@
-#! /usr/bin/env escript
-
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
-filename() ->
-    list_to_binary(test_util:source_file("test/etap/171-os-daemons-config.es")).
-
-read() ->
-    case io:get_line('') of
-        eof ->
-            stop;
-        Data ->
-            ejson:decode(Data)
-    end.
-
-write(Mesg) ->
-    Data = iolist_to_binary(ejson:encode(Mesg)),
-    io:format(binary_to_list(Data) ++ "\n", []).
-
-get_cfg(Section) ->
-    write([<<"get">>, Section]),
-    read().
-
-get_cfg(Section, Name) ->
-    write([<<"get">>, Section, Name]),
-    read().
-
-log(Mesg) ->
-    write([<<"log">>, Mesg]).
-
-log(Mesg, Level) ->
-    write([<<"log">>, Mesg, {[{<<"level">>, Level}]}]).
-
-test_get_cfg1() ->
-    FileName = filename(),
-    {[{<<"foo">>, FileName}]} = get_cfg(<<"os_daemons">>).
-
-test_get_cfg2() ->
-    FileName = filename(),
-    FileName = get_cfg(<<"os_daemons">>, <<"foo">>),
-    <<"sequential">> = get_cfg(<<"uuids">>, <<"algorithm">>).
-
-test_get_unknown_cfg() ->
-    {[]} = get_cfg(<<"aal;3p4">>),
-    null = get_cfg(<<"aal;3p4">>, <<"313234kjhsdfl">>).
-
-test_log() ->
-    log(<<"foobar!">>),
-    log(<<"some stuff!">>, <<"debug">>),
-    log(2),
-    log(true),
-    write([<<"log">>, <<"stuff">>, 2]),
-    write([<<"log">>, 3, null]),
-    write([<<"log">>, [1, 2], {[{<<"level">>, <<"debug">>}]}]),
-    write([<<"log">>, <<"true">>, {[]}]).
-
-do_tests() ->
-    test_get_cfg1(),
-    test_get_cfg2(),
-    test_get_unknown_cfg(),
-    test_log(),
-    loop(io:read("")).
-
-loop({ok, _}) ->
-    loop(io:read(""));
-loop(eof) ->
-    init:stop();
-loop({error, _Reason}) ->
-    init:stop().
-
-main([]) ->
-    test_util:init_code_path(),
-    couch_config:start_link(test_util:config_files()),
-    couch_drv:start_link(),
-    do_tests().

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/test/etap/171-os-daemons-config.t
----------------------------------------------------------------------
diff --git a/test/etap/171-os-daemons-config.t b/test/etap/171-os-daemons-config.t
deleted file mode 100755
index e9dc3f3..0000000
--- a/test/etap/171-os-daemons-config.t
+++ /dev/null
@@ -1,74 +0,0 @@
-#!/usr/bin/env escript
-%% -*- erlang -*-
-
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License.  You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
--record(daemon, {
-    port,
-    name,
-    cmd,
-    kill,
-    status=running,
-    cfg_patterns=[],
-    errors=[],
-    buf=[]
-}).
-
-config_files() ->
-    lists:map(fun test_util:build_file/1, [
-        "etc/couchdb/default_dev.ini"
-    ]).
-
-daemon_cmd() ->
-    test_util:source_file("test/etap/171-os-daemons-config.es").
-
-main(_) ->
-    test_util:init_code_path(),
-
-    etap:plan(6),
-    case (catch test()) of
-        ok ->
-            etap:end_tests();
-        Other ->
-            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
-            etap:bail(Other)
-    end,
-    ok.
-
-test() ->
-    couch_config:start_link(config_files()),
-    couch_config:set("log", "level", "debug", false),
-    couch_log:start_link(),
-    couch_os_daemons:start_link(),
-
-    % "foo" is the daemon name required by this test.
-    couch_config:set("os_daemons", "foo", daemon_cmd(), false),
-    timer:sleep(1000),
-    
-    {ok, [D1]} = couch_os_daemons:info([table]),
-    check_daemon(D1, "foo"),
-    
-    ok.
-
-check_daemon(D, Name) ->
-    BaseName = "171-os-daemons-config.es",
-    BaseLen = length(BaseName),
-    CmdLen = length(D#daemon.cmd),
-    CmdName = lists:sublist(D#daemon.cmd, CmdLen-BaseLen+1, BaseLen),
-
-    etap:is(is_port(D#daemon.port), true, "Daemon port is a port."),
-    etap:is(D#daemon.name, Name, "Daemon name was set correctly."),
-    etap:is(CmdName, BaseName, "Command name was set correctly."),
-    etap:isnt(D#daemon.kill, undefined, "Kill command was set."),
-    etap:is(D#daemon.errors, [], "No errors occurred while booting."),
-    etap:is(D#daemon.buf, [], "No extra data left in the buffer.").

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/test/etap/172-os-daemon-errors.1.sh
----------------------------------------------------------------------
diff --git a/test/etap/172-os-daemon-errors.1.sh b/test/etap/172-os-daemon-errors.1.sh
deleted file mode 100644
index 345c8b4..0000000
--- a/test/etap/172-os-daemon-errors.1.sh
+++ /dev/null
@@ -1,17 +0,0 @@
-#!/bin/sh -e
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may not
-# use this file except in compliance with the License. You may obtain a copy of
-# the License at
-# 
-#   http://www.apache.org/licenses/LICENSE-2.0
-# 
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations under
-# the License.
-# 
-# Please do not make this file executable as that's the error being tested.
-
-sleep 5

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/test/etap/172-os-daemon-errors.2.sh
----------------------------------------------------------------------
diff --git a/test/etap/172-os-daemon-errors.2.sh b/test/etap/172-os-daemon-errors.2.sh
deleted file mode 100755
index 256ee79..0000000
--- a/test/etap/172-os-daemon-errors.2.sh
+++ /dev/null
@@ -1,15 +0,0 @@
-#!/bin/sh -e
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may not
-# use this file except in compliance with the License. You may obtain a copy of
-# the License at
-# 
-#   http://www.apache.org/licenses/LICENSE-2.0
-# 
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations under
-# the License.
-
-exit 1

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/test/etap/172-os-daemon-errors.3.sh
----------------------------------------------------------------------
diff --git a/test/etap/172-os-daemon-errors.3.sh b/test/etap/172-os-daemon-errors.3.sh
deleted file mode 100755
index f5a1368..0000000
--- a/test/etap/172-os-daemon-errors.3.sh
+++ /dev/null
@@ -1,15 +0,0 @@
-#!/bin/sh -e
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may not
-# use this file except in compliance with the License. You may obtain a copy of
-# the License at
-# 
-#   http://www.apache.org/licenses/LICENSE-2.0
-# 
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations under
-# the License.
-
-sleep 1

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/test/etap/172-os-daemon-errors.4.sh
----------------------------------------------------------------------
diff --git a/test/etap/172-os-daemon-errors.4.sh b/test/etap/172-os-daemon-errors.4.sh
deleted file mode 100755
index 5bc10e8..0000000
--- a/test/etap/172-os-daemon-errors.4.sh
+++ /dev/null
@@ -1,15 +0,0 @@
-#!/bin/sh -e
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may not
-# use this file except in compliance with the License. You may obtain a copy of
-# the License at
-# 
-#   http://www.apache.org/licenses/LICENSE-2.0
-# 
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations under
-# the License.
-
-sleep 2

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/test/etap/172-os-daemon-errors.t
----------------------------------------------------------------------
diff --git a/test/etap/172-os-daemon-errors.t b/test/etap/172-os-daemon-errors.t
deleted file mode 100755
index bde5c6f..0000000
--- a/test/etap/172-os-daemon-errors.t
+++ /dev/null
@@ -1,126 +0,0 @@
-#!/usr/bin/env escript
-%% -*- erlang -*-
-
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License.  You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
--record(daemon, {
-    port,
-    name,
-    cmd,
-    kill,
-    status=running,
-    cfg_patterns=[],
-    errors=[],
-    buf=[]
-}).
-
-config_files() ->
-    lists:map(fun test_util:build_file/1, [
-        "etc/couchdb/default_dev.ini"
-    ]).
-
-bad_perms() ->
-    test_util:source_file("test/etap/172-os-daemon-errors.1.sh").
-
-die_on_boot() ->
-    test_util:source_file("test/etap/172-os-daemon-errors.2.sh").
-
-die_quickly() ->
-    test_util:source_file("test/etap/172-os-daemon-errors.3.sh").
-
-can_reboot() ->
-    test_util:source_file("test/etap/172-os-daemon-errors.4.sh").
-
-main(_) ->
-    test_util:init_code_path(),
-
-    etap:plan(36),
-    case (catch test()) of
-        ok ->
-            etap:end_tests();
-        Other ->
-            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
-            etap:bail(Other)
-    end,
-    ok.
-
-test() ->
-    couch_config:start_link(config_files()),
-    couch_os_daemons:start_link(),
-
-    etap:diag("Daemon not executable."),
-    test_halts("foo", bad_perms(), 1000),
-
-    etap:diag("Daemon dies on boot."),
-    test_halts("bar", die_on_boot(), 1000),
-
-    etap:diag("Daemon dies quickly after boot."),
-    test_halts("baz", die_quickly(), 4000),
-    
-    etap:diag("Daemon dies, but not quickly enough to be halted."),
-    test_runs("bam", can_reboot()),
-    
-    ok.
-
-test_halts(Name, Cmd, Time) ->
-    couch_config:set("os_daemons", Name, Cmd ++ " 2> /dev/null", false),
-    timer:sleep(Time),
-    {ok, [D]} = couch_os_daemons:info([table]),
-    check_dead(D, Name, Cmd),
-    couch_config:delete("os_daemons", Name, false).
-
-test_runs(Name, Cmd) ->
-    couch_config:set("os_daemons", Name, Cmd, false),
-
-    timer:sleep(1000),
-    {ok, [D1]} = couch_os_daemons:info([table]),
-    check_daemon(D1, Name, Cmd, 0),
-    
-    % Should reboot every two seconds. We're at 1s, so wait
-    % until 3s to be in the middle of the next invocation's
-    % life span.
-    timer:sleep(2000),
-    {ok, [D2]} = couch_os_daemons:info([table]),
-    check_daemon(D2, Name, Cmd, 1),
-    
-    % If the kill command changed, that means we rebooted the process.
-    etap:isnt(D1#daemon.kill, D2#daemon.kill, "Kill command changed.").
-
-check_dead(D, Name, Cmd) ->
-    BaseName = filename:basename(Cmd) ++ " 2> /dev/null",
-    BaseLen = length(BaseName),
-    CmdLen = length(D#daemon.cmd),
-    CmdName = lists:sublist(D#daemon.cmd, CmdLen-BaseLen+1, BaseLen),
-
-    etap:is(is_port(D#daemon.port), true, "Daemon port is a port."),
-    etap:is(D#daemon.name, Name, "Daemon name was set correctly."),
-    etap:is(CmdName, BaseName, "Command name was set correctly."),
-    etap:isnt(D#daemon.kill, undefined, "Kill command was set."),
-    etap:is(D#daemon.status, halted, "Daemon has been halted."),
-    etap:is(D#daemon.errors, nil, "Errors have been disabled."),
-    etap:is(D#daemon.buf, nil, "Buffer has been switched off.").
-
-check_daemon(D, Name, Cmd, Errs) ->
-    BaseName = filename:basename(Cmd),
-    BaseLen = length(BaseName),
-    CmdLen = length(D#daemon.cmd),
-    CmdName = lists:sublist(D#daemon.cmd, CmdLen-BaseLen+1, BaseLen),
-
-    etap:is(is_port(D#daemon.port), true, "Daemon port is a port."),
-    etap:is(D#daemon.name, Name, "Daemon name was set correctly."),
-    etap:is(CmdName, BaseName, "Command name was set correctly."),
-    etap:isnt(D#daemon.kill, undefined, "Kill command was set."),
-    etap:is(D#daemon.status, running, "Daemon still running."),
-    etap:is(length(D#daemon.errors), Errs, "Found expected number of errors."),
-    etap:is(D#daemon.buf, [], "No extra data left in the buffer.").
-

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/test/etap/173-os-daemon-cfg-register.t
----------------------------------------------------------------------
diff --git a/test/etap/173-os-daemon-cfg-register.t b/test/etap/173-os-daemon-cfg-register.t
deleted file mode 100755
index 256ee7d..0000000
--- a/test/etap/173-os-daemon-cfg-register.t
+++ /dev/null
@@ -1,116 +0,0 @@
-#!/usr/bin/env escript
-%% -*- erlang -*-
-
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License.  You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
--record(daemon, {
-    port,
-    name,
-    cmd,
-    kill,
-    status=running,
-    cfg_patterns=[],
-    errors=[],
-    buf=[]
-}).
-
-daemon_name() ->
-    "wheee".
-
-daemon_cmd() ->
-    test_util:build_file("test/etap/test_cfg_register").
-
-main(_) ->
-    test_util:init_code_path(),
-
-    etap:plan(27),
-    case (catch test()) of
-        ok ->
-            etap:end_tests();
-        Other ->
-            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
-            etap:bail(Other)
-    end,
-    ok.
-
-test() ->
-    couch_config:start_link(test_util:config_files()),
-    couch_os_daemons:start_link(),
-    
-    DaemonCmd = daemon_cmd() ++ " 2> /dev/null",
-    
-    etap:diag("Booting the daemon"),
-    couch_config:set("os_daemons", daemon_name(), DaemonCmd, false),
-    wait_for_start(10),
-    {ok, [D1]} = couch_os_daemons:info([table]),
-    check_daemon(D1, running),
-    
-    etap:diag("Daemon restarts when section changes."),
-    couch_config:set("s1", "k", "foo", false),
-    wait_for_restart(10),
-    {ok, [D2]} = couch_os_daemons:info([table]),
-    check_daemon(D2, running),
-    etap:isnt(D2#daemon.kill, D1#daemon.kill, "Kill command shows restart."),
-
-    etap:diag("Daemon doesn't restart for ignored section key."),
-    couch_config:set("s2", "k2", "baz", false),
-    timer:sleep(1000), % Message travel time.
-    {ok, [D3]} = couch_os_daemons:info([table]),
-    etap:is(D3, D2, "Same daemon info after ignored config change."),
-    
-    etap:diag("Daemon restarts for specific section/key pairs."),
-    couch_config:set("s2", "k", "bingo", false),
-    wait_for_restart(10),
-    {ok, [D4]} = couch_os_daemons:info([table]),
-    check_daemon(D4, running),
-    etap:isnt(D4#daemon.kill, D3#daemon.kill, "Kill command changed again."),
-    
-    ok.
-
-wait_for_start(0) ->
-    throw({error, wait_for_start});
-wait_for_start(N) ->
-    case couch_os_daemons:info([table]) of
-        {ok, []} ->
-            timer:sleep(200),
-            wait_for_start(N-1);
-        _ ->
-            timer:sleep(1000)
-    end.
-
-wait_for_restart(0) ->
-    throw({error, wait_for_restart});
-wait_for_restart(N) ->
-    {ok, [D]} = couch_os_daemons:info([table]),
-    case D#daemon.status of
-        restarting ->
-            timer:sleep(200),
-            wait_for_restart(N-1);
-        _ ->
-            timer:sleep(1000)
-    end.
-
-check_daemon(D, Status) ->
-    BaseName = filename:basename(daemon_cmd()) ++ " 2> /dev/null",
-    BaseLen = length(BaseName),
-    CmdLen = length(D#daemon.cmd),
-    CmdName = lists:sublist(D#daemon.cmd, CmdLen-BaseLen+1, BaseLen),
-
-    etap:is(is_port(D#daemon.port), true, "Daemon port is a port."),
-    etap:is(D#daemon.name, daemon_name(), "Daemon name was set correctly."),
-    etap:is(CmdName, BaseName, "Command name was set correctly."),
-    etap:isnt(D#daemon.kill, undefined, "Kill command was set."),
-    etap:is(D#daemon.status, Status, "Daemon status is correct."),
-    etap:is(D#daemon.cfg_patterns, [{"s1"}, {"s2", "k"}], "Cfg patterns set"),
-    etap:is(D#daemon.errors, [], "No errors have occurred."),
-    etap:isnt(D#daemon.buf, nil, "Buffer is active.").

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/test/etap/180-http-proxy.ini
----------------------------------------------------------------------
diff --git a/test/etap/180-http-proxy.ini b/test/etap/180-http-proxy.ini
deleted file mode 100644
index 3e2ba13..0000000
--- a/test/etap/180-http-proxy.ini
+++ /dev/null
@@ -1,20 +0,0 @@
-; Licensed to the Apache Software Foundation (ASF) under one
-; or more contributor license agreements.  See the NOTICE file
-; distributed with this work for additional information
-; regarding copyright ownership.  The ASF licenses this file
-; to you under the Apache License, Version 2.0 (the
-; "License"); you may not use this file except in compliance
-; with the License.  You may obtain a copy of the License at
-; 
-;   http://www.apache.org/licenses/LICENSE-2.0
-;
-; Unless required by applicable law or agreed to in writing,
-; software distributed under the License is distributed on an
-; "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-; KIND, either express or implied.  See the License for the
-; specific language governing permissions and limitations
-; under the License.
-
-; 49151 is IANA Reserved, let's assume no one is listening there
-[httpd_global_handlers]
-_error = {couch_httpd_proxy, handle_proxy_req, <<"http://127.0.0.1:49151/">>}

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/test/etap/180-http-proxy.t
----------------------------------------------------------------------
diff --git a/test/etap/180-http-proxy.t b/test/etap/180-http-proxy.t
deleted file mode 100755
index da67603..0000000
--- a/test/etap/180-http-proxy.t
+++ /dev/null
@@ -1,376 +0,0 @@
-#!/usr/bin/env escript
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
--record(req, {method=get, path="", headers=[], body="", opts=[]}).
-
-server() ->
-    lists:concat([
-        "http://127.0.0.1:",
-        mochiweb_socket_server:get(couch_httpd, port),
-        "/_test/"
-    ]).
-
-proxy() ->
-    "http://127.0.0.1:" ++ integer_to_list(test_web:get_port()) ++ "/".
-
-external() -> "https://www.google.com/".
-
-main(_) ->
-    test_util:init_code_path(),
-
-    etap:plan(61),
-    case (catch test()) of
-        ok ->
-            etap:end_tests();
-        Other ->
-            etap:diag("Test died abnormally: ~p", [Other]),
-            etap:bail("Bad return value.")
-    end,
-    ok.
-
-check_request(Name, Req, Remote, Local) ->
-    case Remote of
-        no_remote -> ok;
-        _ -> test_web:set_assert(Remote)
-    end,
-    Url = case proplists:lookup(url, Req#req.opts) of
-        none -> server() ++ Req#req.path;
-        {url, DestUrl} -> DestUrl
-    end,
-    Opts = [{headers_as_is, true} | Req#req.opts],
-    Resp = ibrowse:send_req(
-        Url, Req#req.headers, Req#req.method, Req#req.body, Opts
-    ),
-    %etap:diag("ibrowse response: ~p", [Resp]),
-    case Local of
-        no_local -> ok;
-        _ -> etap:fun_is(Local, Resp, Name)
-    end,
-    case {Remote, Local} of
-        {no_remote, _} ->
-            ok;
-        {_, no_local} ->
-            ok;
-        _ ->
-            etap:is(test_web:check_last(), was_ok, Name ++ " - request handled")
-    end,
-    Resp.
-
-test() ->
-    ExtraConfig = [test_util:source_file("test/etap/180-http-proxy.ini")],
-    couch_server_sup:start_link(test_util:config_files() ++ ExtraConfig),
-    ibrowse:start(),
-    crypto:start(),
-
-    % start the test_web server on a random port
-    test_web:start_link(),
-    Url = lists:concat([
-        "{couch_httpd_proxy, handle_proxy_req, <<\"http://127.0.0.1:",
-        test_web:get_port(),
-        "/\">>}"
-    ]),
-    couch_config:set("httpd_global_handlers", "_test", Url, false),
-
-    % let couch_httpd restart
-    timer:sleep(100),
-
-    test_basic(),
-    test_alternate_status(),
-    test_trailing_slash(),
-    test_passes_header(),
-    test_passes_host_header(),
-    test_passes_header_back(),
-    test_rewrites_location_headers(),
-    test_doesnt_rewrite_external_locations(),
-    test_rewrites_relative_location(),
-    test_uses_same_version(),
-    test_passes_body(),
-    test_passes_eof_body_back(),
-    test_passes_chunked_body(),
-    test_passes_chunked_body_back(),
-
-    test_connect_error(),
-    
-    ok.
-
-test_basic() ->
-    Remote = fun(Req) ->
-        'GET' = Req:get(method),
-        "/" = Req:get(path),
-        0 = Req:get(body_length),
-        <<>> = Req:recv_body(),
-        {ok, {200, [{"Content-Type", "text/plain"}], "ok"}}
-    end,
-    Local = fun({ok, "200", _, "ok"}) -> true; (_) -> false end,
-    check_request("Basic proxy test", #req{}, Remote, Local).
-
-test_alternate_status() ->
-    Remote = fun(Req) ->
-        "/alternate_status" = Req:get(path),
-        {ok, {201, [], "ok"}}
-    end,
-    Local = fun({ok, "201", _, "ok"}) -> true; (_) -> false end,
-    Req = #req{path="alternate_status"},
-    check_request("Alternate status", Req, Remote, Local).
-
-test_trailing_slash() ->
-    Remote = fun(Req) ->
-        "/trailing_slash/" = Req:get(path),
-        {ok, {200, [], "ok"}}
-    end,
-    Local = fun({ok, "200", _, "ok"}) -> true; (_) -> false end,
-    Req = #req{path="trailing_slash/"},
-    check_request("Trailing slash", Req, Remote, Local).
-
-test_passes_header() ->
-    Remote = fun(Req) ->
-        "/passes_header" = Req:get(path),
-        "plankton" = Req:get_header_value("X-CouchDB-Ralph"),
-        {ok, {200, [], "ok"}}
-    end,
-    Local = fun({ok, "200", _, "ok"}) -> true; (_) -> false end,
-    Req = #req{
-        path="passes_header",
-        headers=[{"X-CouchDB-Ralph", "plankton"}]
-    },
-    check_request("Passes header", Req, Remote, Local).
-
-test_passes_host_header() ->
-    Remote = fun(Req) ->
-        "/passes_host_header" = Req:get(path),
-        "www.google.com" = Req:get_header_value("Host"),
-        {ok, {200, [], "ok"}}
-    end,
-    Local = fun({ok, "200", _, "ok"}) -> true; (_) -> false end,
-    Req = #req{
-        path="passes_host_header",
-        headers=[{"Host", "www.google.com"}]
-    },
-    check_request("Passes host header", Req, Remote, Local).
-
-test_passes_header_back() ->
-    Remote = fun(Req) ->
-        "/passes_header_back" = Req:get(path),
-        {ok, {200, [{"X-CouchDB-Plankton", "ralph"}], "ok"}}
-    end,
-    Local = fun
-        ({ok, "200", Headers, "ok"}) ->
-            lists:member({"X-CouchDB-Plankton", "ralph"}, Headers);
-        (_) ->
-            false
-    end,
-    Req = #req{path="passes_header_back"},
-    check_request("Passes header back", Req, Remote, Local).
-
-test_rewrites_location_headers() ->
-    etap:diag("Testing location header rewrites."),
-    do_rewrite_tests([
-        {"Location", proxy() ++ "foo/bar", server() ++ "foo/bar"},
-        {"Content-Location", proxy() ++ "bing?q=2", server() ++ "bing?q=2"},
-        {"Uri", proxy() ++ "zip#frag", server() ++ "zip#frag"},
-        {"Destination", proxy(), server()}
-    ]).
-
-test_doesnt_rewrite_external_locations() ->
-    etap:diag("Testing no rewrite of external locations."),
-    do_rewrite_tests([
-        {"Location", external() ++ "search", external() ++ "search"},
-        {"Content-Location", external() ++ "s?q=2", external() ++ "s?q=2"},
-        {"Uri", external() ++ "f#f", external() ++ "f#f"},
-        {"Destination", external() ++ "f?q=2#f", external() ++ "f?q=2#f"}
-    ]).
-
-test_rewrites_relative_location() ->
-    etap:diag("Testing relative rewrites."),
-    do_rewrite_tests([
-        {"Location", "/foo", server() ++ "foo"},
-        {"Content-Location", "bar", server() ++ "bar"},
-        {"Uri", "/zing?q=3", server() ++ "zing?q=3"},
-        {"Destination", "bing?q=stuff#yay", server() ++ "bing?q=stuff#yay"}
-    ]).
-
-do_rewrite_tests(Tests) ->
-    lists:foreach(fun({Header, Location, Url}) ->
-        do_rewrite_test(Header, Location, Url)
-    end, Tests).
-    
-do_rewrite_test(Header, Location, Url) ->
-    Remote = fun(Req) ->
-        "/rewrite_test" = Req:get(path),
-        {ok, {302, [{Header, Location}], "ok"}}
-    end,
-    Local = fun
-        ({ok, "302", Headers, "ok"}) ->
-            etap:is(
-                couch_util:get_value(Header, Headers),
-                Url,
-                "Header rewritten correctly."
-            ),
-            true;
-        (_) ->
-            false
-    end,
-    Req = #req{path="rewrite_test"},
-    Label = "Rewrite test for ",
-    check_request(Label ++ Header, Req, Remote, Local).
-
-test_uses_same_version() ->
-    Remote = fun(Req) ->
-        "/uses_same_version" = Req:get(path),
-        {1, 0} = Req:get(version),
-        {ok, {200, [], "ok"}}
-    end,
-    Local = fun({ok, "200", _, "ok"}) -> true; (_) -> false end,
-    Req = #req{
-        path="uses_same_version",
-        opts=[{http_vsn, {1, 0}}]
-    },
-    check_request("Uses same version", Req, Remote, Local).
-
-test_passes_body() ->
-    Remote = fun(Req) ->
-        'PUT' = Req:get(method),
-        "/passes_body" = Req:get(path),
-        <<"Hooray!">> = Req:recv_body(),
-        {ok, {201, [], "ok"}}
-    end,
-    Local = fun({ok, "201", _, "ok"}) -> true; (_) -> false end,
-    Req = #req{
-        method=put,
-        path="passes_body",
-        body="Hooray!"
-    },
-    check_request("Passes body", Req, Remote, Local).
-
-test_passes_eof_body_back() ->
-    BodyChunks = [<<"foo">>, <<"bar">>, <<"bazinga">>],
-    Remote = fun(Req) ->
-        'GET' = Req:get(method),
-        "/passes_eof_body" = Req:get(path),
-        {raw, {200, [{"Connection", "close"}], BodyChunks}}
-    end,
-    Local = fun({ok, "200", _, "foobarbazinga"}) -> true; (_) -> false end,
-    Req = #req{path="passes_eof_body"},
-    check_request("Passes eof body", Req, Remote, Local).
-
-test_passes_chunked_body() ->
-    BodyChunks = [<<"foo">>, <<"bar">>, <<"bazinga">>],
-    Remote = fun(Req) ->
-        'POST' = Req:get(method),
-        "/passes_chunked_body" = Req:get(path),
-        RecvBody = fun
-            ({Length, Chunk}, [Chunk | Rest]) ->
-                Length = size(Chunk),
-                Rest;
-            ({0, []}, []) ->
-                ok
-        end,
-        ok = Req:stream_body(1024*1024, RecvBody, BodyChunks),
-        {ok, {201, [], "ok"}}
-    end,
-    Local = fun({ok, "201", _, "ok"}) -> true; (_) -> false end,
-    Req = #req{
-        method=post,
-        path="passes_chunked_body",
-        headers=[{"Transfer-Encoding", "chunked"}],
-        body=mk_chunked_body(BodyChunks)
-    },
-    check_request("Passes chunked body", Req, Remote, Local).
-
-test_passes_chunked_body_back() ->
-    Name = "Passes chunked body back",
-    Remote = fun(Req) ->
-        'GET' = Req:get(method),
-        "/passes_chunked_body_back" = Req:get(path),
-        BodyChunks = [<<"foo">>, <<"bar">>, <<"bazinga">>],
-        {chunked, {200, [{"Transfer-Encoding", "chunked"}], BodyChunks}}
-    end,
-    Req = #req{
-        path="passes_chunked_body_back",
-        opts=[{stream_to, self()}]
-    },
-
-    Resp = check_request(Name, Req, Remote, no_local),
-
-    etap:fun_is(
-        fun({ibrowse_req_id, _}) -> true; (_) -> false end,
-        Resp,
-        "Received an ibrowse request id."
-    ),
-    {_, ReqId} = Resp,
-    
-    % Grab headers from response
-    receive
-        {ibrowse_async_headers, ReqId, "200", Headers} ->
-            etap:is(
-                proplists:get_value("Transfer-Encoding", Headers),
-                "chunked",
-                "Response included the Transfer-Encoding: chunked header"
-            ),
-            ibrowse:stream_next(ReqId)
-    after 1000 ->
-        throw({error, timeout})
-    end,
-    
-    % Check body received
-    % TODO: When we upgrade to ibrowse >= 2.0.0 this check needs to
-    %       check that the chunks returned are what we sent from the
-    %       Remote test.
-    etap:diag("TODO: UPGRADE IBROWSE"),
-    etap:is(recv_body(ReqId, []), <<"foobarbazinga">>, "Decoded chunked body."),
-
-    % Check test_web server.
-    etap:is(test_web:check_last(), was_ok, Name ++ " - request handled").
-
-test_connect_error() ->
-    Local = fun({ok, "500", _Headers, _Body}) -> true; (_) -> false end,
-    Url = lists:concat([
-        "http://127.0.0.1:",
-        mochiweb_socket_server:get(couch_httpd, port),
-        "/_error"
-    ]),
-    Req = #req{opts=[{url, Url}]},
-    check_request("Connect error", Req, no_remote, Local).
-
-
-mk_chunked_body(Chunks) ->
-    mk_chunked_body(Chunks, []).
-
-mk_chunked_body([], Acc) ->
-    iolist_to_binary(lists:reverse(Acc, "0\r\n\r\n"));
-mk_chunked_body([Chunk | Rest], Acc) ->
-    Size = to_hex(size(Chunk)),
-    mk_chunked_body(Rest, ["\r\n", Chunk, "\r\n", Size | Acc]).
-
-to_hex(Val) ->
-    to_hex(Val, []).
-
-to_hex(0, Acc) ->
-    Acc;
-to_hex(Val, Acc) ->
-    to_hex(Val div 16, [hex_char(Val rem 16) | Acc]).
-
-hex_char(V) when V < 10 -> $0 + V;
-hex_char(V) -> $A + V - 10.
-
-recv_body(ReqId, Acc) ->
-    receive
-        {ibrowse_async_response, ReqId, Data} ->
-            recv_body(ReqId, [Data | Acc]);
-        {ibrowse_async_response_end, ReqId} ->
-            iolist_to_binary(lists:reverse(Acc));
-        Else ->
-            throw({error, unexpected_mesg, Else})
-    after 5000 ->
-        throw({error, timeout})
-    end.
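For readers skimming the proxy test above: mk_chunked_body/1 hand-rolls standard HTTP/1.1 chunked framing (hex chunk size, CRLF, chunk data, CRLF, terminated by a zero-size chunk). A minimal, self-contained sketch of the same encoding follows; the module name is illustrative and not part of the test suite:

    %% Illustrative only: same framing as mk_chunked_body/1 above.
    %% encode([<<"foo">>, <<"bar">>, <<"bazinga">>]) yields
    %% <<"3\r\nfoo\r\n3\r\nbar\r\n7\r\nbazinga\r\n0\r\n\r\n">>,
    %% i.e. the exact request body the test hands to ibrowse.
    -module(chunked_sketch).
    -export([encode/1]).

    encode(Chunks) ->
        iolist_to_binary(
            [[integer_to_list(byte_size(C), 16), "\r\n", C, "\r\n"]
             || C <- Chunks] ++ ["0\r\n\r\n"]).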

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/test/etap/190-json-stream-parse.t
----------------------------------------------------------------------
diff --git a/test/etap/190-json-stream-parse.t b/test/etap/190-json-stream-parse.t
deleted file mode 100755
index 49ea58f..0000000
--- a/test/etap/190-json-stream-parse.t
+++ /dev/null
@@ -1,184 +0,0 @@
-#!/usr/bin/env escript
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
-main(_) ->
-    test_util:init_code_path(),
-    etap:plan(99),
-    case (catch test()) of
-        ok ->
-            etap:end_tests();
-        Other ->
-            etap:diag("Test died abnormally: ~p", [Other]),
-            etap:bail("Bad return value.")
-    end,
-    ok.
-
-test() ->
-    crypto:start(),
-    ok = test_raw_json_input(),
-    ok = test_1_byte_data_function(),
-    ok = test_multiple_bytes_data_function().
-
-
-test_raw_json_input() ->
-    etap:diag("Tests with raw JSON string as the input."),
-    lists:foreach(
-        fun({EJson, JsonString, Desc}) ->
-            etap:is(
-              equiv(EJson, json_stream_parse:to_ejson(JsonString)),
-              true,
-              Desc)
-        end,
-        cases()),
-    ok.
-
-
-test_1_byte_data_function() ->
-    etap:diag("Tests with a 1 byte output data function as the input."),
-    lists:foreach(
-        fun({EJson, JsonString, Desc}) ->
-            DataFun = fun() -> single_byte_data_fun(JsonString) end,
-            etap:is(
-              equiv(EJson, json_stream_parse:to_ejson(DataFun)),
-              true,
-              Desc)
-        end,
-        cases()),
-    ok.
-
-
-test_multiple_bytes_data_function() ->
-    etap:diag("Tests with a multiple bytes output data function as the input."),
-    lists:foreach(
-        fun({EJson, JsonString, Desc}) ->
-            DataFun = fun() -> multiple_bytes_data_fun(JsonString) end,
-            etap:is(
-              equiv(EJson, json_stream_parse:to_ejson(DataFun)),
-              true,
-              Desc)
-        end,
-        cases()),
-    ok.
-
-
-cases() ->
-    [
-        {1, "1", "integer numeric literal"},
-        {3.1416, "3.14160", "float numeric literal"},  % text representation may truncate or carry trailing zeroes
-        {-1, "-1", "negative integer numeric literal"},
-        {-3.1416, "-3.14160", "negative float numeric literal"},
-        {12.0e10, "1.20000e+11", "float literal in scientific notation"},
-        {1.234E+10, "1.23400e+10", "another float literal in scientific notation"},
-        {-1.234E-10, "-1.23400e-10", "negative float literal in scientific notation"},
-        {10.0, "1.0e+01", "yet another float literal in scientific notation"},
-        {123.456, "1.23456E+2", "yet another float literal in scientific notation"},
-        {10.0, "1e1", "yet another float literal in scientific notation"},
-        {<<"foo">>, "\"foo\"", "string literal"},
-        {<<"foo", 5, "bar">>, "\"foo\\u0005bar\"", "string literal with \\u0005"},
-        {<<"">>, "\"\"", "empty string literal"},
-        {<<"\n\n\n">>, "\"\\n\\n\\n\"", "only new lines literal"},
-        {<<"\" \b\f\r\n\t\"">>, "\"\\\" \\b\\f\\r\\n\\t\\\"\"",
-            "only white spaces string literal"},
-        {null, "null", "null literal"},
-        {true, "true", "true literal"},
-        {false, "false", "false literal"},
-        {<<"null">>, "\"null\"", "null string literal"},
-        {<<"true">>, "\"true\"", "true string literal"},
-        {<<"false">>, "\"false\"", "false string literal"},
-        {{[]}, "{}", "empty object literal"},
-        {{[{<<"foo">>, <<"bar">>}]}, "{\"foo\":\"bar\"}",
-            "simple object literal"},
-        {{[{<<"foo">>, <<"bar">>}, {<<"baz">>, 123}]},
-            "{\"foo\":\"bar\",\"baz\":123}", "another simple object literal"},
-        {[], "[]", "empty array literal"},
-        {[[]], "[[]]", "empty array literal inside a single element array literal"},
-        {[1, <<"foo">>], "[1,\"foo\"]", "simple non-empty array literal"},
-        {[1199344435545.0, 1], "[1199344435545.0,1]",
-             "another simple non-empty array literal"},
-        {[false, true, 321, null], "[false, true, 321, null]", "array of literals"},
-        {{[{<<"foo">>, [123]}]}, "{\"foo\":[123]}",
-             "object literal with an array valued property"},
-        {{[{<<"foo">>, {[{<<"bar">>, true}]}}]},
-            "{\"foo\":{\"bar\":true}}", "nested object literal"},
-        {{[{<<"foo">>, []}, {<<"bar">>, {[{<<"baz">>, true}]}},
-                {<<"alice">>, <<"bob">>}]},
-            "{\"foo\":[],\"bar\":{\"baz\":true},\"alice\":\"bob\"}",
-            "complex object literal"},
-        {[-123, <<"foo">>, {[{<<"bar">>, []}]}, null],
-            "[-123,\"foo\",{\"bar\":[]},null]",
-            "complex array literal"}
-    ].
-
-
-%% Test for equivalence of Erlang terms.
-%% Due to arbitrary order of construction, equivalent objects might
-%% compare unequal as erlang terms, so we need to carefully recurse
-%% through aggregates (tuples and objects).
-equiv({Props1}, {Props2}) ->
-    equiv_object(Props1, Props2);
-equiv(L1, L2) when is_list(L1), is_list(L2) ->
-    equiv_list(L1, L2);
-equiv(N1, N2) when is_number(N1), is_number(N2) ->
-    N1 == N2;
-equiv(B1, B2) when is_binary(B1), is_binary(B2) ->
-    B1 == B2;
-equiv(true, true) ->
-    true;
-equiv(false, false) ->
-    true;
-equiv(null, null) ->
-    true.
-
-
-%% Object representation and traversal order is unknown.
-%% Use the sledgehammer and sort property lists.
-equiv_object(Props1, Props2) ->
-    L1 = lists:keysort(1, Props1),
-    L2 = lists:keysort(1, Props2),
-    Pairs = lists:zip(L1, L2),
-    true = lists:all(
-        fun({{K1, V1}, {K2, V2}}) ->
-            equiv(K1, K2) andalso equiv(V1, V2)
-        end,
-        Pairs).
-
-
-%% Recursively compare tuple elements for equivalence.
-equiv_list([], []) ->
-    true;
-equiv_list([V1 | L1], [V2 | L2]) ->
-    equiv(V1, V2) andalso equiv_list(L1, L2).
-
-
-single_byte_data_fun([]) ->
-    done;
-single_byte_data_fun([H | T]) ->
-    {<<H>>, fun() -> single_byte_data_fun(T) end}.
-
-
-multiple_bytes_data_fun([]) ->
-    done;
-multiple_bytes_data_fun(L) ->
-    N = crypto:rand_uniform(0, 7),
-    {Part, Rest} = split(L, N),
-    {list_to_binary(Part), fun() -> multiple_bytes_data_fun(Rest) end}.
-
-split(L, N) when length(L) =< N ->
-    {L, []};
-split(L, N) ->
-    take(N, L, []).
-
-take(0, L, Acc) ->
-    {lists:reverse(Acc), L};
-take(N, [H|L], Acc) ->
-    take(N - 1, L, [H | Acc]).
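The equiv/2 helpers above exist because JSON object members are unordered: two decodes of the same document may yield EJSON tuples whose property lists differ only in order, and those compare unequal as plain Erlang terms. A small sketch, assuming it is pasted into the same escript so equiv/2 is in scope:

    %% Sketch: same object, different property order. Plain =:= says
    %% false; the structural comparison above says true.
    order_example() ->
        Obj1 = {[{<<"a">>, 1}, {<<"b">>, 2}]},
        Obj2 = {[{<<"b">>, 2}, {<<"a">>, 1}]},
        false = (Obj1 =:= Obj2),
        true = equiv(Obj1, Obj2).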

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/test/etap/200-view-group-no-db-leaks.t
----------------------------------------------------------------------
diff --git a/test/etap/200-view-group-no-db-leaks.t b/test/etap/200-view-group-no-db-leaks.t
deleted file mode 100755
index b711ac8..0000000
--- a/test/etap/200-view-group-no-db-leaks.t
+++ /dev/null
@@ -1,307 +0,0 @@
-#!/usr/bin/env escript
-%% -*- erlang -*-
-
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
--record(user_ctx, {
-    name = null,
-    roles = [],
-    handler
-}).
-
--record(db, {
-    main_pid = nil,
-    update_pid = nil,
-    compactor_pid = nil,
-    instance_start_time, % number of microsecs since jan 1 1970 as a binary string
-    fd,
-    updater_fd,
-    fd_ref_counter,
-    header = nil,
-    committed_update_seq,
-    fulldocinfo_by_id_btree,
-    docinfo_by_seq_btree,
-    local_docs_btree,
-    update_seq,
-    name,
-    filepath,
-    validate_doc_funs = [],
-    security = [],
-    security_ptr = nil,
-    user_ctx = #user_ctx{},
-    waiting_delayed_commit = nil,
-    revs_limit = 1000,
-    fsync_options = [],
-    options = [],
-    compression,
-    before_doc_update,
-    after_doc_read
-}).
-
-test_db_name() -> <<"couch_test_view_group_db_leaks">>.
-ddoc_name() -> <<"foo">>.
-
-main(_) ->
-    test_util:init_code_path(),
-
-    etap:plan(28),
-    case (catch test()) of
-        ok ->
-            etap:end_tests();
-        Other ->
-            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
-            etap:bail(Other)
-    end,
-    ok.
-
-test() ->
-    couch_server_sup:start_link(test_util:config_files()),
-    timer:sleep(1000),
-    put(addr, couch_config:get("httpd", "bind_address", "127.0.0.1")),
-    put(port, integer_to_list(mochiweb_socket_server:get(couch_httpd, port))),
-
-    delete_db(),
-    create_db(),
-
-    create_docs(),
-    {ok, DDocRev} = create_design_doc(),
-
-    {ok, IndexerPid} = couch_index_server:get_index(
-        couch_mrview_index, test_db_name(), <<"_design/", (ddoc_name())/binary>>
-    ),
-    etap:is(is_pid(IndexerPid), true, "got view group pid"),
-    etap:is(is_process_alive(IndexerPid), true, "view group pid is alive"),
-
-    query_view(3, null, false),
-    check_db_ref_count(),
-    etap:is(is_process_alive(IndexerPid), true, "view group pid is alive"),
-
-    create_new_doc(<<"doc1000">>),
-    query_view(4, null, false),
-    check_db_ref_count(),
-    etap:is(is_process_alive(IndexerPid), true, "view group pid is alive"),
-
-    Ref1 = get_db_ref_counter(),
-    compact_db(),
-    check_db_ref_count(),
-    Ref2 = get_db_ref_counter(),
-    etap:isnt(Ref1, Ref2,  "DB ref counter changed"),
-    etap:is(false, is_process_alive(Ref1), "old DB ref counter is not alive"),
-    etap:is(is_process_alive(IndexerPid), true, "view group pid is alive"),
-
-    compact_view_group(),
-    check_db_ref_count(),
-    Ref3 = get_db_ref_counter(),
-    etap:is(Ref3, Ref2,  "DB ref counter didn't change"),
-    etap:is(is_process_alive(IndexerPid), true, "view group pid is alive"),
-
-    create_new_doc(<<"doc1001">>),
-    query_view(5, null, false),
-    check_db_ref_count(),
-    etap:is(is_process_alive(IndexerPid), true, "view group pid is alive"),
-
-    etap:diag("updating the design document with a new view definition"),
-    {ok, _NewDDocRev} = update_ddoc_view(DDocRev),
-
-    {ok, NewIndexerPid} = couch_index_server:get_index(
-        couch_mrview_index, test_db_name(), <<"_design/", (ddoc_name())/binary>>
-    ),
-    etap:is(is_pid(NewIndexerPid), true, "got new view group pid"),
-    etap:is(is_process_alive(NewIndexerPid), true, "new view group pid is alive"),
-    etap:isnt(NewIndexerPid, IndexerPid, "new view group has a different pid"),
-    etap:diag("querying view with ?stale=ok, must return empty row set"),
-    query_view(0, foo, ok),
-    etap:diag("querying view (without stale), must return 5 rows with value 1"),
-    query_view(5, 1, false),
-    MonRef = erlang:monitor(process, IndexerPid),
-    receive
-    {'DOWN', MonRef, _, _, _} ->
-        etap:diag("old view group is dead after ddoc update")
-    after 5000 ->
-        etap:bail("old view group is not dead after ddoc update")
-    end,
-
-    etap:diag("deleting database"),
-    MonRef2 = erlang:monitor(process, NewIndexerPid),
-    ok = couch_server:delete(test_db_name(), []),
-    receive
-    {'DOWN', MonRef2, _, _, _} ->
-        etap:diag("new view group is dead after DB deletion")
-    after 5000 ->
-        etap:bail("new view group did not die after DB deletion")
-    end,
-
-    ok = timer:sleep(1000),
-    delete_db(),
-    couch_server_sup:stop(),
-    ok.
-
-admin_user_ctx() ->
-    {user_ctx, #user_ctx{roles=[<<"_admin">>]}}.
-
-create_db() ->
-    {ok, #db{main_pid = Pid} = Db} = couch_db:create(
-        test_db_name(), [admin_user_ctx()]),
-    put(db_main_pid, Pid),
-    ok = couch_db:close(Db).
-
-delete_db() ->
-    couch_server:delete(test_db_name(), [admin_user_ctx()]).
-
-compact_db() ->
-    {ok, Db} = couch_db:open_int(test_db_name(), []),
-    {ok, _} = couch_db:start_compact(Db),
-    ok = couch_db:close(Db),
-    wait_db_compact_done(10).
-
-wait_db_compact_done(0) ->
-    etap:bail("DB compaction failed to finish.");
-wait_db_compact_done(N) ->
-    {ok, Db} = couch_db:open_int(test_db_name(), []),
-    ok = couch_db:close(Db),
-    case is_pid(Db#db.compactor_pid) of
-    false ->
-        ok;
-    true ->
-        ok = timer:sleep(500),
-        wait_db_compact_done(N - 1)
-    end.
-
-compact_view_group() ->
-    DDoc = list_to_binary("_design/" ++ binary_to_list(ddoc_name())),
-    ok = couch_mrview:compact(test_db_name(), DDoc),
-    wait_view_compact_done(10).
-
-wait_view_compact_done(0) ->
-    etap:bail("View group compaction failed to finish.");
-wait_view_compact_done(N) ->
-    {ok, Code, _Headers, Body} = test_util:request(
-        db_url() ++ "/_design/" ++ binary_to_list(ddoc_name()) ++ "/_info",
-        [],
-        get),
-    case Code of
-        200 -> ok;
-        _ -> etap:bail("Invalid view group info.")
-    end,
-    {Info} = ejson:decode(Body),
-    {IndexInfo} = couch_util:get_value(<<"view_index">>, Info),
-    CompactRunning = couch_util:get_value(<<"compact_running">>, IndexInfo),
-    case CompactRunning of
-    false ->
-        ok;
-    true ->
-        ok = timer:sleep(500),
-        wait_view_compact_done(N - 1)
-    end.
-
-get_db_ref_counter() ->
-    {ok, #db{fd_ref_counter = Ref} = Db} = couch_db:open_int(test_db_name(), []),
-    ok = couch_db:close(Db),
-    Ref.
-
-check_db_ref_count() ->
-    {ok, #db{fd_ref_counter = Ref} = Db} = couch_db:open_int(test_db_name(), []),
-    ok = couch_db:close(Db),
-    etap:is(couch_ref_counter:count(Ref), 2,
-        "DB ref counter is only held by couch_db and couch_db_updater"),
-    ok.
-
-create_docs() ->
-    {ok, Db} = couch_db:open(test_db_name(), [admin_user_ctx()]),
-    Doc1 = couch_doc:from_json_obj({[
-        {<<"_id">>, <<"doc1">>},
-        {<<"value">>, 1}
-    ]}),
-    Doc2 = couch_doc:from_json_obj({[
-        {<<"_id">>, <<"doc2">>},
-        {<<"value">>, 2}
-
-    ]}),
-    Doc3 = couch_doc:from_json_obj({[
-        {<<"_id">>, <<"doc3">>},
-        {<<"value">>, 3}
-    ]}),
-    {ok, _} = couch_db:update_docs(Db, [Doc1, Doc2, Doc3]),
-    couch_db:ensure_full_commit(Db),
-    couch_db:close(Db).
-
-create_design_doc() ->
-    {ok, Db} = couch_db:open(test_db_name(), [admin_user_ctx()]),
-    DDoc = couch_doc:from_json_obj({[
-        {<<"_id">>, <<"_design/", (ddoc_name())/binary>>},
-        {<<"language">>, <<"javascript">>},
-        {<<"views">>, {[
-            {<<"bar">>, {[
-                {<<"map">>, <<"function(doc) { emit(doc._id, null); }">>}
-            ]}}
-        ]}}
-    ]}),
-    {ok, Rev} = couch_db:update_doc(Db, DDoc, []),
-    couch_db:ensure_full_commit(Db),
-    couch_db:close(Db),
-    {ok, Rev}.
-
-update_ddoc_view(DDocRev) ->
-    {ok, Db} = couch_db:open(test_db_name(), [admin_user_ctx()]),
-    DDoc = couch_doc:from_json_obj({[
-        {<<"_id">>, <<"_design/", (ddoc_name())/binary>>},
-        {<<"_rev">>, couch_doc:rev_to_str(DDocRev)},
-        {<<"language">>, <<"javascript">>},
-        {<<"views">>, {[
-            {<<"bar">>, {[
-                {<<"map">>, <<"function(doc) { emit(doc._id, 1); }">>}
-            ]}}
-        ]}}
-    ]}),
-    {ok, NewRev} = couch_db:update_doc(Db, DDoc, []),
-    couch_db:ensure_full_commit(Db),
-    couch_db:close(Db),
-    {ok, NewRev}.
-
-create_new_doc(Id) ->
-    {ok, Db} = couch_db:open(test_db_name(), [admin_user_ctx()]),
-    Doc666 = couch_doc:from_json_obj({[
-        {<<"_id">>, Id},
-        {<<"value">>, 999}
-    ]}),
-    {ok, _} = couch_db:update_docs(Db, [Doc666]),
-    couch_db:ensure_full_commit(Db),
-    couch_db:close(Db).
-
-db_url() ->
-    "http://" ++ get(addr) ++ ":" ++ get(port) ++ "/" ++
-    binary_to_list(test_db_name()).
-
-query_view(ExpectedRowCount, ExpectedRowValue, Stale) ->
-    {ok, Code, _Headers, Body} = test_util:request(
-        db_url() ++ "/_design/" ++ binary_to_list(ddoc_name()) ++ "/_view/bar"
-          ++ case Stale of
-                 false -> [];
-                 _ -> "?stale=" ++ atom_to_list(Stale)
-             end,
-        [],
-        get),
-    etap:is(Code, 200, "got view response"),
-    {Props} = ejson:decode(Body),
-    Rows = couch_util:get_value(<<"rows">>, Props, []),
-    etap:is(length(Rows), ExpectedRowCount, "result set has correct # of rows"),
-    lists:foreach(
-        fun({Row}) ->
-            case couch_util:get_value(<<"value">>, Row) of
-            ExpectedRowValue ->
-                ok;
-            _ ->
-                etap:bail("row has incorrect value")
-            end
-        end,
-        Rows).

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/test/etap/201-view-group-shutdown.t
----------------------------------------------------------------------
diff --git a/test/etap/201-view-group-shutdown.t b/test/etap/201-view-group-shutdown.t
deleted file mode 100755
index c51ec44..0000000
--- a/test/etap/201-view-group-shutdown.t
+++ /dev/null
@@ -1,293 +0,0 @@
-#!/usr/bin/env escript
-%% -*- erlang -*-
-
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
--record(user_ctx, {
-    name = null,
-    roles = [],
-    handler
-}).
-
--record(db, {
-    main_pid = nil,
-    update_pid = nil,
-    compactor_pid = nil,
-    instance_start_time, % number of microsecs since jan 1 1970 as a binary string
-    fd,
-    updater_fd,
-    fd_ref_counter,
-    header = nil,
-    committed_update_seq,
-    fulldocinfo_by_id_btree,
-    docinfo_by_seq_btree,
-    local_docs_btree,
-    update_seq,
-    name,
-    filepath,
-    validate_doc_funs = [],
-    security = [],
-    security_ptr = nil,
-    user_ctx = #user_ctx{},
-    waiting_delayed_commit = nil,
-    revs_limit = 1000,
-    fsync_options = [],
-    options = [],
-    compression,
-    before_doc_update,
-    after_doc_read
-}).
-
-main_db_name() -> <<"couch_test_view_group_shutdown">>.
-
-
-main(_) ->
-    test_util:init_code_path(),
-
-    etap:plan(17),
-    case (catch test()) of
-        ok ->
-            etap:end_tests();
-        Other ->
-            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
-            etap:bail(Other)
-    end,
-    ok.
-
-
-test() ->
-    couch_server_sup:start_link(test_util:config_files()),
-    ok = couch_config:set("couchdb", "max_dbs_open", "3", false),
-    ok = couch_config:set("couchdb", "delayed_commits", "false", false),
-    crypto:start(),
-
-    % Test that while a view group is being compacted its database can not
-    % be closed by the database LRU system.
-    test_view_group_compaction(),
-
-    couch_server_sup:stop(),
-    ok.
-
-
-test_view_group_compaction() ->
-    {ok, DbWriter3} = create_db(<<"couch_test_view_group_shutdown_w3">>),
-    ok = couch_db:close(DbWriter3),
-
-    {ok, MainDb} = create_main_db(),
-    ok = couch_db:close(MainDb),
-
-    {ok, DbWriter1} = create_db(<<"couch_test_view_group_shutdown_w1">>),
-    ok = couch_db:close(DbWriter1),
-
-    {ok, DbWriter2} = create_db(<<"couch_test_view_group_shutdown_w2">>),
-    ok = couch_db:close(DbWriter2),
-
-    Writer1 = spawn_writer(DbWriter1#db.name),
-    Writer2 = spawn_writer(DbWriter2#db.name),
-    etap:is(is_process_alive(Writer1), true, "Spawned writer 1"),
-    etap:is(is_process_alive(Writer2), true, "Spawned writer 2"),
-
-    etap:is(get_writer_status(Writer1), ok, "Writer 1 opened his database"),
-    etap:is(get_writer_status(Writer2), ok, "Writer 2 opened his database"),
-
-    {ok, MonRef} = couch_mrview:compact(MainDb#db.name, <<"_design/foo">>, [monitor]),
-
-    % Add some more docs to database and trigger view update
-    {ok, MainDb2} = couch_db:open_int(MainDb#db.name, []),
-    ok = populate_main_db(MainDb2, 3, 3),
-    update_view(MainDb2#db.name, <<"_design/foo">>, <<"foo">>),
-    ok = couch_db:close(MainDb2),
-
-    % Assuming the view compaction takes more than 50ms to complete
-    ok = timer:sleep(50),
-    Writer3 = spawn_writer(DbWriter3#db.name),
-    etap:is(is_process_alive(Writer3), true, "Spawned writer 3"),
-
-    etap:is(get_writer_status(Writer3), {error, all_dbs_active},
-        "Writer 3 got {error, all_dbs_active} when opening his database"),
-
-    etap:is(is_process_alive(Writer1), true, "Writer 1 still alive"),
-    etap:is(is_process_alive(Writer2), true, "Writer 2 still alive"),
-    etap:is(is_process_alive(Writer3), true, "Writer 3 still alive"),
-
-    receive
-    {'DOWN', MonRef, process, _, normal} ->
-         etap:diag("View group compaction successful"),
-         ok;
-    {'DOWN', MonRef, process, _, _Reason} ->
-         etap:bail("Failure compacting view group")
-    end,
-
-    ok = timer:sleep(2000),
-
-    etap:is(writer_try_again(Writer3), ok,
-        "Told writer 3 to try open his database again"),
-    etap:is(get_writer_status(Writer3), ok,
-        "Writer 3 was able to open his database"),
-
-    etap:is(is_process_alive(Writer1), true, "Writer 1 still alive"),
-    etap:is(is_process_alive(Writer2), true, "Writer 2 still alive"),
-    etap:is(is_process_alive(Writer3), true, "Writer 3 still alive"),
-
-    etap:is(stop_writer(Writer1), ok, "Stopped writer 1"),
-    etap:is(stop_writer(Writer2), ok, "Stopped writer 2"),
-    etap:is(stop_writer(Writer3), ok, "Stopped writer 3"),
-
-    delete_db(MainDb),
-    delete_db(DbWriter1),
-    delete_db(DbWriter2),
-    delete_db(DbWriter3).
-
-
-create_main_db() ->
-    {ok, Db} = create_db(main_db_name()),
-    DDoc = couch_doc:from_json_obj({[
-        {<<"_id">>, <<"_design/foo">>},
-        {<<"language">>, <<"javascript">>},
-        {<<"views">>, {[
-            {<<"foo">>, {[
-                {<<"map">>, <<"function(doc) { emit(doc._id, doc); }">>}
-            ]}},
-            {<<"foo2">>, {[
-                {<<"map">>, <<"function(doc) { emit(doc._id, doc); }">>}
-            ]}},
-            {<<"foo3">>, {[
-                {<<"map">>, <<"function(doc) { emit(doc._id, doc); }">>}
-            ]}},
-            {<<"foo4">>, {[
-                {<<"map">>, <<"function(doc) { emit(doc._id, doc); }">>}
-            ]}},
-            {<<"foo5">>, {[
-                {<<"map">>, <<"function(doc) { emit(doc._id, doc); }">>}
-            ]}}
-        ]}}
-    ]}),
-    {ok, _} = couch_db:update_doc(Db, DDoc, []),
-    ok = populate_main_db(Db, 1000, 20000),
-    update_view(Db#db.name, <<"_design/foo">>, <<"foo">>),
-    {ok, Db}.
-
-
-populate_main_db(Db, BatchSize, N) when N > 0 ->
-    Docs = lists:map(
-        fun(_) ->
-            couch_doc:from_json_obj({[
-                {<<"_id">>, couch_uuids:new()},
-                {<<"value">>, base64:encode(crypto:rand_bytes(1000))}
-            ]})
-        end,
-        lists:seq(1, BatchSize)),
-    {ok, _} = couch_db:update_docs(Db, Docs, []),
-    populate_main_db(Db, BatchSize, N - length(Docs));
-populate_main_db(_Db, _, _) ->
-    ok.
-
-
-update_view(DbName, DDocName, ViewName) ->
-    {ok, Db} = couch_db:open_int(DbName, []),
-    {ok, DDoc} = couch_db:open_doc(Db, DDocName, [ejson_body]),
-    couch_mrview:query_view(Db, DDoc, ViewName, [{stale, false}]),
-    ok = couch_db:close(Db),
-    etap:diag("View group updated").
-
-
-create_db(DbName) ->
-    {ok, Db} = couch_db:create(
-        DbName,
-        [{user_ctx, #user_ctx{roles = [<<"_admin">>]}}, overwrite]),
-    {ok, Db}.
-
-
-delete_db(#db{name = DbName, main_pid = Pid}) ->
-    ok = couch_server:delete(
-        DbName, [{user_ctx, #user_ctx{roles = [<<"_admin">>]}}]),
-    MonRef = erlang:monitor(process, Pid),
-    receive
-    {'DOWN', MonRef, process, Pid, _Reason} ->
-        ok
-    after 30000 ->
-        etap:bail("Timeout deleting database")
-    end.
-
-
-spawn_writer(DbName) ->
-    Parent = self(),
-    spawn(fun() ->
-        process_flag(priority, high),
-        writer_loop(DbName, Parent)
-    end).
-
-
-get_writer_status(Writer) ->
-    Ref = make_ref(),
-    Writer ! {get_status, Ref},
-    receive
-    {db_open, Ref} ->
-        ok;
-    {db_open_error, Error, Ref} ->
-        Error
-    after 5000 ->
-        timeout
-    end.
-
-
-writer_try_again(Writer) ->
-    Ref = make_ref(),
-    Writer ! {try_again, Ref},
-    receive
-    {ok, Ref} ->
-        ok
-    after 5000 ->
-        timeout
-    end.
-
-
-stop_writer(Writer) ->
-    Ref = make_ref(),
-    Writer ! {stop, Ref},
-    receive
-    {ok, Ref} ->
-        ok
-    after 5000 ->
-        etap:bail("Timeout stopping writer process")
-    end.
-
-
-% Just keep the database open; no need to actually do anything with it.
-writer_loop(DbName, Parent) ->
-    case couch_db:open_int(DbName, []) of
-    {ok, Db} ->
-        writer_loop_1(Db, Parent);
-    Error ->
-        writer_loop_2(DbName, Parent, Error)
-    end.
-
-writer_loop_1(Db, Parent) ->
-    receive
-    {get_status, Ref} ->
-        Parent ! {db_open, Ref},
-        writer_loop_1(Db, Parent);
-    {stop, Ref} ->
-        ok = couch_db:close(Db),
-        Parent ! {ok, Ref}
-    end.
-
-writer_loop_2(DbName, Parent, Error) ->
-    receive
-    {get_status, Ref} ->
-        Parent ! {db_open_error, Error, Ref},
-        writer_loop_2(DbName, Parent, Error);
-    {try_again, Ref} ->
-        Parent ! {ok, Ref},
-        writer_loop(DbName, Parent)
-    end.


[08/12] move test -> src/test

Posted by be...@apache.org.
http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/src/test/etap/150-invalid-view-seq.t
----------------------------------------------------------------------
diff --git a/src/test/etap/150-invalid-view-seq.t b/src/test/etap/150-invalid-view-seq.t
new file mode 100755
index 0000000..681875a
--- /dev/null
+++ b/src/test/etap/150-invalid-view-seq.t
@@ -0,0 +1,183 @@
+#!/usr/bin/env escript
+%% -*- erlang -*-
+
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+%   http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+-record(user_ctx, {
+    name = null,
+    roles = [],
+    handler
+}).
+
+test_db_name() ->
+    <<"couch_test_invalid_view_seq">>.
+
+main(_) ->
+    test_util:init_code_path(),
+
+    etap:plan(10),
+    case (catch test()) of
+        ok ->
+            etap:end_tests();
+        Other ->
+            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
+            etap:bail(Other)
+    end,
+    ok.
+
+%% NOTE: since we stop the server during the test,
+%%       a huge and ugly but harmless stack trace is sent to stderr
+%%
+test() ->
+    couch_server_sup:start_link(test_util:config_files()),
+    timer:sleep(1000),
+    delete_db(),
+    create_db(),
+
+    create_docs(),
+    create_design_doc(),
+
+    % make DB file backup
+    backup_db_file(),
+
+    put(addr, couch_config:get("httpd", "bind_address", "127.0.0.1")),
+    put(port, integer_to_list(mochiweb_socket_server:get(couch_httpd, port))),
+
+    create_new_doc(),
+    query_view_before_restore_backup(),
+
+    % restore DB file backup after querying view
+    restore_backup_db_file(),
+
+    query_view_after_restore_backup(),
+
+    delete_db(),
+    couch_server_sup:stop(),
+    ok.
+
+admin_user_ctx() ->
+    {user_ctx, #user_ctx{roles=[<<"_admin">>]}}.
+
+create_db() ->
+    {ok, _} = couch_db:create(test_db_name(), [admin_user_ctx()]).
+
+delete_db() ->
+    couch_server:delete(test_db_name(), [admin_user_ctx()]).
+
+create_docs() ->
+    {ok, Db} = couch_db:open(test_db_name(), [admin_user_ctx()]),
+    Doc1 = couch_doc:from_json_obj({[
+        {<<"_id">>, <<"doc1">>},
+        {<<"value">>, 1}
+
+    ]}),
+    Doc2 = couch_doc:from_json_obj({[
+        {<<"_id">>, <<"doc2">>},
+        {<<"value">>, 2}
+
+    ]}),
+    Doc3 = couch_doc:from_json_obj({[
+        {<<"_id">>, <<"doc3">>},
+        {<<"value">>, 3}
+
+    ]}),
+    {ok, _} = couch_db:update_docs(Db, [Doc1, Doc2, Doc3]),
+    couch_db:ensure_full_commit(Db),
+    couch_db:close(Db).
+
+create_design_doc() ->
+    {ok, Db} = couch_db:open(test_db_name(), [admin_user_ctx()]),
+    DDoc = couch_doc:from_json_obj({[
+        {<<"_id">>, <<"_design/foo">>},
+        {<<"language">>, <<"javascript">>},
+        {<<"views">>, {[
+            {<<"bar">>, {[
+                {<<"map">>, <<"function(doc) { emit(doc.value, 1); }">>}
+            ]}}
+        ]}}
+    ]}),
+    {ok, _} = couch_db:update_docs(Db, [DDoc]),
+    couch_db:ensure_full_commit(Db),
+    couch_db:close(Db).
+
+backup_db_file() ->
+    DbFile = test_util:build_file("tmp/lib/" ++
+        binary_to_list(test_db_name()) ++ ".couch"),
+    {ok, _} = file:copy(DbFile, DbFile ++ ".backup"),
+    ok.
+
+create_new_doc() ->
+    {ok, Db} = couch_db:open(test_db_name(), [admin_user_ctx()]),
+    Doc666 = couch_doc:from_json_obj({[
+        {<<"_id">>, <<"doc666">>},
+        {<<"value">>, 999}
+
+    ]}),
+    {ok, _} = couch_db:update_docs(Db, [Doc666]),
+    couch_db:ensure_full_commit(Db),
+    couch_db:close(Db).
+
+db_url() ->
+    "http://" ++ get(addr) ++ ":" ++ get(port) ++ "/" ++
+    binary_to_list(test_db_name()).
+
+query_view_before_restore_backup() ->
+    {ok, Code, _Headers, Body} = test_util:request(
+        db_url() ++ "/_design/foo/_view/bar", [], get),
+    etap:is(Code, 200, "Got view response before restoring backup."),
+    ViewJson = ejson:decode(Body),
+    Rows = couch_util:get_nested_json_value(ViewJson, [<<"rows">>]),
+    HasDoc1 = has_doc("doc1", Rows),
+    HasDoc2 = has_doc("doc2", Rows),
+    HasDoc3 = has_doc("doc3", Rows),
+    HasDoc666 = has_doc("doc666", Rows),
+    etap:is(HasDoc1, true, "Before backup restore, view has doc1"),
+    etap:is(HasDoc2, true, "Before backup restore, view has doc2"),
+    etap:is(HasDoc3, true, "Before backup restore, view has doc3"),
+    etap:is(HasDoc666, true, "Before backup restore, view has doc666"),
+    ok.
+
+has_doc(DocId1, Rows) ->
+    DocId = iolist_to_binary(DocId1),
+    lists:any(
+        fun({R}) -> lists:member({<<"id">>, DocId}, R) end,
+        Rows
+    ).
+
+restore_backup_db_file() ->
+    couch_server_sup:stop(),
+    timer:sleep(3000),
+    DbFile = test_util:build_file("tmp/lib/" ++
+        binary_to_list(test_db_name()) ++ ".couch"),
+    ok = file:delete(DbFile),
+    ok = file:rename(DbFile ++ ".backup", DbFile),
+    couch_server_sup:start_link(test_util:config_files()),
+    timer:sleep(1000),
+    put(port, integer_to_list(mochiweb_socket_server:get(couch_httpd, port))),
+    ok.
+
+query_view_after_restore_backup() ->
+    {ok, Code, _Headers, Body} = test_util:request(
+        db_url() ++ "/_design/foo/_view/bar", [], get),
+    etap:is(Code, 200, "Got view response after restoring backup."),
+    ViewJson = ejson:decode(Body),
+    Rows = couch_util:get_nested_json_value(ViewJson, [<<"rows">>]),
+    HasDoc1 = has_doc("doc1", Rows),
+    HasDoc2 = has_doc("doc2", Rows),
+    HasDoc3 = has_doc("doc3", Rows),
+    HasDoc666 = has_doc("doc666", Rows),
+    etap:is(HasDoc1, true, "After backup restore, view has doc1"),
+    etap:is(HasDoc2, true, "After backup restore, view has doc2"),
+    etap:is(HasDoc3, true, "After backup restore, view has doc3"),
+    etap:is(HasDoc666, false, "After backup restore, view does not have doc666"),
+    ok.

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/src/test/etap/160-vhosts.t
----------------------------------------------------------------------
diff --git a/src/test/etap/160-vhosts.t b/src/test/etap/160-vhosts.t
new file mode 100755
index 0000000..46fdd73
--- /dev/null
+++ b/src/test/etap/160-vhosts.t
@@ -0,0 +1,371 @@
+#!/usr/bin/env escript
+%% -*- erlang -*-
+
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License.  You may obtain a copy of
+% the License at
+%
+%   http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+-record(user_ctx, {
+    name = null,
+    roles = [],
+    handler
+}).
+
+server() ->
+    lists:concat([
+        "http://127.0.0.1:", mochiweb_socket_server:get(couch_httpd, port), "/"
+    ]).
+
+dbname() -> "etap-test-db".
+admin_user_ctx() -> {user_ctx, #user_ctx{roles=[<<"_admin">>]}}.
+
+main(_) ->
+    test_util:init_code_path(),
+
+    etap:plan(20),
+    case (catch test()) of
+        ok ->
+            etap:end_tests();
+        Other ->
+            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
+            etap:bail(Other)
+    end,
+    ok.
+
+test() ->
+    couch_server_sup:start_link(test_util:config_files()),
+    ibrowse:start(),
+    crypto:start(),
+
+    timer:sleep(1000),
+    couch_server:delete(list_to_binary(dbname()), [admin_user_ctx()]),
+    {ok, Db} = couch_db:create(list_to_binary(dbname()), [admin_user_ctx()]),
+
+    Doc = couch_doc:from_json_obj({[
+        {<<"_id">>, <<"doc1">>},
+        {<<"value">>, 666}
+    ]}),
+
+    Doc1 = couch_doc:from_json_obj({[
+        {<<"_id">>, <<"_design/doc1">>},
+        {<<"shows">>, {[
+            {<<"test">>, <<"function(doc, req) {
+    return { json: {
+        requested_path: '/' + req.requested_path.join('/'),
+        path: '/' + req.path.join('/')
+    }};
+}">>}
+        ]}},
+        {<<"rewrites">>, [
+            {[
+                {<<"from">>, <<"/">>},
+                {<<"to">>, <<"_show/test">>}
+            ]}
+        ]}
+    ]}),
+
+    {ok, _} = couch_db:update_docs(Db, [Doc, Doc1]),
+
+    couch_db:ensure_full_commit(Db),
+
+    %% end boilerplate, start test
+
+    ok = couch_config:set("vhosts", "example.com", "/etap-test-db", false),
+    ok = couch_config:set("vhosts", "*.example.com",
+            "/etap-test-db/_design/doc1/_rewrite", false),
+    ok = couch_config:set("vhosts", "example.com/test", "/etap-test-db", false),
+    ok = couch_config:set("vhosts", "example1.com",
+            "/etap-test-db/_design/doc1/_rewrite/", false),
+    ok = couch_config:set("vhosts",":appname.:dbname.example1.com",
+            "/:dbname/_design/:appname/_rewrite/", false),
+    ok = couch_config:set("vhosts", ":dbname.example1.com", "/:dbname", false),
+
+    ok = couch_config:set("vhosts", "*.example2.com", "/*", false),
+    ok = couch_config:set("vhosts", "*.example2.com/test", "/*", false),
+    ok = couch_config:set("vhosts", "*/test", "/etap-test-db", false),
+    ok = couch_config:set("vhosts", "*/test1",
+            "/etap-test-db/_design/doc1/_show/test", false),
+    ok = couch_config:set("vhosts", "example3.com", "/", false),
+
+    %% reload rules
+    couch_httpd_vhost:reload(),
+
+    test_regular_request(),
+    test_vhost_request(),
+    test_vhost_request_with_qs(),
+    test_vhost_request_with_global(),
+    test_vhost_requested_path(),
+    test_vhost_requested_path_path(),
+    test_vhost_request_wildcard(),
+    test_vhost_request_replace_var(),
+    test_vhost_request_replace_var1(),
+    test_vhost_request_replace_wildcard(),
+    test_vhost_request_path(),
+    test_vhost_request_path1(),
+    test_vhost_request_path2(),
+    test_vhost_request_path3(),
+    test_vhost_request_to_root(),
+    test_vhost_request_with_oauth(Db),
+
+    %% restart boilerplate
+    couch_db:close(Db),
+    ok = couch_server:delete(couch_db:name(Db), [admin_user_ctx()]),
+    timer:sleep(3000),
+    couch_server_sup:stop(),
+
+    ok.
+
+test_regular_request() ->
+    case ibrowse:send_req(server(), [], get, []) of
+        {ok, _, _, Body} ->
+            {Props} = ejson:decode(Body),
+            Couchdb = couch_util:get_value(<<"couchdb">>, Props),
+            Version = couch_util:get_value(<<"version">>, Props),
+            Vendor = couch_util:get_value(<<"vendor">>, Props),
+            etap:isnt(Couchdb, undefined, "Found couchdb property"),
+            etap:isnt(Version, undefined, "Found version property"),
+            etap:isnt(Vendor, undefined, "Found vendor property");
+        _Else ->
+            etap:bail("http GET / request failed")
+    end.
+
+test_vhost_request() ->
+    case ibrowse:send_req(server(), [], get, [], [{host_header, "example.com"}]) of
+        {ok, _, _, Body} ->
+            {JsonBody} = ejson:decode(Body),
+            HasDbNameInfo = proplists:is_defined(<<"db_name">>, JsonBody),
+            etap:is(HasDbNameInfo, true, "should return database info");
+        _Else ->
+           etap:is(false, true, <<"ibrowse fail">>)
+    end.
+
+test_vhost_request_with_qs() ->
+    Url = server() ++ "doc1?revs_info=true",
+    case ibrowse:send_req(Url, [], get, [], [{host_header, "example.com"}]) of
+        {ok, _, _, Body} ->
+            {JsonProps} = ejson:decode(Body),
+            HasRevsInfo = proplists:is_defined(<<"_revs_info">>, JsonProps),
+            etap:is(HasRevsInfo, true, "should return _revs_info");
+        _Else ->
+            etap:is(false, true, <<"ibrowse fail">>)
+    end.
+
+test_vhost_request_with_global() ->
+    Url2 = server() ++ "_utils/index.html",
+    case ibrowse:send_req(Url2, [], get, [], [{host_header, "example.com"}]) of
+        {ok, _, _, Body2} ->
+            "<!DOCTYPE" ++ _Foo = Body2,
+            etap:is(true, true, "should serve /_utils even inside vhosts");
+        _Else ->
+            etap:is(false, true, <<"ibrowse fail">>)
+    end.
+
+test_vhost_requested_path() ->
+    case ibrowse:send_req(server(), [], get, [], [{host_header, "example1.com"}]) of
+        {ok, _, _, Body} ->
+            {Json} = ejson:decode(Body),
+            etap:is(case proplists:get_value(<<"requested_path">>, Json) of
+                <<"/">> -> true;
+                _ -> false
+            end, true, <<"requested path in req ok">>);
+        _Else ->
+            etap:is(false, true, <<"ibrowse fail">>)
+    end.
+
+test_vhost_requested_path_path() ->
+    case ibrowse:send_req(server(), [], get, [], [{host_header, "example1.com"}]) of
+        {ok, _, _, Body} ->
+            {Json} = ejson:decode(Body),
+            etap:is(case proplists:get_value(<<"path">>, Json) of
+                <<"/etap-test-db/_design/doc1/_show/test">> -> true;
+                _ -> false
+            end, true, <<"path in req ok">>);
+        _Else ->
+            etap:is(false, true, <<"ibrowse fail">>)
+    end.
+
+test_vhost_request_wildcard()->
+    case ibrowse:send_req(server(), [], get, [], [{host_header, "test.example.com"}]) of
+        {ok, _, _, Body} ->
+            {Json} = ejson:decode(Body),
+            etap:is(case proplists:get_value(<<"path">>, Json) of
+                <<"/etap-test-db/_design/doc1/_show/test">> -> true;
+                _ -> false
+            end, true, <<"wildcard  ok">>);
+        _Else -> etap:is(false, true, <<"ibrowse fail">>)
+    end.
+
+
+test_vhost_request_replace_var() ->
+    case ibrowse:send_req(server(), [], get, [], [{host_header,"etap-test-db.example1.com"}]) of
+        {ok, _, _, Body} ->
+            {JsonBody} = ejson:decode(Body),
+            HasDbNameInfo = proplists:is_defined(<<"db_name">>, JsonBody),
+            etap:is(HasDbNameInfo, true, "should return database info");
+        _Else -> etap:is(false, true, <<"ibrowse fail">>)
+    end.
+
+test_vhost_request_replace_var1() ->
+    case ibrowse:send_req(server(), [], get, [], [{host_header, "doc1.etap-test-db.example1.com"}]) of
+        {ok, _, _, Body} ->
+            {Json} = ejson:decode(Body),
+            etap:is(case proplists:get_value(<<"path">>, Json) of
+                <<"/etap-test-db/_design/doc1/_show/test">> -> true;
+                _ -> false
+            end, true, <<"wildcard  ok">>);
+        _Else -> etap:is(false, true, <<"ibrowse fail">>)
+    end.
+
+test_vhost_request_replace_wildcard() ->
+    case ibrowse:send_req(server(), [], get, [], [{host_header,"etap-test-db.example2.com"}]) of
+        {ok, _, _, Body} ->
+            {JsonBody} = ejson:decode(Body),
+            HasDbNameInfo = proplists:is_defined(<<"db_name">>, JsonBody),
+            etap:is(HasDbNameInfo, true, "should return database info");
+        _Else -> etap:is(false, true, <<"ibrowse fail">>)
+    end.
+
+test_vhost_request_path() ->
+    Uri = server() ++ "test",
+    case ibrowse:send_req(Uri, [], get, [], [{host_header, "example.com"}]) of
+        {ok, _, _, Body} ->
+            {JsonBody} = ejson:decode(Body),
+            HasDbNameInfo = proplists:is_defined(<<"db_name">>, JsonBody),
+            etap:is(HasDbNameInfo, true, "should return database info");
+        _Else -> etap:is(false, true, <<"ibrowse fail">>)
+    end.
+
+test_vhost_request_path1() ->
+    Url = server() ++ "test/doc1?revs_info=true",
+    case ibrowse:send_req(Url, [], get, [], []) of
+        {ok, _, _, Body} ->
+            {JsonProps} = ejson:decode(Body),
+            HasRevsInfo = proplists:is_defined(<<"_revs_info">>, JsonProps),
+            etap:is(HasRevsInfo, true, "should return _revs_info");
+        _Else -> etap:is(false, true, <<"ibrowse fail">>)
+    end.
+
+test_vhost_request_path2() ->
+    Uri = server() ++ "test",
+    case ibrowse:send_req(Uri, [], get, [], [{host_header,"etap-test-db.example2.com"}]) of
+        {ok, _, _, Body} ->
+            {JsonBody} = ejson:decode(Body),
+            HasDbNameInfo = proplists:is_defined(<<"db_name">>, JsonBody),
+            etap:is(HasDbNameInfo, true, "should return database info");
+        _Else -> etap:is(false, true, <<"ibrowse fail">>)
+    end.
+
+test_vhost_request_path3() ->
+    Uri = server() ++ "test1",
+    case ibrowse:send_req(Uri, [], get, [], []) of
+        {ok, _, _, Body} ->
+            {Json} = ejson:decode(Body),
+            etap:is(case proplists:get_value(<<"path">>, Json) of
+                <<"/etap-test-db/_design/doc1/_show/test">> -> true;
+                _ -> false
+            end, true, <<"path in req ok">>);
+        _Else -> etap:is(false, true, <<"ibrowse fail">>)
+    end.
+
+test_vhost_request_to_root() ->
+    Uri = server(),
+    case ibrowse:send_req(Uri, [], get, [], []) of
+        {ok, _, _, Body} ->
+            {JsonBody} = ejson:decode(Body),
+            HasCouchDBWelcome = proplists:is_defined(<<"couchdb">>, JsonBody),
+            etap:is(HasCouchDBWelcome, true, "should allow redirect to /");
+        _Else -> etap:is(false, true, <<"ibrowse fail">>)
+    end.
+
+test_vhost_request_with_oauth(Db) ->
+    {ok, AuthDb} = couch_db:create(
+        <<"tap_test_sec_db">>, [admin_user_ctx(), overwrite]),
+    PrevAuthDbName = couch_config:get("couch_httpd_auth", "authentication_db"),
+    couch_config:set("couch_httpd_auth", "authentication_db", "tap_test_sec_db", false),
+    couch_config:set("oauth_token_users", "otoksec1", "joe", false),
+    couch_config:set("oauth_consumer_secrets", "consec1", "foo", false),
+    couch_config:set("oauth_token_secrets", "otoksec1", "foobar", false),
+    couch_config:set("couch_httpd_auth", "require_valid_user", "true", false),
+
+    DDoc = couch_doc:from_json_obj({[
+        {<<"_id">>, <<"_design/test">>},
+        {<<"language">>, <<"javascript">>},
+        {<<"rewrites">>, [
+            {[
+                {<<"from">>, <<"foobar">>},
+                {<<"to">>, <<"_info">>}
+            ]}
+        ]}
+    ]}),
+    {ok, _} = couch_db:update_doc(Db, DDoc, []),
+
+    RewritePath = "/etap-test-db/_design/test/_rewrite/foobar",
+    ok = couch_config:set("vhosts", "oauth-example.com", RewritePath, false),
+    couch_httpd_vhost:reload(),
+
+    case ibrowse:send_req(server(), [], get, [], [{host_header, "oauth-example.com"}]) of
+        {ok, "401", _, Body} ->
+            {JsonBody} = ejson:decode(Body),
+            etap:is(
+                couch_util:get_value(<<"error">>, JsonBody),
+                <<"unauthorized">>,
+                "Request without OAuth credentials failed");
+        Error ->
+           etap:bail("Request without OAuth credentials did not fail: " ++
+               couch_util:to_list(Error))
+    end,
+
+    JoeDoc = couch_doc:from_json_obj({[
+        {<<"_id">>, <<"org.couchdb.user:joe">>},
+        {<<"type">>, <<"user">>},
+        {<<"name">>, <<"joe">>},
+        {<<"roles">>, []},
+        {<<"password_sha">>, <<"fe95df1ca59a9b567bdca5cbaf8412abd6e06121">>},
+        {<<"salt">>, <<"4e170ffeb6f34daecfd814dfb4001a73">>}
+    ]}),
+    {ok, _} = couch_db:update_doc(AuthDb, JoeDoc, []),
+
+    Url = "http://oauth-example.com/",
+    Consumer = {"consec1", "foo", hmac_sha1},
+    SignedParams = oauth:sign(
+        "GET", Url, [], Consumer, "otoksec1", "foobar"),
+    OAuthUrl = oauth:uri(server(), SignedParams),
+
+    case ibrowse:send_req(OAuthUrl, [], get, [], [{host_header, "oauth-example.com"}]) of
+        {ok, "200", _, Body2} ->
+            {JsonBody2} = ejson:decode(Body2),
+            etap:is(couch_util:get_value(<<"name">>, JsonBody2), <<"test">>,
+                "should return ddoc info with OAuth credentials");
+        Error2 ->
+           etap:bail("Failed to access vhost with OAuth credentials: " ++
+               couch_util:to_list(Error2))
+    end,
+
+    Consumer2 = {"consec1", "bad_secret", hmac_sha1},
+    SignedParams2 = oauth:sign(
+        "GET", Url, [], Consumer2, "otoksec1", "foobar"),
+    OAuthUrl2 = oauth:uri(server(), SignedParams2),
+
+    case ibrowse:send_req(OAuthUrl2, [], get, [], [{host_header, "oauth-example.com"}]) of
+        {ok, "401", _, Body3} ->
+            {JsonBody3} = ejson:decode(Body3),
+            etap:is(
+                couch_util:get_value(<<"error">>, JsonBody3),
+                <<"unauthorized">>,
+                "Request with bad OAuth credentials failed");
+        Error3 ->
+           etap:bail("Failed to access vhost with bad OAuth credentials: " ++
+               couch_util:to_list(Error3))
+    end,
+
+    couch_config:set("couch_httpd_auth", "authentication_db", PrevAuthDbName, false),
+    couch_config:set("couch_httpd_auth", "require_valid_user", "false", false),
+    ok = couch_server:delete(couch_db:name(AuthDb), [admin_user_ctx()]).

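A rough sketch of how the variable-substitution rule above resolves, as exercised by
test_vhost_request_replace_var1 (hostnames and paths are the ones set in test()):

    Host: doc1.etap-test-db.example1.com
      matches ":appname.:dbname.example1.com"   (appname = doc1, dbname = etap-test-db)
      -> vhost target /etap-test-db/_design/doc1/_rewrite/
      -> the design doc's rewrite rule maps "/" to _show/test
      -> the show function therefore reports path /etap-test-db/_design/doc1/_show/test
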
http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/src/test/etap/170-os-daemons.es
----------------------------------------------------------------------
diff --git a/src/test/etap/170-os-daemons.es b/src/test/etap/170-os-daemons.es
new file mode 100755
index 0000000..73974e9
--- /dev/null
+++ b/src/test/etap/170-os-daemons.es
@@ -0,0 +1,26 @@
+#! /usr/bin/env escript
+
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+%   http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+loop() ->
+    loop(io:read("")).
+
+loop({ok, _}) ->
+    loop(io:read(""));
+loop(eof) ->
+    stop;
+loop({error, Reason}) ->
+    throw({error, Reason}).
+
+main([]) ->
+    loop().

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/src/test/etap/170-os-daemons.t
----------------------------------------------------------------------
diff --git a/src/test/etap/170-os-daemons.t b/src/test/etap/170-os-daemons.t
new file mode 100755
index 0000000..6feaa1b
--- /dev/null
+++ b/src/test/etap/170-os-daemons.t
@@ -0,0 +1,114 @@
+#!/usr/bin/env escript
+%% -*- erlang -*-
+
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License.  You may obtain a copy of
+% the License at
+%
+%   http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+-record(daemon, {
+    port,
+    name,
+    cmd,
+    kill,
+    status=running,
+    cfg_patterns=[],
+    errors=[],
+    buf=[]
+}).
+
+config_files() ->
+    lists:map(fun test_util:build_file/1, [
+        "etc/couchdb/default_dev.ini"
+    ]).
+
+daemon_cmd() ->
+    test_util:source_file("test/etap/170-os-daemons.es").
+
+main(_) ->
+    test_util:init_code_path(),
+
+    etap:plan(49),
+    case (catch test()) of
+        ok ->
+            etap:end_tests();
+        Other ->
+            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
+            etap:bail(Other)
+    end,
+    ok.
+
+test() ->
+    couch_config:start_link(config_files()),
+    couch_os_daemons:start_link(),
+
+    etap:diag("Daemons boot after configuration added."),
+    couch_config:set("os_daemons", "foo", daemon_cmd(), false),
+    timer:sleep(1000),
+    
+    {ok, [D1]} = couch_os_daemons:info([table]),
+    check_daemon(D1, "foo"),
+
+    % Check table form
+    {ok, Tab1} = couch_os_daemons:info(),
+    [T1] = ets:tab2list(Tab1),
+    check_daemon(T1, "foo"),
+
+    etap:diag("Daemons stop after configuration removed."),
+    couch_config:delete("os_daemons", "foo", false),
+    timer:sleep(500),
+    
+    {ok, []} = couch_os_daemons:info([table]),
+    {ok, Tab2} = couch_os_daemons:info(),
+    etap:is(ets:tab2list(Tab2), [], "As table returns empty table."),
+    
+    etap:diag("Adding multiple daemons causes both to boot."),
+    couch_config:set("os_daemons", "bar", daemon_cmd(), false),
+    couch_config:set("os_daemons", "baz", daemon_cmd(), false),
+    timer:sleep(500),
+    {ok, Daemons} = couch_os_daemons:info([table]),
+    lists:foreach(fun(D) ->
+        check_daemon(D)
+    end, Daemons),
+
+    {ok, Tab3} = couch_os_daemons:info(),
+    lists:foreach(fun(D) ->
+        check_daemon(D)
+    end, ets:tab2list(Tab3)),
+    
+    etap:diag("Removing one daemon leaves the other alive."),
+    couch_config:delete("os_daemons", "bar", false),
+    timer:sleep(500),
+    
+    {ok, [D2]} = couch_os_daemons:info([table]),
+    check_daemon(D2, "baz"),
+    
+    % Check table version
+    {ok, Tab4} = couch_os_daemons:info(),
+    [T4] = ets:tab2list(Tab4),
+    check_daemon(T4, "baz"),
+    
+    ok.
+
+check_daemon(D) ->
+    check_daemon(D, D#daemon.name).
+
+check_daemon(D, Name) ->
+    BaseName = "170-os-daemons.es",
+    BaseLen = length(BaseName),
+    CmdLen = length(D#daemon.cmd),
+    CmdName = lists:sublist(D#daemon.cmd, CmdLen-BaseLen+1, BaseLen),
+
+    etap:is(is_port(D#daemon.port), true, "Daemon port is a port."),
+    etap:is(D#daemon.name, Name, "Daemon name was set correctly."),
+    etap:is(CmdName, BaseName, "Command name was set correctly."),
+    etap:isnt(D#daemon.kill, undefined, "Kill command was set."),
+    etap:is(D#daemon.errors, [], "No errors occurred while booting."),
+    etap:is(D#daemon.buf, [], "No extra data left in the buffer.").

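Side note on check_daemon/2 above: the lists:sublist/3 call just extracts the trailing
basename of the configured command, so the assertion is independent of where the source
tree lives. A minimal shell illustration (the path here is only an example):

    1> Cmd = "/path/to/src/test/etap/170-os-daemons.es", Base = "170-os-daemons.es",
    1> lists:sublist(Cmd, length(Cmd) - length(Base) + 1, length(Base)).
    "170-os-daemons.es"
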
http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/src/test/etap/171-os-daemons-config.es
----------------------------------------------------------------------
diff --git a/src/test/etap/171-os-daemons-config.es b/src/test/etap/171-os-daemons-config.es
new file mode 100755
index 0000000..b4a914e
--- /dev/null
+++ b/src/test/etap/171-os-daemons-config.es
@@ -0,0 +1,85 @@
+#! /usr/bin/env escript
+
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+%   http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+filename() ->
+    list_to_binary(test_util:source_file("test/etap/171-os-daemons-config.es")).
+
+read() ->
+    case io:get_line('') of
+        eof ->
+            stop;
+        Data ->
+            ejson:decode(Data)
+    end.
+
+write(Mesg) ->
+    Data = iolist_to_binary(ejson:encode(Mesg)),
+    io:format(binary_to_list(Data) ++ "\n", []).
+
+get_cfg(Section) ->
+    write([<<"get">>, Section]),
+    read().
+
+get_cfg(Section, Name) ->
+    write([<<"get">>, Section, Name]),
+    read().
+
+log(Mesg) ->
+    write([<<"log">>, Mesg]).
+
+log(Mesg, Level) ->
+    write([<<"log">>, Mesg, {[{<<"level">>, Level}]}]).
+
+test_get_cfg1() ->
+    FileName = filename(),
+    {[{<<"foo">>, FileName}]} = get_cfg(<<"os_daemons">>).
+
+test_get_cfg2() ->
+    FileName = filename(),
+    FileName = get_cfg(<<"os_daemons">>, <<"foo">>),
+    <<"sequential">> = get_cfg(<<"uuids">>, <<"algorithm">>).
+
+test_get_unknown_cfg() ->
+    {[]} = get_cfg(<<"aal;3p4">>),
+    null = get_cfg(<<"aal;3p4">>, <<"313234kjhsdfl">>).
+
+test_log() ->
+    log(<<"foobar!">>),
+    log(<<"some stuff!">>, <<"debug">>),
+    log(2),
+    log(true),
+    write([<<"log">>, <<"stuff">>, 2]),
+    write([<<"log">>, 3, null]),
+    write([<<"log">>, [1, 2], {[{<<"level">>, <<"debug">>}]}]),
+    write([<<"log">>, <<"true">>, {[]}]).
+
+do_tests() ->
+    test_get_cfg1(),
+    test_get_cfg2(),
+    test_get_unknown_cfg(),
+    test_log(),
+    loop(io:read("")).
+
+loop({ok, _}) ->
+    loop(io:read(""));
+loop(eof) ->
+    init:stop();
+loop({error, _Reason}) ->
+    init:stop().
+
+main([]) ->
+    test_util:init_code_path(),
+    couch_config:start_link(test_util:config_files()),
+    couch_drv:start_link(),
+    do_tests().

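For orientation, the daemon script above talks to couch_os_daemons over stdio with one
JSON value per line; the exchange looks roughly like this (replies match the assertions
in the script, and the path is whatever filename() resolves to):

    daemon -> couch:  ["get", "os_daemons"]
    couch  -> daemon: {"foo": ".../171-os-daemons-config.es"}
    daemon -> couch:  ["get", "uuids", "algorithm"]
    couch  -> daemon: "sequential"
    daemon -> couch:  ["log", "some stuff!", {"level": "debug"}]
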
http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/src/test/etap/171-os-daemons-config.t
----------------------------------------------------------------------
diff --git a/src/test/etap/171-os-daemons-config.t b/src/test/etap/171-os-daemons-config.t
new file mode 100755
index 0000000..e9dc3f3
--- /dev/null
+++ b/src/test/etap/171-os-daemons-config.t
@@ -0,0 +1,74 @@
+#!/usr/bin/env escript
+%% -*- erlang -*-
+
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License.  You may obtain a copy of
+% the License at
+%
+%   http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+-record(daemon, {
+    port,
+    name,
+    cmd,
+    kill,
+    status=running,
+    cfg_patterns=[],
+    errors=[],
+    buf=[]
+}).
+
+config_files() ->
+    lists:map(fun test_util:build_file/1, [
+        "etc/couchdb/default_dev.ini"
+    ]).
+
+daemon_cmd() ->
+    test_util:source_file("test/etap/171-os-daemons-config.es").
+
+main(_) ->
+    test_util:init_code_path(),
+
+    etap:plan(6),
+    case (catch test()) of
+        ok ->
+            etap:end_tests();
+        Other ->
+            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
+            etap:bail(Other)
+    end,
+    ok.
+
+test() ->
+    couch_config:start_link(config_files()),
+    couch_config:set("log", "level", "debug", false),
+    couch_log:start_link(),
+    couch_os_daemons:start_link(),
+
+    % "foo" is a required name by this test.
+    couch_config:set("os_daemons", "foo", daemon_cmd(), false),
+    timer:sleep(1000),
+    
+    {ok, [D1]} = couch_os_daemons:info([table]),
+    check_daemon(D1, "foo"),
+    
+    ok.
+
+check_daemon(D, Name) ->
+    BaseName = "171-os-daemons-config.es",
+    BaseLen = length(BaseName),
+    CmdLen = length(D#daemon.cmd),
+    CmdName = lists:sublist(D#daemon.cmd, CmdLen-BaseLen+1, BaseLen),
+
+    etap:is(is_port(D#daemon.port), true, "Daemon port is a port."),
+    etap:is(D#daemon.name, Name, "Daemon name was set correctly."),
+    etap:is(CmdName, BaseName, "Command name was set correctly."),
+    etap:isnt(D#daemon.kill, undefined, "Kill command was set."),
+    etap:is(D#daemon.errors, [], "No errors occurred while booting."),
+    etap:is(D#daemon.buf, [], "No extra data left in the buffer.").

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/src/test/etap/172-os-daemon-errors.1.sh
----------------------------------------------------------------------
diff --git a/src/test/etap/172-os-daemon-errors.1.sh b/src/test/etap/172-os-daemon-errors.1.sh
new file mode 100644
index 0000000..345c8b4
--- /dev/null
+++ b/src/test/etap/172-os-daemon-errors.1.sh
@@ -0,0 +1,17 @@
+#!/bin/sh -e
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+# 
+#   http://www.apache.org/licenses/LICENSE-2.0
+# 
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
+# 
+# Please do not make this file executable as that's the error being tested.
+
+sleep 5

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/src/test/etap/172-os-daemon-errors.2.sh
----------------------------------------------------------------------
diff --git a/src/test/etap/172-os-daemon-errors.2.sh b/src/test/etap/172-os-daemon-errors.2.sh
new file mode 100755
index 0000000..256ee79
--- /dev/null
+++ b/src/test/etap/172-os-daemon-errors.2.sh
@@ -0,0 +1,15 @@
+#!/bin/sh -e
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+# 
+#   http://www.apache.org/licenses/LICENSE-2.0
+# 
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
+
+exit 1

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/src/test/etap/172-os-daemon-errors.3.sh
----------------------------------------------------------------------
diff --git a/src/test/etap/172-os-daemon-errors.3.sh b/src/test/etap/172-os-daemon-errors.3.sh
new file mode 100755
index 0000000..f5a1368
--- /dev/null
+++ b/src/test/etap/172-os-daemon-errors.3.sh
@@ -0,0 +1,15 @@
+#!/bin/sh -e
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+# 
+#   http://www.apache.org/licenses/LICENSE-2.0
+# 
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
+
+sleep 1

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/src/test/etap/172-os-daemon-errors.4.sh
----------------------------------------------------------------------
diff --git a/src/test/etap/172-os-daemon-errors.4.sh b/src/test/etap/172-os-daemon-errors.4.sh
new file mode 100755
index 0000000..5bc10e8
--- /dev/null
+++ b/src/test/etap/172-os-daemon-errors.4.sh
@@ -0,0 +1,15 @@
+#!/bin/sh -e
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+# 
+#   http://www.apache.org/licenses/LICENSE-2.0
+# 
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
+
+sleep 2

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/src/test/etap/172-os-daemon-errors.t
----------------------------------------------------------------------
diff --git a/src/test/etap/172-os-daemon-errors.t b/src/test/etap/172-os-daemon-errors.t
new file mode 100755
index 0000000..bde5c6f
--- /dev/null
+++ b/src/test/etap/172-os-daemon-errors.t
@@ -0,0 +1,126 @@
+#!/usr/bin/env escript
+%% -*- erlang -*-
+
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License.  You may obtain a copy of
+% the License at
+%
+%   http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+-record(daemon, {
+    port,
+    name,
+    cmd,
+    kill,
+    status=running,
+    cfg_patterns=[],
+    errors=[],
+    buf=[]
+}).
+
+config_files() ->
+    lists:map(fun test_util:build_file/1, [
+        "etc/couchdb/default_dev.ini"
+    ]).
+
+bad_perms() ->
+    test_util:source_file("test/etap/172-os-daemon-errors.1.sh").
+
+die_on_boot() ->
+    test_util:source_file("test/etap/172-os-daemon-errors.2.sh").
+
+die_quickly() ->
+    test_util:source_file("test/etap/172-os-daemon-errors.3.sh").
+
+can_reboot() ->
+    test_util:source_file("test/etap/172-os-daemon-errors.4.sh").
+
+main(_) ->
+    test_util:init_code_path(),
+
+    etap:plan(36),
+    case (catch test()) of
+        ok ->
+            etap:end_tests();
+        Other ->
+            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
+            etap:bail(Other)
+    end,
+    ok.
+
+test() ->
+    couch_config:start_link(config_files()),
+    couch_os_daemons:start_link(),
+
+    etap:diag("Daemon not executable."),
+    test_halts("foo", bad_perms(), 1000),
+
+    etap:diag("Daemon dies on boot."),
+    test_halts("bar", die_on_boot(), 1000),
+
+    etap:diag("Daemon dies quickly after boot."),
+    test_halts("baz", die_quickly(), 4000),
+    
+    etap:diag("Daemon dies, but not quickly enough to be halted."),
+    test_runs("bam", can_reboot()),
+    
+    ok.
+
+test_halts(Name, Cmd, Time) ->
+    couch_config:set("os_daemons", Name, Cmd ++ " 2> /dev/null", false),
+    timer:sleep(Time),
+    {ok, [D]} = couch_os_daemons:info([table]),
+    check_dead(D, Name, Cmd),
+    couch_config:delete("os_daemons", Name, false).
+
+test_runs(Name, Cmd) ->
+    couch_config:set("os_daemons", Name, Cmd, false),
+
+    timer:sleep(1000),
+    {ok, [D1]} = couch_os_daemons:info([table]),
+    check_daemon(D1, Name, Cmd, 0),
+    
+    % Should reboot every two seconds. We're at 1s, so wait
+    % until 3s to be in the middle of the next invocation's
+    % life span.
+    timer:sleep(2000),
+    {ok, [D2]} = couch_os_daemons:info([table]),
+    check_daemon(D2, Name, Cmd, 1),
+    
+    % If the kill command changed, that means we rebooted the process.
+    etap:isnt(D1#daemon.kill, D2#daemon.kill, "Kill command changed.").
+
+check_dead(D, Name, Cmd) ->
+    BaseName = filename:basename(Cmd) ++ " 2> /dev/null",
+    BaseLen = length(BaseName),
+    CmdLen = length(D#daemon.cmd),
+    CmdName = lists:sublist(D#daemon.cmd, CmdLen-BaseLen+1, BaseLen),
+
+    etap:is(is_port(D#daemon.port), true, "Daemon port is a port."),
+    etap:is(D#daemon.name, Name, "Daemon name was set correctly."),
+    etap:is(CmdName, BaseName, "Command name was set correctly."),
+    etap:isnt(D#daemon.kill, undefined, "Kill command was set."),
+    etap:is(D#daemon.status, halted, "Daemon has been halted."),
+    etap:is(D#daemon.errors, nil, "Errors have been disabled."),
+    etap:is(D#daemon.buf, nil, "Buffer has been switched off.").
+
+check_daemon(D, Name, Cmd, Errs) ->
+    BaseName = filename:basename(Cmd),
+    BaseLen = length(BaseName),
+    CmdLen = length(D#daemon.cmd),
+    CmdName = lists:sublist(D#daemon.cmd, CmdLen-BaseLen+1, BaseLen),
+
+    etap:is(is_port(D#daemon.port), true, "Daemon port is a port."),
+    etap:is(D#daemon.name, Name, "Daemon name was set correctly."),
+    etap:is(CmdName, BaseName, "Command name was set correctly."),
+    etap:isnt(D#daemon.kill, undefined, "Kill command was set."),
+    etap:is(D#daemon.status, running, "Daemon still running."),
+    etap:is(length(D#daemon.errors), Errs, "Found expected number of errors."),
+    etap:is(D#daemon.buf, [], "No extra data left in the buffer.").
+

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/src/test/etap/173-os-daemon-cfg-register.t
----------------------------------------------------------------------
diff --git a/src/test/etap/173-os-daemon-cfg-register.t b/src/test/etap/173-os-daemon-cfg-register.t
new file mode 100755
index 0000000..256ee7d
--- /dev/null
+++ b/src/test/etap/173-os-daemon-cfg-register.t
@@ -0,0 +1,116 @@
+#!/usr/bin/env escript
+%% -*- erlang -*-
+
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License.  You may obtain a copy of
+% the License at
+%
+%   http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+-record(daemon, {
+    port,
+    name,
+    cmd,
+    kill,
+    status=running,
+    cfg_patterns=[],
+    errors=[],
+    buf=[]
+}).
+
+daemon_name() ->
+    "wheee".
+
+daemon_cmd() ->
+    test_util:build_file("test/etap/test_cfg_register").
+
+main(_) ->
+    test_util:init_code_path(),
+
+    etap:plan(27),
+    case (catch test()) of
+        ok ->
+            etap:end_tests();
+        Other ->
+            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
+            etap:bail(Other)
+    end,
+    ok.
+
+test() ->
+    couch_config:start_link(test_util:config_files()),
+    couch_os_daemons:start_link(),
+    
+    DaemonCmd = daemon_cmd() ++ " 2> /dev/null",
+    
+    etap:diag("Booting the daemon"),
+    couch_config:set("os_daemons", daemon_name(), DaemonCmd, false),
+    wait_for_start(10),
+    {ok, [D1]} = couch_os_daemons:info([table]),
+    check_daemon(D1, running),
+    
+    etap:diag("Daemon restarts when section changes."),
+    couch_config:set("s1", "k", "foo", false),
+    wait_for_restart(10),
+    {ok, [D2]} = couch_os_daemons:info([table]),
+    check_daemon(D2, running),
+    etap:isnt(D2#daemon.kill, D1#daemon.kill, "Kill command shows restart."),
+
+    etap:diag("Daemon doesn't restart for ignored section key."),
+    couch_config:set("s2", "k2", "baz", false),
+    timer:sleep(1000), % Message travel time.
+    {ok, [D3]} = couch_os_daemons:info([table]),
+    etap:is(D3, D2, "Same daemon info after ignored config change."),
+    
+    etap:diag("Daemon restarts for specific section/key pairs."),
+    couch_config:set("s2", "k", "bingo", false),
+    wait_for_restart(10),
+    {ok, [D4]} = couch_os_daemons:info([table]),
+    check_daemon(D4, running),
+    etap:isnt(D4#daemon.kill, D3#daemon.kill, "Kill command changed again."),
+    
+    ok.
+
+wait_for_start(0) ->
+    throw({error, wait_for_start});
+wait_for_start(N) ->
+    case couch_os_daemons:info([table]) of
+        {ok, []} ->
+            timer:sleep(200),
+            wait_for_start(N-1);
+        _ ->
+            timer:sleep(1000)
+    end.
+
+wait_for_restart(0) ->
+    throw({error, wait_for_restart});
+wait_for_restart(N) ->
+    {ok, [D]} = couch_os_daemons:info([table]),
+    case D#daemon.status of
+        restarting ->
+            timer:sleep(200),
+            wait_for_restart(N-1);
+        _ ->
+            timer:sleep(1000)
+    end.
+
+check_daemon(D, Status) ->
+    BaseName = filename:basename(daemon_cmd()) ++ " 2> /dev/null",
+    BaseLen = length(BaseName),
+    CmdLen = length(D#daemon.cmd),
+    CmdName = lists:sublist(D#daemon.cmd, CmdLen-BaseLen+1, BaseLen),
+
+    etap:is(is_port(D#daemon.port), true, "Daemon port is a port."),
+    etap:is(D#daemon.name, daemon_name(), "Daemon name was set correctly."),
+    etap:is(CmdName, BaseName, "Command name was set correctly."),
+    etap:isnt(D#daemon.kill, undefined, "Kill command was set."),
+    etap:is(D#daemon.status, Status, "Daemon status is correct."),
+    etap:is(D#daemon.cfg_patterns, [{"s1"}, {"s2", "k"}], "Cfg patterns set"),
+    etap:is(D#daemon.errors, [], "No errors have occurred."),
+    etap:isnt(D#daemon.buf, nil, "Buffer is active.").

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/src/test/etap/180-http-proxy.ini
----------------------------------------------------------------------
diff --git a/src/test/etap/180-http-proxy.ini b/src/test/etap/180-http-proxy.ini
new file mode 100644
index 0000000..3e2ba13
--- /dev/null
+++ b/src/test/etap/180-http-proxy.ini
@@ -0,0 +1,20 @@
+; Licensed to the Apache Software Foundation (ASF) under one
+; or more contributor license agreements.  See the NOTICE file
+; distributed with this work for additional information
+; regarding copyright ownership.  The ASF licenses this file
+; to you under the Apache License, Version 2.0 (the
+; "License"); you may not use this file except in compliance
+; with the License.  You may obtain a copy of the License at
+; 
+;   http://www.apache.org/licenses/LICENSE-2.0
+;
+; Unless required by applicable law or agreed to in writing,
+; software distributed under the License is distributed on an
+; "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+; KIND, either express or implied.  See the License for the
+; specific language governing permissions and limitations
+; under the License.
+
+; 49151 is IANA Reserved, let's assume no one is listening there
+[httpd_global_handlers]
+_error = {couch_httpd_proxy, handle_proxy_req, <<"http://127.0.0.1:49151/">>}

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/src/test/etap/180-http-proxy.t
----------------------------------------------------------------------
diff --git a/src/test/etap/180-http-proxy.t b/src/test/etap/180-http-proxy.t
new file mode 100755
index 0000000..da67603
--- /dev/null
+++ b/src/test/etap/180-http-proxy.t
@@ -0,0 +1,376 @@
+#!/usr/bin/env escript
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+%   http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+-record(req, {method=get, path="", headers=[], body="", opts=[]}).
+
+server() ->
+    lists:concat([
+        "http://127.0.0.1:",
+        mochiweb_socket_server:get(couch_httpd, port),
+        "/_test/"
+    ]).
+
+proxy() ->
+    "http://127.0.0.1:" ++ integer_to_list(test_web:get_port()) ++ "/".
+
+external() -> "https://www.google.com/".
+
+main(_) ->
+    test_util:init_code_path(),
+
+    etap:plan(61),
+    case (catch test()) of
+        ok ->
+            etap:end_tests();
+        Other ->
+            etap:diag("Test died abnormally: ~p", [Other]),
+            etap:bail("Bad return value.")
+    end,
+    ok.
+
+check_request(Name, Req, Remote, Local) ->
+    case Remote of
+        no_remote -> ok;
+        _ -> test_web:set_assert(Remote)
+    end,
+    Url = case proplists:lookup(url, Req#req.opts) of
+        none -> server() ++ Req#req.path;
+        {url, DestUrl} -> DestUrl
+    end,
+    Opts = [{headers_as_is, true} | Req#req.opts],
+    Resp =ibrowse:send_req(
+        Url, Req#req.headers, Req#req.method, Req#req.body, Opts
+    ),
+    %etap:diag("ibrowse response: ~p", [Resp]),
+    case Local of
+        no_local -> ok;
+        _ -> etap:fun_is(Local, Resp, Name)
+    end,
+    case {Remote, Local} of
+        {no_remote, _} ->
+            ok;
+        {_, no_local} ->
+            ok;
+        _ ->
+            etap:is(test_web:check_last(), was_ok, Name ++ " - request handled")
+    end,
+    Resp.
+
+test() ->
+    ExtraConfig = [test_util:source_file("test/etap/180-http-proxy.ini")],
+    couch_server_sup:start_link(test_util:config_files() ++ ExtraConfig),
+    ibrowse:start(),
+    crypto:start(),
+
+    % start the test_web server on a random port
+    test_web:start_link(),
+    Url = lists:concat([
+        "{couch_httpd_proxy, handle_proxy_req, <<\"http://127.0.0.1:",
+        test_web:get_port(),
+        "/\">>}"
+    ]),
+    couch_config:set("httpd_global_handlers", "_test", Url, false),
+
+    % let couch_httpd restart
+    timer:sleep(100),
+
+    test_basic(),
+    test_alternate_status(),
+    test_trailing_slash(),
+    test_passes_header(),
+    test_passes_host_header(),
+    test_passes_header_back(),
+    test_rewrites_location_headers(),
+    test_doesnt_rewrite_external_locations(),
+    test_rewrites_relative_location(),
+    test_uses_same_version(),
+    test_passes_body(),
+    test_passes_eof_body_back(),
+    test_passes_chunked_body(),
+    test_passes_chunked_body_back(),
+
+    test_connect_error(),
+    
+    ok.
+
+test_basic() ->
+    Remote = fun(Req) ->
+        'GET' = Req:get(method),
+        "/" = Req:get(path),
+        0 = Req:get(body_length),
+        <<>> = Req:recv_body(),
+        {ok, {200, [{"Content-Type", "text/plain"}], "ok"}}
+    end,
+    Local = fun({ok, "200", _, "ok"}) -> true; (_) -> false end,
+    check_request("Basic proxy test", #req{}, Remote, Local).
+
+test_alternate_status() ->
+    Remote = fun(Req) ->
+        "/alternate_status" = Req:get(path),
+        {ok, {201, [], "ok"}}
+    end,
+    Local = fun({ok, "201", _, "ok"}) -> true; (_) -> false end,
+    Req = #req{path="alternate_status"},
+    check_request("Alternate status", Req, Remote, Local).
+
+test_trailing_slash() ->
+    Remote = fun(Req) ->
+        "/trailing_slash/" = Req:get(path),
+        {ok, {200, [], "ok"}}
+    end,
+    Local = fun({ok, "200", _, "ok"}) -> true; (_) -> false end,
+    Req = #req{path="trailing_slash/"},
+    check_request("Trailing slash", Req, Remote, Local).
+
+test_passes_header() ->
+    Remote = fun(Req) ->
+        "/passes_header" = Req:get(path),
+        "plankton" = Req:get_header_value("X-CouchDB-Ralph"),
+        {ok, {200, [], "ok"}}
+    end,
+    Local = fun({ok, "200", _, "ok"}) -> true; (_) -> false end,
+    Req = #req{
+        path="passes_header",
+        headers=[{"X-CouchDB-Ralph", "plankton"}]
+    },
+    check_request("Passes header", Req, Remote, Local).
+
+test_passes_host_header() ->
+    Remote = fun(Req) ->
+        "/passes_host_header" = Req:get(path),
+        "www.google.com" = Req:get_header_value("Host"),
+        {ok, {200, [], "ok"}}
+    end,
+    Local = fun({ok, "200", _, "ok"}) -> true; (_) -> false end,
+    Req = #req{
+        path="passes_host_header",
+        headers=[{"Host", "www.google.com"}]
+    },
+    check_request("Passes host header", Req, Remote, Local).
+
+test_passes_header_back() ->
+    Remote = fun(Req) ->
+        "/passes_header_back" = Req:get(path),
+        {ok, {200, [{"X-CouchDB-Plankton", "ralph"}], "ok"}}
+    end,
+    Local = fun
+        ({ok, "200", Headers, "ok"}) ->
+            lists:member({"X-CouchDB-Plankton", "ralph"}, Headers);
+        (_) ->
+            false
+    end,
+    Req = #req{path="passes_header_back"},
+    check_request("Passes header back", Req, Remote, Local).
+
+test_rewrites_location_headers() ->
+    etap:diag("Testing location header rewrites."),
+    do_rewrite_tests([
+        {"Location", proxy() ++ "foo/bar", server() ++ "foo/bar"},
+        {"Content-Location", proxy() ++ "bing?q=2", server() ++ "bing?q=2"},
+        {"Uri", proxy() ++ "zip#frag", server() ++ "zip#frag"},
+        {"Destination", proxy(), server()}
+    ]).
+
+test_doesnt_rewrite_external_locations() ->
+    etap:diag("Testing no rewrite of external locations."),
+    do_rewrite_tests([
+        {"Location", external() ++ "search", external() ++ "search"},
+        {"Content-Location", external() ++ "s?q=2", external() ++ "s?q=2"},
+        {"Uri", external() ++ "f#f", external() ++ "f#f"},
+        {"Destination", external() ++ "f?q=2#f", external() ++ "f?q=2#f"}
+    ]).
+
+test_rewrites_relative_location() ->
+    etap:diag("Testing relative rewrites."),
+    do_rewrite_tests([
+        {"Location", "/foo", server() ++ "foo"},
+        {"Content-Location", "bar", server() ++ "bar"},
+        {"Uri", "/zing?q=3", server() ++ "zing?q=3"},
+        {"Destination", "bing?q=stuff#yay", server() ++ "bing?q=stuff#yay"}
+    ]).
+
+do_rewrite_tests(Tests) ->
+    lists:foreach(fun({Header, Location, Url}) ->
+        do_rewrite_test(Header, Location, Url)
+    end, Tests).
+    
+do_rewrite_test(Header, Location, Url) ->
+    Remote = fun(Req) ->
+        "/rewrite_test" = Req:get(path),
+        {ok, {302, [{Header, Location}], "ok"}}
+    end,
+    Local = fun
+        ({ok, "302", Headers, "ok"}) ->
+            etap:is(
+                couch_util:get_value(Header, Headers),
+                Url,
+                "Header rewritten correctly."
+            ),
+            true;
+        (_) ->
+            false
+    end,
+    Req = #req{path="rewrite_test"},
+    Label = "Rewrite test for ",
+    check_request(Label ++ Header, Req, Remote, Local).
+
+test_uses_same_version() ->
+    Remote = fun(Req) ->
+        "/uses_same_version" = Req:get(path),
+        {1, 0} = Req:get(version),
+        {ok, {200, [], "ok"}}
+    end,
+    Local = fun({ok, "200", _, "ok"}) -> true; (_) -> false end,
+    Req = #req{
+        path="uses_same_version",
+        opts=[{http_vsn, {1, 0}}]
+    },
+    check_request("Uses same version", Req, Remote, Local).
+
+test_passes_body() ->
+    Remote = fun(Req) ->
+        'PUT' = Req:get(method),
+        "/passes_body" = Req:get(path),
+        <<"Hooray!">> = Req:recv_body(),
+        {ok, {201, [], "ok"}}
+    end,
+    Local = fun({ok, "201", _, "ok"}) -> true; (_) -> false end,
+    Req = #req{
+        method=put,
+        path="passes_body",
+        body="Hooray!"
+    },
+    check_request("Passes body", Req, Remote, Local).
+
+test_passes_eof_body_back() ->
+    BodyChunks = [<<"foo">>, <<"bar">>, <<"bazinga">>],
+    Remote = fun(Req) ->
+        'GET' = Req:get(method),
+        "/passes_eof_body" = Req:get(path),
+        {raw, {200, [{"Connection", "close"}], BodyChunks}}
+    end,
+    Local = fun({ok, "200", _, "foobarbazinga"}) -> true; (_) -> false end,
+    Req = #req{path="passes_eof_body"},
+    check_request("Passes eof body", Req, Remote, Local).
+
+test_passes_chunked_body() ->
+    BodyChunks = [<<"foo">>, <<"bar">>, <<"bazinga">>],
+    Remote = fun(Req) ->
+        'POST' = Req:get(method),
+        "/passes_chunked_body" = Req:get(path),
+        RecvBody = fun
+            ({Length, Chunk}, [Chunk | Rest]) ->
+                Length = size(Chunk),
+                Rest;
+            ({0, []}, []) ->
+                ok
+        end,
+        ok = Req:stream_body(1024*1024, RecvBody, BodyChunks),
+        {ok, {201, [], "ok"}}
+    end,
+    Local = fun({ok, "201", _, "ok"}) -> true; (_) -> false end,
+    Req = #req{
+        method=post,
+        path="passes_chunked_body",
+        headers=[{"Transfer-Encoding", "chunked"}],
+        body=mk_chunked_body(BodyChunks)
+    },
+    check_request("Passes chunked body", Req, Remote, Local).
+
+test_passes_chunked_body_back() ->
+    Name = "Passes chunked body back",
+    Remote = fun(Req) ->
+        'GET' = Req:get(method),
+        "/passes_chunked_body_back" = Req:get(path),
+        BodyChunks = [<<"foo">>, <<"bar">>, <<"bazinga">>],
+        {chunked, {200, [{"Transfer-Encoding", "chunked"}], BodyChunks}}
+    end,
+    Req = #req{
+        path="passes_chunked_body_back",
+        opts=[{stream_to, self()}]
+    },
+
+    Resp = check_request(Name, Req, Remote, no_local),
+
+    etap:fun_is(
+        fun({ibrowse_req_id, _}) -> true; (_) -> false end,
+        Resp,
+        "Received an ibrowse request id."
+    ),
+    {_, ReqId} = Resp,
+    
+    % Grab headers from response
+    receive
+        {ibrowse_async_headers, ReqId, "200", Headers} ->
+            etap:is(
+                proplists:get_value("Transfer-Encoding", Headers),
+                "chunked",
+                "Response included the Transfer-Encoding: chunked header"
+            ),
+            ibrowse:stream_next(ReqId)
+    after 1000 ->
+        throw({error, timeout})
+    end,
+    
+    % Check body received
+    % TODO: When we upgrade to ibrowse >= 2.0.0 this check needs to
+    %       check that the chunks returned are what we sent from the
+    %       Remote test.
+    etap:diag("TODO: UPGRADE IBROWSE"),
+    etap:is(recv_body(ReqId, []), <<"foobarbazinga">>, "Decoded chunked body."),
+
+    % Check test_web server.
+    etap:is(test_web:check_last(), was_ok, Name ++ " - request handled").
+
+test_connect_error() ->
+    Local = fun({ok, "500", _Headers, _Body}) -> true; (_) -> false end,
+    Url = lists:concat([
+        "http://127.0.0.1:",
+        mochiweb_socket_server:get(couch_httpd, port),
+        "/_error"
+    ]),
+    Req = #req{opts=[{url, Url}]},
+    check_request("Connect error", Req, no_remote, Local).
+
+
+mk_chunked_body(Chunks) ->
+    mk_chunked_body(Chunks, []).
+
+mk_chunked_body([], Acc) ->
+    iolist_to_binary(lists:reverse(Acc, "0\r\n\r\n"));
+mk_chunked_body([Chunk | Rest], Acc) ->
+    Size = to_hex(size(Chunk)),
+    mk_chunked_body(Rest, ["\r\n", Chunk, "\r\n", Size | Acc]).
+
+to_hex(Val) ->
+    to_hex(Val, []).
+
+to_hex(0, Acc) ->
+    Acc;
+to_hex(Val, Acc) ->
+    to_hex(Val div 16, [hex_char(Val rem 16) | Acc]).
+
+hex_char(V) when V < 10 -> $0 + V;
+hex_char(V) -> $A + V - 10.
+
+recv_body(ReqId, Acc) ->
+    receive
+        {ibrowse_async_response, ReqId, Data} ->
+            recv_body(ReqId, [Data | Acc]);
+        {ibrowse_async_response_end, ReqId} ->
+            iolist_to_binary(lists:reverse(Acc));
+        Else ->
+            throw({error, unexpected_mesg, Else})
+    after 5000 ->
+        throw({error, timeout})
+    end.

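For readers tracing the chunked-body tests above: mk_chunked_body/1 simply produces
standard HTTP/1.1 chunked framing (hex size, CRLF, data, CRLF, terminated by a zero
chunk). For the chunks used in the test it evaluates to:

    1> mk_chunked_body([<<"foo">>, <<"bar">>, <<"bazinga">>]).
    <<"3\r\nfoo\r\n3\r\nbar\r\n7\r\nbazinga\r\n0\r\n\r\n">>
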
http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/src/test/etap/190-json-stream-parse.t
----------------------------------------------------------------------
diff --git a/src/test/etap/190-json-stream-parse.t b/src/test/etap/190-json-stream-parse.t
new file mode 100755
index 0000000..49ea58f
--- /dev/null
+++ b/src/test/etap/190-json-stream-parse.t
@@ -0,0 +1,184 @@
+#!/usr/bin/env escript
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+%   http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+main(_) ->
+    test_util:init_code_path(),
+    etap:plan(99),
+    case (catch test()) of
+        ok ->
+            etap:end_tests();
+        Other ->
+            etap:diag("Test died abnormally: ~p", [Other]),
+            etap:bail("Bad return value.")
+    end,
+    ok.
+
+test() ->
+    crypto:start(),
+    ok = test_raw_json_input(),
+    ok = test_1_byte_data_function(),
+    ok = test_multiple_bytes_data_function().
+
+
+test_raw_json_input() ->
+    etap:diag("Tests with raw JSON string as the input."),
+    lists:foreach(
+        fun({EJson, JsonString, Desc}) ->
+            etap:is(
+              equiv(EJson, json_stream_parse:to_ejson(JsonString)),
+              true,
+              Desc)
+        end,
+        cases()),
+    ok.
+
+
+test_1_byte_data_function() ->
+    etap:diag("Tests with a 1 byte output data function as the input."),
+    lists:foreach(
+        fun({EJson, JsonString, Desc}) ->
+            DataFun = fun() -> single_byte_data_fun(JsonString) end,
+            etap:is(
+              equiv(EJson, json_stream_parse:to_ejson(DataFun)),
+              true,
+              Desc)
+        end,
+        cases()),
+    ok.
+
+
+test_multiple_bytes_data_function() ->
+    etap:diag("Tests with a multiple bytes output data function as the input."),
+    lists:foreach(
+        fun({EJson, JsonString, Desc}) ->
+            DataFun = fun() -> multiple_bytes_data_fun(JsonString) end,
+            etap:is(
+              equiv(EJson, json_stream_parse:to_ejson(DataFun)),
+              true,
+              Desc)
+        end,
+        cases()),
+    ok.
+
+
+cases() ->
+    [
+        {1, "1", "integer numeric literial"},
+        {3.1416, "3.14160", "float numeric literal"},  % text representation may truncate, trail zeroes
+        {-1, "-1", "negative integer numeric literal"},
+        {-3.1416, "-3.14160", "negative float numeric literal"},
+        {12.0e10, "1.20000e+11", "float literal in scientific notation"},
+        {1.234E+10, "1.23400e+10", "another float literal in scientific notation"},
+        {-1.234E-10, "-1.23400e-10", "negative float literal in scientific notation"},
+        {10.0, "1.0e+01", "yet another float literal in scientific notation"},
+        {123.456, "1.23456E+2", "yet another float literal in scientific notation"},
+        {10.0, "1e1", "yet another float literal in scientific notation"},
+        {<<"foo">>, "\"foo\"", "string literal"},
+        {<<"foo", 5, "bar">>, "\"foo\\u0005bar\"", "string literal with \\u0005"},
+        {<<"">>, "\"\"", "empty string literal"},
+        {<<"\n\n\n">>, "\"\\n\\n\\n\"", "only new lines literal"},
+        {<<"\" \b\f\r\n\t\"">>, "\"\\\" \\b\\f\\r\\n\\t\\\"\"",
+            "only white spaces string literal"},
+        {null, "null", "null literal"},
+        {true, "true", "true literal"},
+        {false, "false", "false literal"},
+        {<<"null">>, "\"null\"", "null string literal"},
+        {<<"true">>, "\"true\"", "true string literal"},
+        {<<"false">>, "\"false\"", "false string literal"},
+        {{[]}, "{}", "empty object literal"},
+        {{[{<<"foo">>, <<"bar">>}]}, "{\"foo\":\"bar\"}",
+            "simple object literal"},
+        {{[{<<"foo">>, <<"bar">>}, {<<"baz">>, 123}]},
+            "{\"foo\":\"bar\",\"baz\":123}", "another simple object literal"},
+        {[], "[]", "empty array literal"},
+        {[[]], "[[]]", "empty array literal inside a single element array literal"},
+        {[1, <<"foo">>], "[1,\"foo\"]", "simple non-empty array literal"},
+        {[1199344435545.0, 1], "[1199344435545.0,1]",
+             "another simple non-empty array literal"},
+        {[false, true, 321, null], "[false, true, 321, null]", "array of literals"},
+        {{[{<<"foo">>, [123]}]}, "{\"foo\":[123]}",
+             "object literal with an array valued property"},
+        {{[{<<"foo">>, {[{<<"bar">>, true}]}}]},
+            "{\"foo\":{\"bar\":true}}", "nested object literal"},
+        {{[{<<"foo">>, []}, {<<"bar">>, {[{<<"baz">>, true}]}},
+                {<<"alice">>, <<"bob">>}]},
+            "{\"foo\":[],\"bar\":{\"baz\":true},\"alice\":\"bob\"}",
+            "complex object literal"},
+        {[-123, <<"foo">>, {[{<<"bar">>, []}]}, null],
+            "[-123,\"foo\",{\"bar\":[]},null]",
+            "complex array literal"}
+    ].
+
+
+%% Test for equivalence of Erlang terms.
+%% Due to arbitrary order of construction, equivalent objects might
+%% compare unequal as erlang terms, so we need to carefully recurse
+%% through aggregates (tuples and objects).
+equiv({Props1}, {Props2}) ->
+    equiv_object(Props1, Props2);
+equiv(L1, L2) when is_list(L1), is_list(L2) ->
+    equiv_list(L1, L2);
+equiv(N1, N2) when is_number(N1), is_number(N2) ->
+    N1 == N2;
+equiv(B1, B2) when is_binary(B1), is_binary(B2) ->
+    B1 == B2;
+equiv(true, true) ->
+    true;
+equiv(false, false) ->
+    true;
+equiv(null, null) ->
+    true.
+
+
+%% Object representation and traversal order is unknown.
+%% Use the sledgehammer and sort property lists.
+equiv_object(Props1, Props2) ->
+    L1 = lists:keysort(1, Props1),
+    L2 = lists:keysort(1, Props2),
+    Pairs = lists:zip(L1, L2),
+    true = lists:all(
+        fun({{K1, V1}, {K2, V2}}) ->
+            equiv(K1, K2) andalso equiv(V1, V2)
+        end,
+        Pairs).
+
+
+%% Recursively compare list elements for equivalence.
+equiv_list([], []) ->
+    true;
+equiv_list([V1 | L1], [V2 | L2]) ->
+    equiv(V1, V2) andalso equiv_list(L1, L2).
+
+
+single_byte_data_fun([]) ->
+    done;
+single_byte_data_fun([H | T]) ->
+    {<<H>>, fun() -> single_byte_data_fun(T) end}.
+
+
+multiple_bytes_data_fun([]) ->
+    done;
+multiple_bytes_data_fun(L) ->
+    N = crypto:rand_uniform(0, 7),
+    {Part, Rest} = split(L, N),
+    {list_to_binary(Part), fun() -> multiple_bytes_data_fun(Rest) end}.
+
+split(L, N) when length(L) =< N ->
+    {L, []};
+split(L, N) ->
+    take(N, L, []).
+
+take(0, L, Acc) ->
+    {lists:reverse(Acc), L};
+take(N, [H|L], Acc) ->
+    take(N - 1, L, [H | Acc]).

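For reference, equiv/2 above deliberately ignores property order when comparing
decoded objects, so JSON objects that only differ in key order compare equal.
A sketch of the behaviour (illustrative only, not part of the patch):

    1> equiv({[{<<"a">>, 1}, {<<"b">>, 2}]}, {[{<<"b">>, 2}, {<<"a">>, 1}]}).
    true
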
http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/src/test/etap/200-view-group-no-db-leaks.t
----------------------------------------------------------------------
diff --git a/src/test/etap/200-view-group-no-db-leaks.t b/src/test/etap/200-view-group-no-db-leaks.t
new file mode 100755
index 0000000..b711ac8
--- /dev/null
+++ b/src/test/etap/200-view-group-no-db-leaks.t
@@ -0,0 +1,307 @@
+#!/usr/bin/env escript
+%% -*- erlang -*-
+
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+%   http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+-record(user_ctx, {
+    name = null,
+    roles = [],
+    handler
+}).
+
+-record(db, {
+    main_pid = nil,
+    update_pid = nil,
+    compactor_pid = nil,
+    instance_start_time, % number of microsecs since jan 1 1970 as a binary string
+    fd,
+    updater_fd,
+    fd_ref_counter,
+    header = nil,
+    committed_update_seq,
+    fulldocinfo_by_id_btree,
+    docinfo_by_seq_btree,
+    local_docs_btree,
+    update_seq,
+    name,
+    filepath,
+    validate_doc_funs = [],
+    security = [],
+    security_ptr = nil,
+    user_ctx = #user_ctx{},
+    waiting_delayed_commit = nil,
+    revs_limit = 1000,
+    fsync_options = [],
+    options = [],
+    compression,
+    before_doc_update,
+    after_doc_read
+}).
+
+test_db_name() -> <<"couch_test_view_group_db_leaks">>.
+ddoc_name() -> <<"foo">>.
+
+main(_) ->
+    test_util:init_code_path(),
+
+    etap:plan(28),
+    case (catch test()) of
+        ok ->
+            etap:end_tests();
+        Other ->
+            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
+            etap:bail(Other)
+    end,
+    ok.
+
+test() ->
+    couch_server_sup:start_link(test_util:config_files()),
+    timer:sleep(1000),
+    put(addr, couch_config:get("httpd", "bind_address", "127.0.0.1")),
+    put(port, integer_to_list(mochiweb_socket_server:get(couch_httpd, port))),
+
+    delete_db(),
+    create_db(),
+
+    create_docs(),
+    {ok, DDocRev} = create_design_doc(),
+
+    {ok, IndexerPid} = couch_index_server:get_index(
+        couch_mrview_index, test_db_name(), <<"_design/", (ddoc_name())/binary>>
+    ),
+    etap:is(is_pid(IndexerPid), true, "got view group pid"),
+    etap:is(is_process_alive(IndexerPid), true, "view group pid is alive"),
+
+    query_view(3, null, false),
+    check_db_ref_count(),
+    etap:is(is_process_alive(IndexerPid), true, "view group pid is alive"),
+
+    create_new_doc(<<"doc1000">>),
+    query_view(4, null, false),
+    check_db_ref_count(),
+    etap:is(is_process_alive(IndexerPid), true, "view group pid is alive"),
+
+    Ref1 = get_db_ref_counter(),
+    compact_db(),
+    check_db_ref_count(),
+    Ref2 = get_db_ref_counter(),
+    etap:isnt(Ref1, Ref2,  "DB ref counter changed"),
+    etap:is(false, is_process_alive(Ref1), "old DB ref counter is not alive"),
+    etap:is(is_process_alive(IndexerPid), true, "view group pid is alive"),
+
+    compact_view_group(),
+    check_db_ref_count(),
+    Ref3 = get_db_ref_counter(),
+    etap:is(Ref3, Ref2,  "DB ref counter didn't change"),
+    etap:is(is_process_alive(IndexerPid), true, "view group pid is alive"),
+
+    create_new_doc(<<"doc1001">>),
+    query_view(5, null, false),
+    check_db_ref_count(),
+    etap:is(is_process_alive(IndexerPid), true, "view group pid is alive"),
+
+    etap:diag("updating the design document with a new view definition"),
+    {ok, _NewDDocRev} = update_ddoc_view(DDocRev),
+
+    {ok, NewIndexerPid} = couch_index_server:get_index(
+        couch_mrview_index, test_db_name(), <<"_design/", (ddoc_name())/binary>>
+    ),
+    etap:is(is_pid(NewIndexerPid), true, "got new view group pid"),
+    etap:is(is_process_alive(NewIndexerPid), true, "new view group pid is alive"),
+    etap:isnt(NewIndexerPid, IndexerPid, "new view group has a different pid"),
+    etap:diag("querying view with ?stale=ok, must return empty row set"),
+    query_view(0, foo, ok),
+    etap:diag("querying view (without stale), must return 5 rows with value 1"),
+    query_view(5, 1, false),
+    MonRef = erlang:monitor(process, IndexerPid),
+    receive
+    {'DOWN', MonRef, _, _, _} ->
+        etap:diag("old view group is dead after ddoc update")
+    after 5000 ->
+        etap:bail("old view group is not dead after ddoc update")
+    end,
+
+    etap:diag("deleting database"),
+    MonRef2 = erlang:monitor(process, NewIndexerPid),
+    ok = couch_server:delete(test_db_name(), []),
+    receive
+    {'DOWN', MonRef2, _, _, _} ->
+        etap:diag("new view group is dead after DB deletion")
+    after 5000 ->
+        etap:bail("new view group did not die after DB deletion")
+    end,
+
+    ok = timer:sleep(1000),
+    delete_db(),
+    couch_server_sup:stop(),
+    ok.
+
+admin_user_ctx() ->
+    {user_ctx, #user_ctx{roles=[<<"_admin">>]}}.
+
+create_db() ->
+    {ok, #db{main_pid = Pid} = Db} = couch_db:create(
+        test_db_name(), [admin_user_ctx()]),
+    put(db_main_pid, Pid),
+    ok = couch_db:close(Db).
+
+delete_db() ->
+    couch_server:delete(test_db_name(), [admin_user_ctx()]).
+
+compact_db() ->
+    {ok, Db} = couch_db:open_int(test_db_name(), []),
+    {ok, _} = couch_db:start_compact(Db),
+    ok = couch_db:close(Db),
+    wait_db_compact_done(10).
+
+wait_db_compact_done(0) ->
+    etap:bail("DB compaction failed to finish.");
+wait_db_compact_done(N) ->
+    {ok, Db} = couch_db:open_int(test_db_name(), []),
+    ok = couch_db:close(Db),
+    case is_pid(Db#db.compactor_pid) of
+    false ->
+        ok;
+    true ->
+        ok = timer:sleep(500),
+        wait_db_compact_done(N - 1)
+    end.
+
+compact_view_group() ->
+    DDoc = list_to_binary("_design/" ++ binary_to_list(ddoc_name())),
+    ok = couch_mrview:compact(test_db_name(), DDoc),
+    wait_view_compact_done(10).
+
+wait_view_compact_done(0) ->
+    etap:bail("View group compaction failed to finish.");
+wait_view_compact_done(N) ->
+    {ok, Code, _Headers, Body} = test_util:request(
+        db_url() ++ "/_design/" ++ binary_to_list(ddoc_name()) ++ "/_info",
+        [],
+        get),
+    case Code of
+        200 -> ok;
+        _ -> etap:bail("Invalid view group info.")
+    end,
+    {Info} = ejson:decode(Body),
+    {IndexInfo} = couch_util:get_value(<<"view_index">>, Info),
+    CompactRunning = couch_util:get_value(<<"compact_running">>, IndexInfo),
+    case CompactRunning of
+    false ->
+        ok;
+    true ->
+        ok = timer:sleep(500),
+        wait_view_compact_done(N - 1)
+    end.
+
+get_db_ref_counter() ->
+    {ok, #db{fd_ref_counter = Ref} = Db} = couch_db:open_int(test_db_name(), []),
+    ok = couch_db:close(Db),
+    Ref.
+
+check_db_ref_count() ->
+    {ok, #db{fd_ref_counter = Ref} = Db} = couch_db:open_int(test_db_name(), []),
+    ok = couch_db:close(Db),
+    etap:is(couch_ref_counter:count(Ref), 2,
+        "DB ref counter is only held by couch_db and couch_db_updater"),
+    ok.
+
+create_docs() ->
+    {ok, Db} = couch_db:open(test_db_name(), [admin_user_ctx()]),
+    Doc1 = couch_doc:from_json_obj({[
+        {<<"_id">>, <<"doc1">>},
+        {<<"value">>, 1}
+    ]}),
+    Doc2 = couch_doc:from_json_obj({[
+        {<<"_id">>, <<"doc2">>},
+        {<<"value">>, 2}
+
+    ]}),
+    Doc3 = couch_doc:from_json_obj({[
+        {<<"_id">>, <<"doc3">>},
+        {<<"value">>, 3}
+    ]}),
+    {ok, _} = couch_db:update_docs(Db, [Doc1, Doc2, Doc3]),
+    couch_db:ensure_full_commit(Db),
+    couch_db:close(Db).
+
+create_design_doc() ->
+    {ok, Db} = couch_db:open(test_db_name(), [admin_user_ctx()]),
+    DDoc = couch_doc:from_json_obj({[
+        {<<"_id">>, <<"_design/", (ddoc_name())/binary>>},
+        {<<"language">>, <<"javascript">>},
+        {<<"views">>, {[
+            {<<"bar">>, {[
+                {<<"map">>, <<"function(doc) { emit(doc._id, null); }">>}
+            ]}}
+        ]}}
+    ]}),
+    {ok, Rev} = couch_db:update_doc(Db, DDoc, []),
+    couch_db:ensure_full_commit(Db),
+    couch_db:close(Db),
+    {ok, Rev}.
+
+update_ddoc_view(DDocRev) ->
+    {ok, Db} = couch_db:open(test_db_name(), [admin_user_ctx()]),
+    DDoc = couch_doc:from_json_obj({[
+        {<<"_id">>, <<"_design/", (ddoc_name())/binary>>},
+        {<<"_rev">>, couch_doc:rev_to_str(DDocRev)},
+        {<<"language">>, <<"javascript">>},
+        {<<"views">>, {[
+            {<<"bar">>, {[
+                {<<"map">>, <<"function(doc) { emit(doc._id, 1); }">>}
+            ]}}
+        ]}}
+    ]}),
+    {ok, NewRev} = couch_db:update_doc(Db, DDoc, []),
+    couch_db:ensure_full_commit(Db),
+    couch_db:close(Db),
+    {ok, NewRev}.
+
+create_new_doc(Id) ->
+    {ok, Db} = couch_db:open(test_db_name(), [admin_user_ctx()]),
+    Doc666 = couch_doc:from_json_obj({[
+        {<<"_id">>, Id},
+        {<<"value">>, 999}
+    ]}),
+    {ok, _} = couch_db:update_docs(Db, [Doc666]),
+    couch_db:ensure_full_commit(Db),
+    couch_db:close(Db).
+
+db_url() ->
+    "http://" ++ get(addr) ++ ":" ++ get(port) ++ "/" ++
+    binary_to_list(test_db_name()).
+
+query_view(ExpectedRowCount, ExpectedRowValue, Stale) ->
+    {ok, Code, _Headers, Body} = test_util:request(
+        db_url() ++ "/_design/" ++ binary_to_list(ddoc_name()) ++ "/_view/bar"
+          ++ case Stale of
+                 false -> [];
+                 _ -> "?stale=" ++ atom_to_list(Stale)
+             end,
+        [],
+        get),
+    etap:is(Code, 200, "got view response"),
+    {Props} = ejson:decode(Body),
+    Rows = couch_util:get_value(<<"rows">>, Props, []),
+    etap:is(length(Rows), ExpectedRowCount, "result set has correct # of rows"),
+    lists:foreach(
+        fun({Row}) ->
+            case couch_util:get_value(<<"value">>, Row) of
+            ExpectedRowValue ->
+                ok;
+            _ ->
+                etap:bail("row has incorrect value")
+            end
+        end,
+        Rows).

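For context, query_view/3 above decodes a regular view response and inspects
its "rows". The EJSON it works on looks roughly like the following (assumed
illustrative shape; actual values depend on the test step):

    {[{<<"total_rows">>, 3},
      {<<"offset">>, 0},
      {<<"rows">>, [
          {[{<<"id">>, <<"doc1">>}, {<<"key">>, <<"doc1">>}, {<<"value">>, null}]},
          ...
      ]}]}
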
http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/src/test/etap/201-view-group-shutdown.t
----------------------------------------------------------------------
diff --git a/src/test/etap/201-view-group-shutdown.t b/src/test/etap/201-view-group-shutdown.t
new file mode 100755
index 0000000..c51ec44
--- /dev/null
+++ b/src/test/etap/201-view-group-shutdown.t
@@ -0,0 +1,293 @@
+#!/usr/bin/env escript
+%% -*- erlang -*-
+
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+%   http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+-record(user_ctx, {
+    name = null,
+    roles = [],
+    handler
+}).
+
+-record(db, {
+    main_pid = nil,
+    update_pid = nil,
+    compactor_pid = nil,
+    instance_start_time, % number of microsecs since jan 1 1970 as a binary string
+    fd,
+    updater_fd,
+    fd_ref_counter,
+    header = nil,
+    committed_update_seq,
+    fulldocinfo_by_id_btree,
+    docinfo_by_seq_btree,
+    local_docs_btree,
+    update_seq,
+    name,
+    filepath,
+    validate_doc_funs = [],
+    security = [],
+    security_ptr = nil,
+    user_ctx = #user_ctx{},
+    waiting_delayed_commit = nil,
+    revs_limit = 1000,
+    fsync_options = [],
+    options = [],
+    compression,
+    before_doc_update,
+    after_doc_read
+}).
+
+main_db_name() -> <<"couch_test_view_group_shutdown">>.
+
+
+main(_) ->
+    test_util:init_code_path(),
+
+    etap:plan(17),
+    case (catch test()) of
+        ok ->
+            etap:end_tests();
+        Other ->
+            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
+            etap:bail(Other)
+    end,
+    ok.
+
+
+test() ->
+    couch_server_sup:start_link(test_util:config_files()),
+    ok = couch_config:set("couchdb", "max_dbs_open", "3", false),
+    ok = couch_config:set("couchdb", "delayed_commits", "false", false),
+    crypto:start(),
+
+    % Test that while a view group is being compacted its database cannot
+    % be closed by the database LRU system.
+    test_view_group_compaction(),
+
+    couch_server_sup:stop(),
+    ok.
+
+
+test_view_group_compaction() ->
+    {ok, DbWriter3} = create_db(<<"couch_test_view_group_shutdown_w3">>),
+    ok = couch_db:close(DbWriter3),
+
+    {ok, MainDb} = create_main_db(),
+    ok = couch_db:close(MainDb),
+
+    {ok, DbWriter1} = create_db(<<"couch_test_view_group_shutdown_w1">>),
+    ok = couch_db:close(DbWriter1),
+
+    {ok, DbWriter2} = create_db(<<"couch_test_view_group_shutdown_w2">>),
+    ok = couch_db:close(DbWriter2),
+
+    Writer1 = spawn_writer(DbWriter1#db.name),
+    Writer2 = spawn_writer(DbWriter2#db.name),
+    etap:is(is_process_alive(Writer1), true, "Spawned writer 1"),
+    etap:is(is_process_alive(Writer2), true, "Spawned writer 2"),
+
+    etap:is(get_writer_status(Writer1), ok, "Writer 1 opened his database"),
+    etap:is(get_writer_status(Writer2), ok, "Writer 2 opened his database"),
+
+    {ok, MonRef} = couch_mrview:compact(MainDb#db.name, <<"_design/foo">>, [monitor]),
+
+    % Add some more docs to database and trigger view update
+    {ok, MainDb2} = couch_db:open_int(MainDb#db.name, []),
+    ok = populate_main_db(MainDb2, 3, 3),
+    update_view(MainDb2#db.name, <<"_design/foo">>, <<"foo">>),
+    ok = couch_db:close(MainDb2),
+
+    % Assuming the view compaction takes more than 50ms to complete
+    ok = timer:sleep(50),
+    Writer3 = spawn_writer(DbWriter3#db.name),
+    etap:is(is_process_alive(Writer3), true, "Spawned writer 3"),
+
+    etap:is(get_writer_status(Writer3), {error, all_dbs_active},
+        "Writer 3 got {error, all_dbs_active} when opening his database"),
+
+    etap:is(is_process_alive(Writer1), true, "Writer 1 still alive"),
+    etap:is(is_process_alive(Writer2), true, "Writer 2 still alive"),
+    etap:is(is_process_alive(Writer3), true, "Writer 3 still alive"),
+
+    receive
+    {'DOWN', MonRef, process, _, normal} ->
+         etap:diag("View group compaction successful"),
+         ok;
+    {'DOWN', MonRef, process, _, _Reason} ->
+         etap:bail("Failure compacting view group")
+    end,
+
+    ok = timer:sleep(2000),
+
+    etap:is(writer_try_again(Writer3), ok,
+        "Told writer 3 to try open his database again"),
+    etap:is(get_writer_status(Writer3), ok,
+        "Writer 3 was able to open his database"),
+
+    etap:is(is_process_alive(Writer1), true, "Writer 1 still alive"),
+    etap:is(is_process_alive(Writer2), true, "Writer 2 still alive"),
+    etap:is(is_process_alive(Writer3), true, "Writer 3 still alive"),
+
+    etap:is(stop_writer(Writer1), ok, "Stopped writer 1"),
+    etap:is(stop_writer(Writer2), ok, "Stopped writer 2"),
+    etap:is(stop_writer(Writer3), ok, "Stopped writer 3"),
+
+    delete_db(MainDb),
+    delete_db(DbWriter1),
+    delete_db(DbWriter2),
+    delete_db(DbWriter3).
+
+
+create_main_db() ->
+    {ok, Db} = create_db(main_db_name()),
+    DDoc = couch_doc:from_json_obj({[
+        {<<"_id">>, <<"_design/foo">>},
+        {<<"language">>, <<"javascript">>},
+        {<<"views">>, {[
+            {<<"foo">>, {[
+                {<<"map">>, <<"function(doc) { emit(doc._id, doc); }">>}
+            ]}},
+            {<<"foo2">>, {[
+                {<<"map">>, <<"function(doc) { emit(doc._id, doc); }">>}
+            ]}},
+            {<<"foo3">>, {[
+                {<<"map">>, <<"function(doc) { emit(doc._id, doc); }">>}
+            ]}},
+            {<<"foo4">>, {[
+                {<<"map">>, <<"function(doc) { emit(doc._id, doc); }">>}
+            ]}},
+            {<<"foo5">>, {[
+                {<<"map">>, <<"function(doc) { emit(doc._id, doc); }">>}
+            ]}}
+        ]}}
+    ]}),
+    {ok, _} = couch_db:update_doc(Db, DDoc, []),
+    ok = populate_main_db(Db, 1000, 20000),
+    update_view(Db#db.name, <<"_design/foo">>, <<"foo">>),
+    {ok, Db}.
+
+
+populate_main_db(Db, BatchSize, N) when N > 0 ->
+    Docs = lists:map(
+        fun(_) ->
+            couch_doc:from_json_obj({[
+                {<<"_id">>, couch_uuids:new()},
+                {<<"value">>, base64:encode(crypto:rand_bytes(1000))}
+            ]})
+        end,
+        lists:seq(1, BatchSize)),
+    {ok, _} = couch_db:update_docs(Db, Docs, []),
+    populate_main_db(Db, BatchSize, N - length(Docs));
+populate_main_db(_Db, _, _) ->
+    ok.
+
+
+update_view(DbName, DDocName, ViewName) ->
+    {ok, Db} = couch_db:open_int(DbName, []),
+    {ok, DDoc} = couch_db:open_doc(Db, DDocName, [ejson_body]),
+    couch_mrview:query_view(Db, DDoc, ViewName, [{stale, false}]),
+    ok = couch_db:close(Db),
+    etap:diag("View group updated").
+
+
+create_db(DbName) ->
+    {ok, Db} = couch_db:create(
+        DbName,
+        [{user_ctx, #user_ctx{roles = [<<"_admin">>]}}, overwrite]),
+    {ok, Db}.
+
+
+delete_db(#db{name = DbName, main_pid = Pid}) ->
+    ok = couch_server:delete(
+        DbName, [{user_ctx, #user_ctx{roles = [<<"_admin">>]}}]),
+    MonRef = erlang:monitor(process, Pid),
+    receive
+    {'DOWN', MonRef, process, Pid, _Reason} ->
+        ok
+    after 30000 ->
+        etap:bail("Timeout deleting database")
+    end.
+
+
+spawn_writer(DbName) ->
+    Parent = self(),
+    spawn(fun() ->
+        process_flag(priority, high),
+        writer_loop(DbName, Parent)
+    end).
+
+
+get_writer_status(Writer) ->
+    Ref = make_ref(),
+    Writer ! {get_status, Ref},
+    receive
+    {db_open, Ref} ->
+        ok;
+    {db_open_error, Error, Ref} ->
+        Error
+    after 5000 ->
+        timeout
+    end.
+
+
+writer_try_again(Writer) ->
+    Ref = make_ref(),
+    Writer ! {try_again, Ref},
+    receive
+    {ok, Ref} ->
+        ok
+    after 5000 ->
+        timeout
+    end.
+
+
+stop_writer(Writer) ->
+    Ref = make_ref(),
+    Writer ! {stop, Ref},
+    receive
+    {ok, Ref} ->
+        ok
+    after 5000 ->
+        etap:bail("Timeout stopping writer process")
+    end.
+
+
+% Just keep the database open; no need to actually do anything with it.
+writer_loop(DbName, Parent) ->
+    case couch_db:open_int(DbName, []) of
+    {ok, Db} ->
+        writer_loop_1(Db, Parent);
+    Error ->
+        writer_loop_2(DbName, Parent, Error)
+    end.
+
+writer_loop_1(Db, Parent) ->
+    receive
+    {get_status, Ref} ->
+        Parent ! {db_open, Ref},
+        writer_loop_1(Db, Parent);
+    {stop, Ref} ->
+        ok = couch_db:close(Db),
+        Parent ! {ok, Ref}
+    end.
+
+writer_loop_2(DbName, Parent, Error) ->
+    receive
+    {get_status, Ref} ->
+        Parent ! {db_open_error, Error, Ref},
+        writer_loop_2(DbName, Parent, Error);
+    {try_again, Ref} ->
+        Parent ! {ok, Ref},
+        writer_loop(DbName, Parent)
+    end.

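The writer helpers above (get_writer_status/1, writer_try_again/1, stop_writer/1)
all use essentially the same ref-tagged request/response pattern. A minimal
generic sketch of that pattern (assumed names, not part of the patch):

    call(Pid, Request, Timeout) ->
        Ref = make_ref(),
        Pid ! {Request, Ref},
        receive
            {Reply, Ref} -> Reply   % only replies carrying this unique Ref match
        after Timeout ->
            timeout
        end.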

[02/12] move test -> src/test

Posted by be...@apache.org.
http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/test/etap/210-os-proc-pool.t
----------------------------------------------------------------------
diff --git a/test/etap/210-os-proc-pool.t b/test/etap/210-os-proc-pool.t
deleted file mode 100755
index d80707e..0000000
--- a/test/etap/210-os-proc-pool.t
+++ /dev/null
@@ -1,163 +0,0 @@
-#!/usr/bin/env escript
-%% -*- erlang -*-
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
-main(_) ->
-    test_util:init_code_path(),
-
-    etap:plan(21),
-    case (catch test()) of
-        ok ->
-            etap:end_tests();
-        Other ->
-            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
-            etap:bail(Other)
-    end,
-    ok.
-
-
-test() ->
-    couch_server_sup:start_link(test_util:config_files()),
-    couch_config:set("query_server_config", "os_process_limit", "3", false),
-
-    test_pool_full(),
-    test_client_unexpected_exit(),
-
-    couch_server_sup:stop(),
-    ok.
-
-
-test_pool_full() ->
-    Client1 = spawn_client(),
-    Client2 = spawn_client(),
-    Client3 = spawn_client(),
-
-    etap:diag("Check that we can spawn the max number of processes."),
-    etap:is(ping_client(Client1), ok, "Client 1 started ok."),
-    etap:is(ping_client(Client2), ok, "Client 2 started ok."),
-    etap:is(ping_client(Client3), ok, "Client 3 started ok."),
-
-    Proc1 = get_client_proc(Client1, "1"),
-    Proc2 = get_client_proc(Client2, "2"),
-    Proc3 = get_client_proc(Client3, "3"),
-    etap:isnt(Proc1, Proc2, "Clients 1 and 2 got different procs."),
-    etap:isnt(Proc2, Proc3, "Clients 2 and 3 got different procs."),
-    etap:isnt(Proc1, Proc3, "Clients 1 and 3 got different procs."),
-
-    etap:diag("Check that client 4 blocks waiting for a process."),
-    Client4 = spawn_client(),
-    etap:is(ping_client(Client4), timeout, "Client 4 blocked while waiting."),
-
-    etap:diag("Check that stopping a client gives up its process."),
-    etap:is(stop_client(Client1), ok, "First client stopped."),
-
-    etap:diag("And check that our blocked process has been unblocked."),
-    etap:is(ping_client(Client4), ok, "Client was unblocked."),
-
-    Proc4 = get_client_proc(Client4, "4"),
-    etap:is(Proc4, Proc1, "Client 4 got proc that client 1 got before."),
-
-    lists:map(fun(C) -> ok = stop_client(C) end, [Client2, Client3, Client4]).
-
-
-test_client_unexpected_exit() ->
-    Client1 = spawn_client(),
-    Client2 = spawn_client(),
-    Client3 = spawn_client(),
-
-    etap:diag("Check that up to os_process_limit clients started."),
-    etap:is(ping_client(Client1), ok, "Client 1 started ok."),
-    etap:is(ping_client(Client2), ok, "Client 2 started ok."),
-    etap:is(ping_client(Client3), ok, "Client 3 started ok."),
-
-    Proc1 = get_client_proc(Client1, "1"),
-    Proc2 = get_client_proc(Client2, "2"),
-    Proc3 = get_client_proc(Client3, "3"),
-    etap:isnt(Proc1, Proc2, "Clients 1 and 2 got different procs."),
-    etap:isnt(Proc2, Proc3, "Clients 2 and 3 got different procs."),
-    etap:isnt(Proc1, Proc3, "Clients 1 and 3 got different procs."),
-
-    etap:diag("Check that killing a client frees an os_process."),
-    etap:is(kill_client(Client1), ok, "Client 1 died all right."),
-
-    etap:diag("Check that a new client is not blocked on boot."),
-    Client4 = spawn_client(),
-    etap:is(ping_client(Client4), ok, "New client booted without blocking."),
-
-    Proc4 = get_client_proc(Client4, "4"),
-    etap:isnt(Proc4, Proc1,
-        "Client 4 got a proc different from the one client 1 got before."),
-    etap:isnt(Proc4, Proc2, "Client 4's proc different from client 2's proc."),
-    etap:isnt(Proc4, Proc3, "Client 4's proc different from client 3's proc."),
-
-    lists:map(fun(C) -> ok = stop_client(C) end, [Client2, Client3, Client4]).
-
-
-spawn_client() ->
-    Parent = self(),
-    Ref = make_ref(),
-    Pid = spawn(fun() ->
-        Proc = couch_query_servers:get_os_process(<<"javascript">>),
-        loop(Parent, Ref, Proc)
-    end),
-    {Pid, Ref}.
-
-
-ping_client({Pid, Ref}) ->
-    Pid ! ping,
-    receive
-        {pong, Ref} -> ok
-        after 3000 -> timeout
-    end.
-
-
-get_client_proc({Pid, Ref}, ClientName) ->
-    Pid ! get_proc,
-    receive
-        {proc, Ref, Proc} -> Proc
-    after 3000 ->
-        etap:bail("Timeout getting client " ++ ClientName ++ " proc.")
-    end.
-
-
-stop_client({Pid, Ref}) ->
-    Pid ! stop,
-    receive
-        {stop, Ref} -> ok
-        after 3000 -> timeout
-    end.
-
-
-kill_client({Pid, Ref}) ->
-    Pid ! die,
-    receive
-        {die, Ref} -> ok
-        after 3000 -> timeout
-    end.
-
-
-loop(Parent, Ref, Proc) ->
-    receive
-        ping ->
-            Parent ! {pong, Ref},
-            loop(Parent, Ref, Proc);
-        get_proc  ->
-            Parent ! {proc, Ref, Proc},
-            loop(Parent, Ref, Proc);
-        stop ->
-            couch_query_servers:ret_os_process(Proc),
-            Parent ! {stop, Ref};
-        die ->
-            Parent ! {die, Ref},
-            exit(some_error)
-    end.

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/test/etap/220-compaction-daemon.t
----------------------------------------------------------------------
diff --git a/test/etap/220-compaction-daemon.t b/test/etap/220-compaction-daemon.t
deleted file mode 100755
index 4c63b66..0000000
--- a/test/etap/220-compaction-daemon.t
+++ /dev/null
@@ -1,225 +0,0 @@
-#!/usr/bin/env escript
-%% -*- erlang -*-
-
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
--record(user_ctx, {
-    name = null,
-    roles = [],
-    handler
-}).
-
-test_db_name() ->
-    <<"couch_test_compaction_daemon">>.
-
-main(_) ->
-    test_util:init_code_path(),
-
-    etap:plan(10),
-    case (catch test()) of
-        ok ->
-            etap:end_tests();
-        Other ->
-            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
-            etap:bail(Other)
-    end,
-    ok.
-
-test() ->
-    couch_server_sup:start_link(test_util:config_files()),
-    timer:sleep(1000),
-    put(addr, couch_config:get("httpd", "bind_address", "127.0.0.1")),
-    put(port, integer_to_list(mochiweb_socket_server:get(couch_httpd, port))),
-
-    disable_compact_daemon(),
-
-    delete_db(),
-    {ok, Db} = create_db(),
-
-    add_design_doc(Db),
-    couch_db:close(Db),
-    populate(70, 70, 200 * 1024),
-
-    {_, DbFileSize} = get_db_frag(),
-    {_, ViewFileSize} = get_view_frag(),
-
-    % enable automatic compaction
-    ok = couch_config:set("compaction_daemon", "check_interval", "3", false),
-    ok = couch_config:set("compaction_daemon", "min_file_size", "100000", false),
-    ok = couch_config:set(
-        "compactions",
-        binary_to_list(test_db_name()),
-        "[{db_fragmentation, \"70%\"}, {view_fragmentation, \"70%\"}]",
-        false),
-
-    ok = timer:sleep(4000), % something >= check_interval
-    wait_compaction_finished(),
-
-    {DbFrag2, DbFileSize2} = get_db_frag(),
-    {ViewFrag2, ViewFileSize2} = get_view_frag(),
-
-    etap:is(true, (DbFrag2 < 70), "Database fragmentation is < 70% after compaction"),
-    etap:is(true, (ViewFrag2 < 70), "View fragmentation is < 70% after compaction"),
-    etap:is(true, (DbFileSize2 < DbFileSize), "Database file size decreased"),
-    etap:is(true, (ViewFileSize2 < ViewFileSize), "View file size decreased"),
-
-    disable_compact_daemon(),
-    ok = timer:sleep(6000), % 2 times check_interval
-    etap:is(couch_db:is_idle(Db), true, "Database is idle"),
-    populate(70, 70, 200 * 1024),
-    {_, DbFileSize3} = get_db_frag(),
-    {_, ViewFileSize3} = get_view_frag(),
-
-    % enable automatic compaction
-    ok = couch_config:set(
-        "compactions",
-        "_default",
-        "[{db_fragmentation, \"70%\"}, {view_fragmentation, \"70%\"}]",
-        false),
-
-    ok = timer:sleep(4000), % something >= check_interval
-    wait_compaction_finished(),
-
-    {DbFrag4, DbFileSize4} = get_db_frag(),
-    {ViewFrag4, ViewFileSize4} = get_view_frag(),
-
-    etap:is(true, (DbFrag4 < 70), "Database fragmentation is < 70% after compaction"),
-    etap:is(true, (ViewFrag4 < 70), "View fragmentation is < 70% after compaction"),
-    etap:is(true, (DbFileSize4 < DbFileSize3), "Database file size decreased again"),
-    etap:is(true, (ViewFileSize4 < ViewFileSize3), "View file size decreased again"),
-
-    ok = timer:sleep(6000), % 2 times check_interval
-    etap:is(couch_db:is_idle(Db), true, "Database is idle"),
-
-    delete_db(),
-    couch_server_sup:stop(),
-    ok.
-
-disable_compact_daemon() ->
-    Configs = couch_config:get("compactions"),
-    lists:foreach(
-        fun({DbName, _}) ->
-            ok = couch_config:delete("compactions", DbName, false)
-        end,
-        Configs).
-
-admin_user_ctx() ->
-    {user_ctx, #user_ctx{roles = [<<"_admin">>]}}.
-
-create_db() ->
-    {ok, _} = couch_db:create(test_db_name(), [admin_user_ctx()]).
-
-delete_db() ->
-    couch_server:delete(test_db_name(), [admin_user_ctx()]).
-
-add_design_doc(Db) ->
-    DDoc = couch_doc:from_json_obj({[
-        {<<"_id">>, <<"_design/foo">>},
-        {<<"language">>, <<"javascript">>},
-        {<<"views">>, {[
-            {<<"foo">>, {[
-                {<<"map">>, <<"function(doc) { emit(doc._id, doc); }">>}
-            ]}},
-            {<<"foo2">>, {[
-                {<<"map">>, <<"function(doc) { emit(doc._id, doc); }">>}
-            ]}},
-            {<<"foo3">>, {[
-                {<<"map">>, <<"function(doc) { emit(doc._id, doc); }">>}
-            ]}}
-        ]}}
-    ]}),
-    {ok, _} = couch_db:update_docs(Db, [DDoc]),
-    {ok, _} = couch_db:ensure_full_commit(Db),
-    ok.
-
-populate(DbFrag, ViewFrag, MinFileSize) ->
-    {CurDbFrag, DbFileSize} = get_db_frag(),
-    {CurViewFrag, ViewFileSize} = get_view_frag(),
-    populate(
-        DbFrag, ViewFrag, MinFileSize, CurDbFrag, CurViewFrag,
-        lists:min([DbFileSize, ViewFileSize])).
-
-populate(DbFrag, ViewFrag, MinFileSize, CurDbFrag, CurViewFrag, FileSize)
-    when CurDbFrag >= DbFrag, CurViewFrag >= ViewFrag, FileSize >= MinFileSize ->
-    ok;
-populate(DbFrag, ViewFrag, MinFileSize, _, _, _) ->
-    update(),
-    {CurDbFrag, DbFileSize} = get_db_frag(),
-    {CurViewFrag, ViewFileSize} = get_view_frag(),
-    populate(
-        DbFrag, ViewFrag, MinFileSize, CurDbFrag, CurViewFrag,
-        lists:min([DbFileSize, ViewFileSize])).
-
-update() ->
-    {ok, Db} = couch_db:open_int(test_db_name(), []),
-    lists:foreach(fun(_) ->
-        Doc = couch_doc:from_json_obj({[{<<"_id">>, couch_uuids:new()}]}),
-        {ok, _} = couch_db:update_docs(Db, [Doc]),
-        query_view()
-    end, lists:seq(1, 100)),
-    couch_db:close(Db).
-
-db_url() ->
-    "http://" ++ get(addr) ++ ":" ++ get(port) ++ "/" ++
-        binary_to_list(test_db_name()).
-
-query_view() ->
-    {ok, Code, _Headers, _Body} = test_util:request(
-        db_url() ++ "/_design/foo/_view/foo", [], get),
-    case Code of
-    200 ->
-        ok;
-    _ ->
-        etap:bail("error querying view")
-    end.
-
-get_db_frag() ->
-    {ok, Db} = couch_db:open_int(test_db_name(), []),
-    {ok, Info} = couch_db:get_db_info(Db),
-    couch_db:close(Db),
-    FileSize = couch_util:get_value(disk_size, Info),
-    DataSize = couch_util:get_value(data_size, Info),
-    {round((FileSize - DataSize) / FileSize * 100), FileSize}.
-
-get_view_frag() ->
-    {ok, Db} = couch_db:open_int(test_db_name(), []),
-    {ok, Info} = couch_mrview:get_info(Db, <<"_design/foo">>),
-    couch_db:close(Db),
-    FileSize = couch_util:get_value(disk_size, Info),
-    DataSize = couch_util:get_value(data_size, Info),
-    {round((FileSize - DataSize) / FileSize * 100), FileSize}.
-
-
-wait_compaction_finished() ->
-    Parent = self(),
-    Loop = spawn_link(fun() -> wait_loop(Parent) end),
-    receive
-    {done, Loop} ->
-        etap:diag("Database and view compaction have finished")
-    after 60000 ->
-        etap:bail("Compaction not triggered")
-    end.
-
-wait_loop(Parent) ->
-    {ok, Db} = couch_db:open_int(test_db_name(), []),
-    {ok, DbInfo} = couch_db:get_db_info(Db),
-    {ok, ViewInfo} = couch_mrview:get_info(Db, <<"_design/foo">>),
-    couch_db:close(Db),
-    case (couch_util:get_value(compact_running, ViewInfo) =:= true) orelse
-        (couch_util:get_value(compact_running, DbInfo) =:= true) of
-    false ->
-        Parent ! {done, self()};
-    true ->
-        ok = timer:sleep(500),
-        wait_loop(Parent)
-    end.

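For reference, get_db_frag/0 and get_view_frag/0 above report fragmentation as
round((FileSize - DataSize) / FileSize * 100). With, say, disk_size 200000 and
data_size 50000 (illustrative numbers only):

    1> round((200000 - 50000) / 200000 * 100).
    75

which is above the 70% thresholds configured in this test, so the compaction
daemon would trigger a compaction.
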
http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/test/etap/230-pbkfd2.t
----------------------------------------------------------------------
diff --git a/test/etap/230-pbkfd2.t b/test/etap/230-pbkfd2.t
deleted file mode 100644
index d980ef6..0000000
--- a/test/etap/230-pbkfd2.t
+++ /dev/null
@@ -1,38 +0,0 @@
-#!/usr/bin/env escript
-%% -*- erlang -*-
-
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
-main(_) ->
-    test_util:init_code_path(),
-    etap:plan(6),
-    etap:is(couch_passwords:pbkdf2(<<"password">>, <<"salt">>, 1, 20),
-            {ok, <<"0c60c80f961f0e71f3a9b524af6012062fe037a6">>},
-            "test vector #1"),
-    etap:is(couch_passwords:pbkdf2(<<"password">>, <<"salt">>, 2, 20),
-            {ok, <<"ea6c014dc72d6f8ccd1ed92ace1d41f0d8de8957">>},
-            "test vector #2"),
-    etap:is(couch_passwords:pbkdf2(<<"password">>, <<"salt">>, 4096, 20),
-            {ok, <<"4b007901b765489abead49d926f721d065a429c1">>},
-            "test vector #3"),
-    etap:is(couch_passwords:pbkdf2(<<"passwordPASSWORDpassword">>,
-                                                     <<"saltSALTsaltSALTsaltSALTsaltSALTsalt">>, 4096, 25),
-            {ok, <<"3d2eec4fe41c849b80c8d83662c0e44a8b291a964cf2f07038">>},
-            "test vector #4"),
-    etap:is(couch_passwords:pbkdf2(<<"pass\0word">>, <<"sa\0lt">>, 4096, 16),
-            {ok, <<"56fa6aa75548099dcc37d7f03425e0c3">>},
-            "test vector #5"),
-    etap:is(couch_passwords:pbkdf2(<<"password">>, <<"salt">>, 16777216, 20),
-            {ok, <<"eefe3d61cd4da4e4e9945b3d6ba2158c2634e984">>},
-            "test vector #6"),
-    etap:end_tests().

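For reference, these six inputs are the PBKDF2-HMAC-SHA1 test vectors from
RFC 6070; as the assertions above show, couch_passwords:pbkdf2/4 returns the
derived key hex-encoded, e.g.:

    1> couch_passwords:pbkdf2(<<"password">>, <<"salt">>, 1, 20).
    {ok, <<"0c60c80f961f0e71f3a9b524af6012062fe037a6">>}
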
http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/test/etap/231-cors.t
----------------------------------------------------------------------
diff --git a/test/etap/231-cors.t b/test/etap/231-cors.t
deleted file mode 100644
index dd08ca8..0000000
--- a/test/etap/231-cors.t
+++ /dev/null
@@ -1,433 +0,0 @@
-#!/usr/bin/env escript
-%% -*- erlang -*-
-
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
--record(user_ctx, {
-    name = null,
-    roles = [],
-    handler
-}).
-
-
--define(SUPPORTED_METHODS, "GET, HEAD, POST, PUT, DELETE, TRACE, CONNECT, COPY, OPTIONS").
-server() ->
-    lists:concat([
-        "http://127.0.0.1:",
-        mochiweb_socket_server:get(couch_httpd, port),
-        "/"
-    ]).
-
-
-main(_) ->
-    test_util:init_code_path(),
-
-    etap:plan(28),
-    case (catch test()) of
-        ok ->
-            etap:end_tests();
-        Other ->
-            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
-            etap:bail(Other)
-    end,
-    ok.
-
-dbname() -> "etap-test-db".
-dbname1() -> "etap-test-db1".
-dbname2() -> "etap-test-db2".
-
-admin_user_ctx() -> {user_ctx, #user_ctx{roles=[<<"_admin">>]}}.
-
-set_admin_password(UserName, Password) ->
-    Hashed = couch_passwords:hash_admin_password(Password),
-    couch_config:set("admins", UserName, Hashed, false).
-
-cycle_db(DbName) ->
-    couch_server:delete(list_to_binary(DbName), [admin_user_ctx()]),
-    {ok, Db} = couch_db:create(list_to_binary(DbName), [admin_user_ctx()]),
-    Db.
-
-test() ->
-
-    ibrowse:start(),
-    crypto:start(),
-
-    %% launch couchdb
-    couch_server_sup:start_link(test_util:config_files()),
-
-    %% initialize db
-    timer:sleep(1000),
-    Db = cycle_db(dbname()),
-    Db1 = cycle_db(dbname1()),
-    Db2 = cycle_db(dbname2()),
-
-    % CORS is disabled by default
-    test_no_headers_server(),
-    test_no_headers_db(),
-
-    % Now enable CORS
-    ok = couch_config:set("httpd", "enable_cors", "true", false),
-    ok = couch_config:set("cors", "origins", "http://example.com", false),
-
-    %% do tests
-    test_incorrect_origin_simple_request(),
-    test_incorrect_origin_preflight_request(),
-
-    test_preflight_request(),
-    test_db_request(),
-    test_doc_with_attachment_request(),
-    test_doc_with_attachment_range_request(),
-    test_db_preflight_request(),
-    test_db1_origin_request(),
-    test_preflight_with_port1(),
-    test_preflight_with_scheme1(),
-
-    ok = couch_config:set("cors", "origins", "http://example.com:5984", false),
-    test_preflight_with_port2(),
-
-    ok = couch_config:set("cors", "origins", "https://example.com:5984", false),
-    test_preflight_with_scheme2(),
-
-    ok = couch_config:set("cors", "origins", "*", false),
-    test_preflight_with_wildcard(),
-
-    ok = couch_config:set("cors", "origins", "http://example.com", false),
-    test_case_sensitive_mismatch_of_allowed_origins(),
-
-    % http://www.w3.org/TR/cors/#supports-credentials
-    % 6.1.3
-    % If the resource supports credentials add a single
-    % Access-Control-Allow-Origin header, with the value
-    % of the Origin header as value, and add a single
-    % Access-Control-Allow-Credentials header with the
-    % case-sensitive string "true" as value.
-    % Otherwise, add a single Access-Control-Allow-Origin
-    % header, with either the value of the Origin header
-    % or the string "*" as value.
-    % Note: The string "*" cannot be used for a resource
-    % that supports credentials.
-    test_db_request_credentials_header_off(),
-    ok = couch_config:set("cors", "credentials", "true", false),
-    test_db_request_credentials_header_on(),
-    % We don’t test wildcards & credentials as that would
-    % fall into the realm of validating config values
-    % which we don’t do at all yet
-
-    % test with vhosts
-    ok = couch_config:set("vhosts", "example.com", "/", false),
-    test_preflight_request(true),
-    test_db_request(true),
-    test_db_preflight_request(true),
-    test_db1_origin_request(true),
-    test_preflight_with_port1(true),
-    test_preflight_with_scheme1(true),
-
-    % TBD
-    % test multiple per-host configuration
-
-    %% do tests with auth
-    ok = set_admin_password("test", "test"),
-
-    test_db_preflight_auth_request(),
-    test_db_origin_auth_request(),
-
-
-    %% restart boilerplate
-    catch couch_db:close(Db),
-    catch couch_db:close(Db1),
-    catch couch_db:close(Db2),
-
-    couch_server:delete(list_to_binary(dbname()), [admin_user_ctx()]),
-    couch_server:delete(list_to_binary(dbname1()), [admin_user_ctx()]),
-    couch_server:delete(list_to_binary(dbname2()), [admin_user_ctx()]),
-
-    timer:sleep(3000),
-    couch_server_sup:stop(),
-    ok.
-
-test_preflight_request() -> test_preflight_request(false).
-test_db_request() -> test_db_request(false).
-test_db_preflight_request() -> test_db_preflight_request(false).
-test_db1_origin_request() -> test_db1_origin_request(false).
-test_preflight_with_port1() -> test_preflight_with_port1(false).
-test_preflight_with_scheme1() -> test_preflight_with_scheme1(false).
-
-%% Cors is disabled, should not return Access-Control-Allow-Origin
-test_no_headers_server() ->
-    Headers = [{"Origin", "http://127.0.0.1"}],
-    {ok, _, Resp, _} = ibrowse:send_req(server(), Headers, get, []),
-    etap:is(proplists:get_value("Access-Control-Allow-Origin", Resp),
-            undefined, "No CORS Headers when disabled").
-
-%% Cors is disabled, should not return Access-Control-Allow-Origin
-test_no_headers_db() ->
-    Headers = [{"Origin", "http://127.0.0.1"}],
-    Url = server() ++ "etap-test-db",
-    {ok, _, Resp, _} = ibrowse:send_req(Url, Headers, get, []),
-    etap:is(proplists:get_value("Access-Control-Allow-Origin", Resp),
-            undefined, "No CORS Headers when disabled").
-
-test_incorrect_origin_simple_request() ->
-    Headers = [{"Origin", "http://127.0.0.1"}],
-    {ok, _, RespHeaders, _} = ibrowse:send_req(server(), Headers, get, []),
-    etap:is(proplists:get_value("Access-Control-Allow-Origin", RespHeaders),
-            undefined,
-            "Specified invalid origin, no Access").
-
-test_incorrect_origin_preflight_request() ->
-    Headers = [{"Origin", "http://127.0.0.1"},
-               {"Access-Control-Request-Method", "GET"}],
-    {ok, _, RespHeaders, _} = ibrowse:send_req(server(), Headers, options, []),
-    etap:is(proplists:get_value("Access-Control-Allow-Origin", RespHeaders),
-            undefined,
-            "invalid origin").
-
-test_preflight_request(VHost) ->
-    Headers = [{"Origin", "http://example.com"},
-               {"Access-Control-Request-Method", "GET"}]
-               ++ maybe_append_vhost(VHost),
-
-    case ibrowse:send_req(server(), Headers, options, []) of
-    {ok, _, RespHeaders, _}  ->
-        etap:is(proplists:get_value("Access-Control-Allow-Methods", RespHeaders),
-            ?SUPPORTED_METHODS,
-            "test_preflight_request Access-Control-Allow-Methods ok");
-    _ ->
-        etap:is(false, true, "ibrowse failed")
-    end.
-
-test_db_request(VHost) ->
-    Headers = [{"Origin", "http://example.com"}]
-               ++ maybe_append_vhost(VHost),
-    Url = server() ++ "etap-test-db",
-    case ibrowse:send_req(Url, Headers, get, []) of
-    {ok, _, RespHeaders, _Body} ->
-        etap:is(proplists:get_value("Access-Control-Allow-Origin", RespHeaders),
-            "http://example.com",
-            "db Access-Control-Allow-Origin ok"),
-        etap:is(proplists:get_value("Access-Control-Expose-Headers", RespHeaders),
-            "Cache-Control, Content-Type, Server",
-            "db Access-Control-Expose-Headers ok");
-    _ ->
-        etap:is(false, true, "ibrowse failed")
-    end.
-
-% COUCHDB-1689
-test_doc_with_attachment_request() ->
-    DocUrl = server() ++ "etap-test-db/doc1",
-    ibrowse:send_req(DocUrl ++ "/attachment.txt",
-        [{"Content-Type", "text/plain"}], put, "this is a text attachment"),
-
-    Headers = [{"Origin", "http://example.com"}],
-    Url = DocUrl ++ "?attachments=true",
-    case ibrowse:send_req(Url, Headers, get, []) of
-    {ok, Code, _RespHeaders, _Body} ->
-        etap:is(Code, "200", "Response without errors");
-    _ ->
-        etap:is(false, true, "ibrowse failed")
-    end.
-
-% COUCHDB-1689
-test_doc_with_attachment_range_request() ->
-    AttachmentUrl = server() ++ "etap-test-db/doc2/attachment.bin",
-    % Use a Content-Type that doesn't get compressed
-    ibrowse:send_req(AttachmentUrl,
-        [{"Content-Type", "application/octet-stream"}], put,
-        "this is an attachment"),
-
-    Headers = [{"Origin", "http://example.com"}, {"Range", "bytes=0-6"}],
-    case ibrowse:send_req(AttachmentUrl, Headers, get, []) of
-    {ok, Code, _RespHeaders, _Body} ->
-        etap:is(Code, "206", "Response without errors");
-    _ ->
-        etap:is(false, true, "ibrowse failed")
-    end.
-
-% COUCHDB-1697
-test_if_none_match_header() ->
-    Url = server() ++ "etap-test-db/doc2",
-    Headers = [{"Origin", "http://example.com"}],
-    {ok, _, _RespHeaders, _} = ibrowse:send_req(Url, Headers, get, []),
-    ETag = proplists:get_value("ETag", _RespHeaders),
-    Headers2 = [{"Origin", "http://example.com"}, {"If-None-Match", ETag}],
-    case ibrowse:send_req(Url, Headers2, get, []) of
-    {ok, Code, _RespHeaders2, _} ->
-        etap:is(Code, "304", "Responded with Not Modified");
-    _ ->
-        etap:is(false, true, "ibrowse failed")
-    end.
-
-test_db_request_credentials_header_off() ->
-    Headers = [{"Origin", "http://example.com"}],
-    Url = server() ++ "etap-test-db",
-    case ibrowse:send_req(Url, Headers, get, []) of
-    {ok, _, RespHeaders, _Body} ->
-        etap:is(proplists:get_value("Access-Control-Allow-Credentials", RespHeaders),
-            undefined,
-            "db Access-Control-Allow-Credentials off");
-    _ ->
-        etap:is(false, true, "ibrowse failed")
-    end.
-
-test_db_request_credentials_header_on() ->
-    Headers = [{"Origin", "http://example.com"}],
-    Url = server() ++ "etap-test-db",
-    case ibrowse:send_req(Url, Headers, get, []) of
-    {ok, _, RespHeaders, _Body} ->
-        etap:is(proplists:get_value("Access-Control-Allow-Credentials", RespHeaders),
-            "true",
-            "db Access-Control-Allow-Credentials ok");
-    _ ->
-        etap:is(false, true, "ibrowse failed")
-    end.
-
-test_db_preflight_request(VHost) ->
-    Url = server() ++ "etap-test-db",
-    Headers = [{"Origin", "http://example.com"},
-               {"Access-Control-Request-Method", "GET"}]
-               ++ maybe_append_vhost(VHost),
-    case ibrowse:send_req(Url, Headers, options, []) of
-    {ok, _, RespHeaders, _} ->
-        etap:is(proplists:get_value("Access-Control-Allow-Methods", RespHeaders),
-                ?SUPPORTED_METHODS,
-                "db Access-Control-Allow-Methods ok");
-    _ ->
-        etap:is(false, true, "ibrowse failed")
-    end.
-
-
-test_db1_origin_request(VHost) ->
-    Headers = [{"Origin", "http://example.com"}]
-               ++ maybe_append_vhost(VHost),
-    Url = server() ++ "etap-test-db1",
-    case ibrowse:send_req(Url, Headers, get, [], [{host_header, "example.com"}]) of
-    {ok, _, RespHeaders, _Body} ->
-        etap:is(proplists:get_value("Access-Control-Allow-Origin", RespHeaders),
-            "http://example.com",
-            "db origin ok");
-    _Else ->
-        io:format("else ~p~n", [_Else]),
-        etap:is(false, true, "ibrowse failed")
-    end.
-
-test_db_preflight_auth_request() ->
-    Url = server() ++ "etap-test-db2",
-    Headers = [{"Origin", "http://example.com"},
-               {"Access-Control-Request-Method", "GET"}],
-    case ibrowse:send_req(Url, Headers, options, []) of
-    {ok, _Status, RespHeaders, _} ->
-        etap:is(proplists:get_value("Access-Control-Allow-Methods", RespHeaders),
-                ?SUPPORTED_METHODS,
-                "db Access-Control-Allow-Methods ok");
-    _ ->
-        etap:is(false, true, "ibrowse failed")
-    end.
-
-
-test_db_origin_auth_request() ->
-    Headers = [{"Origin", "http://example.com"}],
-    Url = server() ++ "etap-test-db2",
-
-    case ibrowse:send_req(Url, Headers, get, [],
-        [{basic_auth, {"test", "test"}}]) of
-    {ok, _, RespHeaders, _Body} ->
-        etap:is(proplists:get_value("Access-Control-Allow-Origin", RespHeaders),
-            "http://example.com",
-            "db origin ok");
-    _ ->
-        etap:is(false, true, "ibrowse failed")
-    end.
-
-test_preflight_with_wildcard() ->
-    Headers = [{"Origin", "http://example.com"},
-               {"Access-Control-Request-Method", "GET"}],
-    case ibrowse:send_req(server(), Headers, options, []) of
-    {ok, _, RespHeaders, _}  ->
-        % I would either expect the current origin or a wildcard to be returned
-        etap:is(proplists:get_value("Access-Control-Allow-Origin", RespHeaders),
-            "http://example.com",
-            "db origin ok");
-    _ ->
-        etap:is(false, true, "ibrowse failed")
-    end.
-
-test_preflight_with_port1(VHost) ->
-    Headers = [{"Origin", "http://example.com:5984"},
-               {"Access-Control-Request-Method", "GET"}]
-               ++ maybe_append_vhost(VHost),
-    case ibrowse:send_req(server(), Headers, options, []) of
-    {ok, _, RespHeaders, _}  ->
-        % I would either expect the current origin or a wildcard to be returned
-        etap:is(proplists:get_value("Access-Control-Allow-Origin", RespHeaders),
-            undefined,
-            "check non defined host:port in origin ok");
-    _ ->
-        etap:is(false, true, "ibrowse failed")
-    end.
-
-test_preflight_with_port2() ->
-    Headers = [{"Origin", "http://example.com:5984"},
-               {"Access-Control-Request-Method", "GET"}],
-    case ibrowse:send_req(server(), Headers, options, []) of
-    {ok, _, RespHeaders, _}  ->
-        % I would either expect the current origin or a wildcard to be returned
-        etap:is(proplists:get_value("Access-Control-Allow-Origin", RespHeaders),
-            "http://example.com:5984",
-            "check host:port in origin ok");
-    _ ->
-        etap:is(false, true, "ibrowse failed")
-    end.
-
-test_preflight_with_scheme1(VHost) ->
-    Headers = [{"Origin", "https://example.com:5984"},
-               {"Access-Control-Request-Method", "GET"}]
-               ++ maybe_append_vhost(VHost),
-    case ibrowse:send_req(server(), Headers, options, []) of
-    {ok, _, RespHeaders, _}  ->
-        % I would either expect the current origin or a wildcard to be returned
-        etap:is(proplists:get_value("Access-Control-Allow-Origin", RespHeaders),
-            undefined,
-            "check non defined scheme in origin ok");
-    _ ->
-        etap:is(false, true, "ibrowse failed")
-    end.
-
-test_preflight_with_scheme2() ->
-    Headers = [{"Origin", "https://example.com:5984"},
-               {"Access-Control-Request-Method", "GET"}],
-    case ibrowse:send_req(server(), Headers, options, []) of
-    {ok, _, RespHeaders, _}  ->
-        % I would either expect the current origin or a wildcard to be returned
-        etap:is(proplists:get_value("Access-Control-Allow-Origin", RespHeaders),
-            "https://example.com:5984",
-            "check scheme in origin ok");
-    _ ->
-        etap:is(false, true, "ibrowse failed")
-    end.
-
-test_case_sensitive_mismatch_of_allowed_origins() ->
-    Headers = [{"Origin", "http://EXAMPLE.COM"}],
-    Url = server() ++ "etap-test-db",
-    case ibrowse:send_req(Url, Headers, get, []) of
-    {ok, _, RespHeaders, _Body} ->
-        etap:is(proplists:get_value("Access-Control-Allow-Origin", RespHeaders),
-            undefined,
-            "db access config case mismatch");
-    _ ->
-        etap:is(false, true, "ibrowse failed")
-    end.
-
-maybe_append_vhost(true) ->
-    [{"Host", "http://example.com"}];
-maybe_append_vhost(_) ->
-    [].
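
The CORS checks above all reduce to the same pattern: send a request carrying an Origin header through ibrowse and compare the Access-Control-Allow-Origin response header with the expected value. A condensed sketch of that pattern, using only calls that appear in the test itself (the URL and expected origin are placeholders, not part of the suite):

    Headers = [{"Origin", "http://example.com"}],
    {ok, _Status, RespHeaders, _Body} =
        ibrowse:send_req("http://127.0.0.1:5984/etap-test-db", Headers, get, []),
    etap:is(proplists:get_value("Access-Control-Allow-Origin", RespHeaders),
            "http://example.com", "origin echoed back").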

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/test/etap/250-upgrade-legacy-view-files.t
----------------------------------------------------------------------
diff --git a/test/etap/250-upgrade-legacy-view-files.t b/test/etap/250-upgrade-legacy-view-files.t
deleted file mode 100644
index e720b1c..0000000
--- a/test/etap/250-upgrade-legacy-view-files.t
+++ /dev/null
@@ -1,168 +0,0 @@
-#!/usr/bin/env escript
-%% -*- erlang -*-
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
-main(_) ->
-    test_util:init_code_path(),
-
-    etap:plan(8),
-    case (catch test()) of
-        ok ->
-            etap:end_tests();
-        Other ->
-            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
-            etap:bail(Other)
-    end,
-    ok.
-
-
-test() ->
-    couch_server_sup:start_link(test_util:config_files()),
-
-    % commit sofort
-    ok = couch_config:set("query_server_config", "commit_freq", "0"),
-
-    test_upgrade(),
-
-    couch_server_sup:stop(),
-    ok.
-
-fixture_path() ->
-    test_util:source_file("test/etap/fixtures").
-
-old_db() ->
-    fixture_path() ++ "/" ++ old_db_name().
-
-old_db_name() ->
-    "test.couch".
-
-old_view() ->
-    fixture_path() ++ "/" ++ old_view_name().
-
-old_view_name() ->
-    "3b835456c235b1827e012e25666152f3.view".
-
-new_view_name() ->
-    "a1c5929f912aca32f13446122cc6ce50.view".
-
-couch_url() ->
-    "http://" ++ addr() ++ ":" ++ port().
-
-addr() ->
-    couch_config:get("httpd", "bind_address", "127.0.0.1").
-
-port() ->
-    integer_to_list(mochiweb_socket_server:get(couch_httpd, port)).
-
-
-% <= 1.2.x
--record(index_header,
-    {seq=0,
-    purge_seq=0,
-    id_btree_state=nil,
-    view_states=nil
-    }).
-
-% >= 1.3.x
--record(mrheader, {
-    seq=0,
-    purge_seq=0,
-    id_btree_state=nil,
-    view_states=nil
-}).
-
-ensure_header(File, MatchFun, Msg) ->
-    {ok, Fd} = couch_file:open(File),
-    {ok, {_Sig, Header}} = couch_file:read_header(Fd),
-    couch_file:close(Fd),
-    etap:fun_is(MatchFun, Header, "ensure " ++ Msg ++ " header for file: " ++ File).
-
-file_exists(File) ->
-    % open without creating
-    case file:open(File, [read, raw]) of
-    {ok, Fd_Read} ->
-        file:close(Fd_Read),
-        true;
-    _Error ->
-        false
-    end.
-
-cleanup() ->
-    DbDir = couch_config:get("couchdb", "database_dir"),
-    Files = [
-        DbDir ++ "/test.couch",
-        DbDir ++ "/.test_design/" ++ old_view_name(),
-        DbDir ++ "/.test_design/mrview/" ++ new_view_name()
-    ],
-    lists:foreach(fun(File) -> file:delete(File) end, Files),
-    etap:ok(true, "cleanup").
-
-test_upgrade() ->
-
-    cleanup(),
-
-    % copy old db file into db dir
-    DbDir = couch_config:get("couchdb", "database_dir"),
-    DbTarget = DbDir ++ "/" ++ old_db_name(),
-    filelib:ensure_dir(DbDir),
-    OldDbName = old_db(),
-    {ok, _} = file:copy(OldDbName, DbTarget),
-
-    % copy old view file into view dir
-    ViewDir = couch_config:get("couchdb", "view_index_dir"),
-    ViewTarget = ViewDir ++ "/.test_design/" ++ old_view_name(),
-    filelib:ensure_dir(ViewTarget),
-    OldViewName = old_view(),
-    {ok, _} = file:copy(OldViewName, ViewTarget),
-
-    % ensure old header
-    ensure_header(ViewTarget, fun(#index_header{}) -> true; (_) -> false end, "old"),
-
-    % query view
-    ViewUrl = couch_url() ++ "/test/_design/test/_view/test",
-    {ok, Code, _Headers, Body}  = test_util:request(ViewUrl, [], get),
-
-    % expect results
-    etap:is(Code, 200, "valid view result http status code"),
-    ExpectBody = <<"{\"total_rows\":2,\"offset\":0,\"rows\":[\r\n{\"id\":\"193f2f9c596ddc7ad326f7da470009ec\",\"key\":1,\"value\":null},\r\n{\"id\":\"193f2f9c596ddc7ad326f7da470012b6\",\"key\":2,\"value\":null}\r\n]}\n">>,
-    etap:is(Body, ExpectBody, "valid view result"),
-
-    % ensure old file gone.
-    etap:is(file_exists(ViewTarget), false, "ensure old file is gone"),
-
-    % ensure new header
-    NewViewFile = ViewDir ++ "/.test_design/mrview/" ++ new_view_name(),
-
-    % add doc(s)
-    test_util:request(
-        couch_url() ++ "/test/boo",
-        [{"Content-Type", "application/json"}],
-        put,
-        <<"{\"a\":3}">>),
-
-    % query again
-    {ok, Code2, _Headers2, Body2} = test_util:request(ViewUrl, [], get),
-
-    % expect results
-    etap:is(Code2, 200, "valid view result http status code"),
-    ExpectBody2 = <<"{\"total_rows\":3,\"offset\":0,\"rows\":[\r\n{\"id\":\"193f2f9c596ddc7ad326f7da470009ec\",\"key\":1,\"value\":null},\r\n{\"id\":\"193f2f9c596ddc7ad326f7da470012b6\",\"key\":2,\"value\":null},\r\n{\"id\":\"boo\",\"key\":3,\"value\":null}\r\n]}\n">>,
-    etap:is(Body2, ExpectBody2, "valid view result after doc add"),
-
-    % ensure no rebuild
-    % TBD no idea how to actually test this.
-
-    % ensure new header.
-    timer:sleep(2000),
-    ensure_header(NewViewFile, fun(#mrheader{}) -> true; (_) -> false end, "new"),
-
-    ok.
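
The upgrade test above distinguishes pre-1.3.x and post-1.3.x view files purely by the record found in the file header: #index_header{} for the old layout, #mrheader{} for the new one. A minimal sketch of that check in isolation, reusing the couch_file calls and record definitions shown in the test (the function name is illustrative only):

    %% Returns old | new | unknown for a given .view file.
    view_header_kind(File) ->
        {ok, Fd} = couch_file:open(File),
        {ok, {_Sig, Header}} = couch_file:read_header(Fd),
        couch_file:close(Fd),
        case Header of
            #index_header{} -> old;   % <= 1.2.x layout
            #mrheader{}     -> new;   % >= 1.3.x layout
            _               -> unknown
        end.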

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/test/etap/Makefile.am
----------------------------------------------------------------------
diff --git a/test/etap/Makefile.am b/test/etap/Makefile.am
deleted file mode 100644
index 66048a9..0000000
--- a/test/etap/Makefile.am
+++ /dev/null
@@ -1,108 +0,0 @@
-## Licensed under the Apache License, Version 2.0 (the "License"); you may not
-## use this file except in compliance with the License. You may obtain a copy of
-## the License at
-##
-##   http://www.apache.org/licenses/LICENSE-2.0
-##
-## Unless required by applicable law or agreed to in writing, software
-## distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-## WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-## License for the specific language governing permissions and limitations under
-## the License.
-
-noinst_SCRIPTS = run
-noinst_DATA = test_util.beam test_web.beam
-
-noinst_PROGRAMS = test_cfg_register
-test_cfg_register_SOURCES = test_cfg_register.c
-test_cfg_register_CFLAGS = -D_BSD_SOURCE
-
-%.beam: %.erl
-	$(ERLC) $<
-
-run: run.tpl
-	sed -e "s|%abs_top_srcdir%|@abs_top_srcdir@|g" \
-	    -e "s|%abs_top_builddir%|@abs_top_builddir@|g" > \
-	$@ < $<
-	chmod +x $@
-
-# @@ wildcards are NOT portable, please replace with clean-local rules
-CLEANFILES = run *.beam
-
-DISTCLEANFILES = temp.*
-
-fixture_files = \
-    fixtures/3b835456c235b1827e012e25666152f3.view \
-    fixtures/test.couch
-
-tap_files = \
-    001-load.t \
-    002-icu-driver.t \
-    010-file-basics.t \
-    011-file-headers.t \
-    020-btree-basics.t \
-    021-btree-reductions.t \
-    030-doc-from-json.t \
-    031-doc-to-json.t \
-    040-util.t \
-    041-uuid-gen-id.ini \
-    041-uuid-gen-seq.ini \
-    041-uuid-gen-utc.ini \
-    041-uuid-gen.t \
-    042-work-queue.t \
-    050-stream.t \
-    060-kt-merging.t \
-    061-kt-missing-leaves.t \
-    062-kt-remove-leaves.t \
-    063-kt-get-leaves.t \
-    064-kt-counting.t \
-    065-kt-stemming.t \
-    070-couch-db.t \
-    072-cleanup.t \
-    073-changes.t \
-    074-doc-update-conflicts.t \
-    075-auth-cache.t \
-    076-file-compression.t \
-    077-couch-db-fast-db-delete-create.t \
-    080-config-get-set.t \
-    081-config-override.1.ini \
-    081-config-override.2.ini \
-    081-config-override.t \
-    082-config-register.t \
-    083-config-no-files.t \
-    090-task-status.t \
-    100-ref-counter.t \
-    120-stats-collect.t \
-    121-stats-aggregates.cfg \
-    121-stats-aggregates.ini \
-    121-stats-aggregates.t \
-    130-attachments-md5.t \
-    140-attachment-comp.t \
-    150-invalid-view-seq.t \
-    160-vhosts.t \
-    170-os-daemons.es \
-    170-os-daemons.t \
-    171-os-daemons-config.es \
-    171-os-daemons-config.t \
-    172-os-daemon-errors.1.sh \
-    172-os-daemon-errors.2.sh \
-    172-os-daemon-errors.3.sh \
-    172-os-daemon-errors.4.sh \
-    172-os-daemon-errors.t \
-    173-os-daemon-cfg-register.t \
-    180-http-proxy.ini \
-    180-http-proxy.t \
-    190-json-stream-parse.t \
-    200-view-group-no-db-leaks.t \
-    201-view-group-shutdown.t \
-    210-os-proc-pool.t \
-    220-compaction-daemon.t \
-    230-pbkfd2.t \
-    231-cors.t \
-    250-upgrade-legacy-view-files.t
-
-EXTRA_DIST = \
-    run.tpl \
-    test_web.erl \
-    $(fixture_files) \
-    $(tap_files)

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/test/etap/fixtures/3b835456c235b1827e012e25666152f3.view
----------------------------------------------------------------------
diff --git a/test/etap/fixtures/3b835456c235b1827e012e25666152f3.view b/test/etap/fixtures/3b835456c235b1827e012e25666152f3.view
deleted file mode 100644
index 9c67648..0000000
Binary files a/test/etap/fixtures/3b835456c235b1827e012e25666152f3.view and /dev/null differ

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/test/etap/fixtures/test.couch
----------------------------------------------------------------------
diff --git a/test/etap/fixtures/test.couch b/test/etap/fixtures/test.couch
deleted file mode 100644
index 32c79af..0000000
Binary files a/test/etap/fixtures/test.couch and /dev/null differ

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/test/etap/run.tpl
----------------------------------------------------------------------
diff --git a/test/etap/run.tpl b/test/etap/run.tpl
deleted file mode 100644
index d6d6dbe..0000000
--- a/test/etap/run.tpl
+++ /dev/null
@@ -1,32 +0,0 @@
-#!/bin/sh -e
-
-# Licensed under the Apache License, Version 2.0 (the "License"); you may not
-# use this file except in compliance with the License. You may obtain a copy of
-# the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations under
-# the License.
-
-SRCDIR="%abs_top_srcdir%"
-BUILDDIR="%abs_top_builddir%"
-export ERL_LIBS="$BUILDDIR/src/:$ERL_LIBS"
-export ERL_FLAGS="$ERL_FLAGS -pa $BUILDDIR/test/etap/"
-
-if test $# -eq 1; then
-    OPTS=""
-    TGT=$1
-else
-    OPTS=$1
-    TGT=$2
-fi
-
-if test -f $TGT; then
-    prove $OPTS $TGT
-else
-    prove $OPTS $TGT/*.t
-fi

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/test/etap/test_cfg_register.c
----------------------------------------------------------------------
diff --git a/test/etap/test_cfg_register.c b/test/etap/test_cfg_register.c
deleted file mode 100644
index c910bac..0000000
--- a/test/etap/test_cfg_register.c
+++ /dev/null
@@ -1,31 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-#include <stdio.h>
-#include <stdlib.h>
-
-int
-main(int argc, const char * argv[])
-{
-    char c = '\0';
-    size_t num = 1;
-    
-    fprintf(stdout, "[\"register\", \"s1\"]\n");
-    fprintf(stdout, "[\"register\", \"s2\", \"k\"]\n");
-    fflush(stdout);
-    
-    while(c != '\n' && num > 0) {
-        num = fread(&c, 1, 1, stdin);
-    }
-    
-    exit(0);
-}

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/test/etap/test_util.erl.in
----------------------------------------------------------------------
diff --git a/test/etap/test_util.erl.in b/test/etap/test_util.erl.in
deleted file mode 100644
index 352714e..0000000
--- a/test/etap/test_util.erl.in
+++ /dev/null
@@ -1,94 +0,0 @@
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
--module(test_util).
-
--export([init_code_path/0]).
--export([source_file/1, build_file/1, config_files/0]).
--export([run/2]).
--export([request/3, request/4]).
-
-srcdir() ->
-    "@abs_top_srcdir@".
-
-builddir() ->
-    "@abs_top_builddir@".
-
-init_code_path() ->
-    Paths = [
-        "etap",
-        "couchdb",
-        "ejson",
-        "erlang-oauth",
-        "ibrowse",
-        "mochiweb",
-        "snappy"
-    ],
-    lists:foreach(fun(Name) ->
-        code:add_patha(filename:join([builddir(), "src", Name]))
-    end, Paths).
-
-source_file(Name) ->
-    filename:join([srcdir(), Name]).
-
-build_file(Name) ->
-    filename:join([builddir(), Name]).
-
-config_files() ->
-    [
-        build_file("etc/couchdb/default_dev.ini"),
-        source_file("test/random_port.ini"),
-        build_file("etc/couchdb/local_dev.ini")
-    ].
-
-
-run(Plan, Fun) ->
-    test_util:init_code_path(),
-    etap:plan(Plan),
-    case (catch Fun()) of
-        ok ->
-            etap:end_tests();
-        Other ->
-            etap:diag(io_lib:format("Test died abnormally:~n~p", [Other])),
-            timer:sleep(500),
-            etap:bail(Other)
-    end,
-    ok.
-
-
-request(Url, Headers, Method) ->
-    request(Url, Headers, Method, []).
-
-request(Url, Headers, Method, Body) ->
-    request(Url, Headers, Method, Body, 3).
-
-request(_Url, _Headers, _Method, _Body, 0) ->
-    {error, request_failed};
-request(Url, Headers, Method, Body, N) ->
-    case code:is_loaded(ibrowse) of
-    false ->
-        {ok, _} = ibrowse:start();
-    _ ->
-        ok
-    end,
-    case ibrowse:send_req(Url, Headers, Method, Body) of
-    {ok, Code0, RespHeaders, RespBody0} ->
-        Code = list_to_integer(Code0),
-        RespBody = iolist_to_binary(RespBody0),
-        {ok, Code, RespHeaders, RespBody};
-    {error, {'EXIT', {normal, _}}} ->
-        % Connection closed right after a successful request that
-        % used the same connection.
-        request(Url, Headers, Method, Body, N - 1);
-    Error ->
-        Error
-    end.
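
test_util:request/3,4 is a thin wrapper around ibrowse:send_req/4 that converts the status code to an integer, returns the body as a binary, and retries up to three times when a kept-alive connection is closed underneath it. A hedged usage sketch from an etap test (the URL and expected values are illustrative, not taken from the suite):

    {ok, Code, _RespHeaders, Body} =
        test_util:request("http://127.0.0.1:5984/", [], get),
    etap:is(Code, 200, "root URL answers"),
    etap:ok(is_binary(Body), "body is returned as a binary").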

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/test/etap/test_web.erl
----------------------------------------------------------------------
diff --git a/test/etap/test_web.erl b/test/etap/test_web.erl
deleted file mode 100644
index ed78651..0000000
--- a/test/etap/test_web.erl
+++ /dev/null
@@ -1,99 +0,0 @@
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
--module(test_web).
--behaviour(gen_server).
-
--export([start_link/0, loop/1, get_port/0, set_assert/1, check_last/0]).
--export([init/1, terminate/2, code_change/3]).
--export([handle_call/3, handle_cast/2, handle_info/2]).
-
--define(SERVER, test_web_server).
--define(HANDLER, test_web_handler).
-
-start_link() ->
-    gen_server:start({local, ?HANDLER}, ?MODULE, [], []),
-    mochiweb_http:start([
-        {name, ?SERVER},
-        {loop, {?MODULE, loop}},
-        {port, 0}
-    ]).
-
-loop(Req) ->
-    %etap:diag("Handling request: ~p", [Req]),
-    case gen_server:call(?HANDLER, {check_request, Req}) of
-        {ok, RespInfo} ->
-            {ok, Req:respond(RespInfo)};
-        {raw, {Status, Headers, BodyChunks}} ->
-            Resp = Req:start_response({Status, Headers}),
-            lists:foreach(fun(C) -> Resp:send(C) end, BodyChunks),
-            erlang:put(mochiweb_request_force_close, true),
-            {ok, Resp};
-        {chunked, {Status, Headers, BodyChunks}} ->
-            Resp = Req:respond({Status, Headers, chunked}),
-            timer:sleep(500),
-            lists:foreach(fun(C) -> Resp:write_chunk(C) end, BodyChunks),
-            Resp:write_chunk([]),
-            {ok, Resp};
-        {error, Reason} ->
-            etap:diag("Error: ~p", [Reason]),
-            Body = lists:flatten(io_lib:format("Error: ~p", [Reason])),
-            {ok, Req:respond({200, [], Body})}
-    end.
-
-get_port() ->
-    mochiweb_socket_server:get(?SERVER, port).
-
-set_assert(Fun) ->
-    ok = gen_server:call(?HANDLER, {set_assert, Fun}).
-
-check_last() ->
-    gen_server:call(?HANDLER, last_status).
-
-init(_) ->
-    {ok, nil}.
-
-terminate(_Reason, _State) ->
-    ok.
-
-handle_call({check_request, Req}, _From, State) when is_function(State, 1) ->
-    Resp2 = case (catch State(Req)) of
-        {ok, Resp} -> {reply, {ok, Resp}, was_ok};
-        {raw, Resp} -> {reply, {raw, Resp}, was_ok};
-        {chunked, Resp} -> {reply, {chunked, Resp}, was_ok};
-        Error -> {reply, {error, Error}, not_ok}
-    end,
-    Req:cleanup(),
-    Resp2;
-handle_call({check_request, _Req}, _From, _State) ->
-    {reply, {error, no_assert_function}, not_ok};
-handle_call(last_status, _From, State) when is_atom(State) ->
-    {reply, State, nil};
-handle_call(last_status, _From, State) ->
-    {reply, {error, not_checked}, State};
-handle_call({set_assert, Fun}, _From, nil) ->
-    {reply, ok, Fun};
-handle_call({set_assert, _}, _From, State) ->
-    {reply, {error, assert_function_set}, State};
-handle_call(Msg, _From, State) ->
-    {reply, {ignored, Msg}, State}.
-
-handle_cast(Msg, State) ->
-    etap:diag("Ignoring cast message: ~p", [Msg]),
-    {noreply, State}.
-
-handle_info(Msg, State) ->
-    etap:diag("Ignoring info message: ~p", [Msg]),
-    {noreply, State}.
-
-code_change(_OldVsn, State, _Extra) ->
-    {ok, State}.
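
test_web couples a mochiweb listener on a random port with a gen_server that stores a single assertion fun; every incoming request is handed to that fun, and the verdict can be read back afterwards with check_last/0. A rough usage sketch, assuming ibrowse is already started (the response body and expected values are made up for illustration):

    {ok, _} = test_web:start_link(),
    ok = test_web:set_assert(fun(_Req) ->
        {ok, {200, [], "pong"}}    % {Status, Headers, Body} passed to Req:respond/1
    end),
    Url = "http://127.0.0.1:" ++ integer_to_list(test_web:get_port()) ++ "/",
    {ok, "200", _Headers, "pong"} = ibrowse:send_req(Url, [], get),
    was_ok = test_web:check_last().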

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/test/javascript/Makefile.am
----------------------------------------------------------------------
diff --git a/test/javascript/Makefile.am b/test/javascript/Makefile.am
deleted file mode 100644
index e7036ca..0000000
--- a/test/javascript/Makefile.am
+++ /dev/null
@@ -1,27 +0,0 @@
-## Licensed under the Apache License, Version 2.0 (the "License"); you may not
-## use this file except in compliance with the License. You may obtain a copy of
-## the License at
-##
-##   http://www.apache.org/licenses/LICENSE-2.0
-##
-## Unless required by applicable law or agreed to in writing, software
-## distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-## WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-## License for the specific language governing permissions and limitations under
-## the License.
-
-EXTRA_DIST = \
-	cli_runner.js \
-	couch_http.js \
-	test_setup.js \
-	run.tpl
-
-noinst_SCRIPTS = run
-CLEANFILES = run
-
-run: run.tpl
-	sed -e "s|%abs_top_srcdir%|$(abs_top_srcdir)|" \
-		-e "s|%abs_top_builddir%|$(abs_top_builddir)|" \
-		-e "s|%localstaterundir%|$(abs_top_builddir)/tmp/run|g" \
-	< $< > $@
-	chmod +x $@

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/test/javascript/cli_runner.js
----------------------------------------------------------------------
diff --git a/test/javascript/cli_runner.js b/test/javascript/cli_runner.js
deleted file mode 100644
index e8ebd2e..0000000
--- a/test/javascript/cli_runner.js
+++ /dev/null
@@ -1,47 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-//
-
-/*
- * Futon test suite was designed to be able to run all tests populated into
- * couchTests. Here we should only be loading one test, so we'll pop the first
- * test off the list and run the test. If more than one item is loaded in the
- * test object, return an error.
- */
-function runTest() {
-  var count = 0;
-  var start = new Date().getTime();
-
-  for(var name in couchTests) {
-      count++;
-  }
-
-  if (count !== 1) {
-      console.log('Only one test per file is allowed.');
-      quit(1);
-  }
-
-  try {
-    // Busy-wait roughly 1.2 seconds before running the test
-    while (new Date().getTime() < start + 1200);
-    couchTests[name]();
-    print('OK');
-  } catch(e) {
-    console.log("FAIL\nReason: " + e.message);
-    fmtStack(e.stack);
-    quit(1);
-  }
-}
-
-waitForSuccess(CouchDB.isRunning, 'isRunning');
-
-runTest();

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/test/javascript/couch_http.js
----------------------------------------------------------------------
diff --git a/test/javascript/couch_http.js b/test/javascript/couch_http.js
deleted file mode 100644
index c44ce28..0000000
--- a/test/javascript/couch_http.js
+++ /dev/null
@@ -1,73 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-(function() {
-  if(typeof(CouchHTTP) != "undefined") {
-    CouchHTTP.prototype.open = function(method, url, async) {
-      if(!/^\s*http:\/\//.test(url)) {
-        if(/^\//.test(url)) {
-          // The couch.uri file (base_url) has a trailing slash
-          url = this.base_url + url.slice(1);
-        } else {
-          url = this.base_url + url;
-        }
-      }
-      
-      return this._open(method, url, async);
-    };
-    
-    CouchHTTP.prototype.setRequestHeader = function(name, value) {
-      // Drop content-length headers because cURL will set it for us
-      // based on body length
-      if(name.toLowerCase().replace(/^\s+|\s+$/g, '') != "content-length") {
-        this._setRequestHeader(name, value);
-      }
-    }
-    
-    CouchHTTP.prototype.send = function(body) {
-      this._send(body || "");
-      var headers = {};
-      this._headers.forEach(function(hdr) {
-          var pair = hdr.split(":");
-          var name = pair.shift();
-          headers[name] = pair.join(":").replace(/^\s+|\s+$/g, "");
-      });
-      this.headers = headers;
-    };
-
-    CouchHTTP.prototype.getResponseHeader = function(name) {
-      for(var hdr in this.headers) {
-        if(hdr.toLowerCase() == name.toLowerCase()) {
-          return this.headers[hdr];
-        }
-      }
-      return null;
-    };
-  }
-})();
-
-CouchDB.urlPrefix = "";
-CouchDB.newXhr = function() {
-  return new CouchHTTP();
-};
-
-CouchDB.xhrheader = function(xhr, header) {
-  if(typeof(CouchHTTP) != "undefined" && xhr instanceof CouchHTTP) {
-    return xhr.getResponseHeader(header);
-  } else {
-    return xhr.headers[header];
-  }
-}
-
-CouchDB.xhrbody = function(xhr) {
-  return xhr.responseText || xhr.body;
-}

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/test/javascript/run.tpl
----------------------------------------------------------------------
diff --git a/test/javascript/run.tpl b/test/javascript/run.tpl
deleted file mode 100644
index 75192da..0000000
--- a/test/javascript/run.tpl
+++ /dev/null
@@ -1,138 +0,0 @@
-#!/bin/sh -e
-
-# Licensed under the Apache License, Version 2.0 (the "License"); you may not
-# use this file except in compliance with the License. You may obtain a copy of
-# the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations under
-# the License.
-
-SRC_DIR=%abs_top_srcdir%
-BUILD_DIR=%abs_top_builddir%
-SCRIPT_DIR=$SRC_DIR/share/www/script
-JS_TEST_DIR=$SRC_DIR/test/javascript
-
-COUCHJS=%abs_top_builddir%/src/couchdb/priv/couchjs
-COUCH_URI_FILE=%localstaterundir%/couch.uri
-
-# make check-js calls us with MAKE=$(MAKE) so BSDish `gmake` invocations
-# will get passed on correctly. If $0 gets run manually, default to
-# `make`
-if [ -z "$MAKE" ]; then
-    MAKE=make
-fi
-
-trap 'abort' EXIT INT
-
-start() {
-	./utils/run -b -r 0 -n \
-		-a $BUILD_DIR/etc/couchdb/default_dev.ini \
-		-a $SRC_DIR/test/random_port.ini \
-		-a $BUILD_DIR/etc/couchdb/local_dev.ini 1>/dev/null
-}
-
-stop() {
-    ./utils/run -d 1>/dev/null
-}
-
-restart() {
-    stop
-    start
-}
-
-abort() {
-    trap - 0
-    stop
-    exit 2
-}
-
-process_response() {
-    while read data
-    do
-        if [ "$data" = 'restart' ];
-        then
-            if [ -z $COUCHDB_NO_START ]; then
-                restart
-            fi
-        else
-            echo "$data"
-        fi
-    done
-}
-
-run() {
-    # start the tests
-    /bin/echo -n "$1 ... "
-    $COUCHJS -H -u $COUCH_URI_FILE \
-        $SCRIPT_DIR/json2.js \
-        $SCRIPT_DIR/sha1.js \
-        $SCRIPT_DIR/oauth.js \
-        $SCRIPT_DIR/couch.js \
-        $SCRIPT_DIR/replicator_db_inc.js \
-        $SCRIPT_DIR/couch_test_runner.js \
-        $JS_TEST_DIR/couch_http.js \
-        $JS_TEST_DIR/test_setup.js \
-        $1 \
-        $JS_TEST_DIR/cli_runner.js | process_response
-
-    # capture the pipeline status before any test command clobbers $?
-    STATUS=$?
-    if [ -z "$RESULT" ]; then
-        RESULT=$STATUS
-    elif [ "$STATUS" -eq 1 ]; then
-        RESULT=$STATUS
-    fi
-
-}
-
-run_files() {
-    COUNTER=1
-    FILE_COUNT=$(ls -l $1 | wc -l)
-    FILE_COUNT=$(expr $FILE_COUNT + 0)
-    for TEST_SRC in $1
-    do
-        /bin/echo -n "$COUNTER/$FILE_COUNT "
-        COUNTER=$(expr $COUNTER + 1)
-        run $TEST_SRC
-    done
-}
-
-# start CouchDB
-if [ -z $COUCHDB_NO_START ]; then
-    $MAKE dev
-    start
-fi
-
-echo "Running javascript tests ..."
-
-if [ "$#" -eq 0 ];
-then
-    run_files "$SCRIPT_DIR/test/*.js"
-else
-    if [ -d $1 ]; then
-        run_files "$1/*.js"
-    else
-        TEST_SRC="$1"
-        if [ ! -f $TEST_SRC ]; then
-            TEST_SRC="$SCRIPT_DIR/test/$1"
-            if [ ! -f $TEST_SRC ]; then
-                TEST_SRC="$SCRIPT_DIR/test/$1.js"
-                if [ ! -f $TEST_SRC ]; then
-                    echo "file $1 does not exist"
-                    exit 1
-                fi
-            fi
-        fi
-    fi
-    run $TEST_SRC
-fi
-
-if [ -z $COUCHDB_NO_START ]; then
-    stop
-fi
-
-trap - 0
-exit $RESULT

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/test/javascript/test_setup.js
----------------------------------------------------------------------
diff --git a/test/javascript/test_setup.js b/test/javascript/test_setup.js
deleted file mode 100644
index 9347455..0000000
--- a/test/javascript/test_setup.js
+++ /dev/null
@@ -1,89 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-/*
- * Add global couchTests object required for existing tests.
- */
-var couchTests = {}; 
-
-var console = { 
-  log: function(arg) {
-    var msg = (arg.toString()).replace(/\n/g, "\n    ");
-    print(msg, true);
-  }
-};
-
-var fmtStack = function(stack) {
-  if(!stack) {
-    console.log("No stack information");
-    return;
-  }
-  console.log("Trace back (most recent call first):\n");
-  var re = new RegExp("(.*?)@([^:]*):(.*)$");
-  var lines = stack.split("\n");
-  for(var i = 0; i < lines.length; i++) {
-    var line = lines[i];
-    if(!line.length) continue;
-    var match = re.exec(line);
-    if(!match) continue;
-    var source = match[1].substr(0, 70);
-    var file = match[2];
-    var lnum = match[3];
-    while(lnum.length < 3) lnum = " " + lnum;
-    console.log(" " + lnum + ": " + file);
-    console.log("      " + source);
-  }
-} 
-
-function T(arg1, arg2) {
-  if(!arg1) {
-    var result = (arg2 ? arg2 : arg1);
-    throw((result instanceof Error ? result : Error(result)));
-  }
-} 
-
-function waitForSuccess(fun, tag) {
-  var start = new Date().getTime();
-  var complete = false;
-  
-  while (!complete) {
-    var now = new Date().getTime();
-    if (now > start + 5000) {
-      complete = true;
-      print('FAIL');
-      print(tag);
-      quit(1);
-    }
-    try {
-      while (new Date().getTime() < now + 500);
-      complete = fun();
-    } catch (e) {}
-  }
-}
-
-function restartServer() {
-  print('restart');
-  var start = new Date().getTime();
-  while (new Date().getTime() < start + 1000);
-  waitForSuccess(CouchDB.isRunning, 'restart');
-}
-
-/*
- * If last_req is an object, we got something back. This might be an error, but
- * CouchDB is up and running!
- */
-CouchDB.isRunning = function() {
-  CouchDB.last_req = CouchDB.request("GET", "/");
-  return typeof CouchDB.last_req == 'object';
-};

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/test/random_port.ini
----------------------------------------------------------------------
diff --git a/test/random_port.ini b/test/random_port.ini
deleted file mode 100644
index 2b2d130..0000000
--- a/test/random_port.ini
+++ /dev/null
@@ -1,19 +0,0 @@
-; Licensed to the Apache Software Foundation (ASF) under one
-; or more contributor license agreements.  See the NOTICE file
-; distributed with this work for additional information
-; regarding copyright ownership.  The ASF licenses this file
-; to you under the Apache License, Version 2.0 (the
-; "License"); you may not use this file except in compliance
-; with the License.  You may obtain a copy of the License at
-;
-;   http://www.apache.org/licenses/LICENSE-2.0
-;
-; Unless required by applicable law or agreed to in writing,
-; software distributed under the License is distributed on an
-; "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-; KIND, either express or implied.  See the License for the
-; specific language governing permissions and limitations
-; under the License.
-
-[httpd]
-port = 0

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/test/view_server/Makefile.am
----------------------------------------------------------------------
diff --git a/test/view_server/Makefile.am b/test/view_server/Makefile.am
deleted file mode 100644
index 11e7feb..0000000
--- a/test/view_server/Makefile.am
+++ /dev/null
@@ -1,15 +0,0 @@
-## Licensed under the Apache License, Version 2.0 (the "License"); you may not
-## use this file except in compliance with the License. You may obtain a copy of
-## the License at
-##
-##   http://www.apache.org/licenses/LICENSE-2.0
-##
-## Unless required by applicable law or agreed to in writing, software
-## distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-## WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-## License for the specific language governing permissions and limitations under
-## the License.
-
-EXTRA_DIST = \
-	query_server_spec.rb \
-	run_native_process.es

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/test/view_server/query_server_spec.rb
----------------------------------------------------------------------
diff --git a/test/view_server/query_server_spec.rb b/test/view_server/query_server_spec.rb
deleted file mode 100644
index c53daff..0000000
--- a/test/view_server/query_server_spec.rb
+++ /dev/null
@@ -1,824 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License"); you may not
-# use this file except in compliance with the License. You may obtain a copy of
-# the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations under
-# the License.
-
-# to run (requires ruby and rspec):
-# spec test/view_server/query_server_spec.rb -f specdoc --color
-# 
-# environment options:
-#   QS_TRACE=true
-#     shows full output from the query server
-#   QS_LANG=lang
-#     run tests on the query server (for now, one of: js, erlang)
-# 
-
-COUCH_ROOT = "#{File.dirname(__FILE__)}/../.." unless defined?(COUCH_ROOT)
-LANGUAGE = ENV["QS_LANG"] || "js"
-
-puts "Running query server specs for #{LANGUAGE} query server"
-
-require 'rspec'
-require 'json'
-
-class OSProcessRunner
-  def self.run
-    trace = ENV["QS_TRACE"] || false
-    puts "launching #{run_command}" if trace
-    if block_given?
-      IO.popen(run_command, "r+") do |io|
-        qs = QueryServerRunner.new(io, trace)
-        yield qs
-      end
-    else
-      io = IO.popen(run_command, "r+")
-      QueryServerRunner.new(io, trace)
-    end
-  end
-  def initialize io, trace = false
-    @qsio = io
-    @trace = trace
-  end
-  def close
-    @qsio.close
-  end
-  def reset!
-    run(["reset"])
-  end
-  def add_fun(fun)
-    run(["add_fun", fun])
-  end
-  def teach_ddoc(ddoc)
-    run(["ddoc", "new", ddoc_id(ddoc), ddoc])
-  end
-  def ddoc_run(ddoc, fun_path, args)
-    run(["ddoc", ddoc_id(ddoc), fun_path, args])
-  end
-  def ddoc_id(ddoc)
-    d_id = ddoc["_id"]
-    raise 'ddoc must have _id' unless d_id
-    d_id
-  end
-  def get_chunks
-    resp = jsgets
-    raise "not a chunk" unless resp.first == "chunks"
-    return resp[1]
-  end
-  def run json
-    rrun json
-    jsgets
-  end
-  def rrun json
-    line = json.to_json
-    puts "run: #{line}" if @trace
-    @qsio.puts line
-  end
-  def rgets
-    resp = @qsio.gets
-    puts "got: #{resp}"  if @trace
-    resp
-  end
-  def jsgets
-    resp = rgets
-    # err = @qserr.gets
-    # puts "err: #{err}" if err
-    if resp
-      begin
-        rj = JSON.parse("[#{resp.chomp}]")[0]
-      rescue JSON::ParserError
-        puts "JSON ERROR (dump under trace mode)"
-        # puts resp.chomp
-        while resp = rgets
-          # puts resp.chomp
-        end
-      end
-      if rj.respond_to?(:[]) && rj.is_a?(Array)
-        if rj[0] == "log"
-          log = rj[1]
-          puts "log: #{log}" if @trace
-          rj = jsgets
-        end
-      end
-      rj
-    else
-      raise "no response"
-    end
-  end
-end
-
-class QueryServerRunner < OSProcessRunner
-
-  COMMANDS = {
-    "js" => "#{COUCH_ROOT}/bin/couchjs_dev #{COUCH_ROOT}/share/server/main.js",
-    "erlang" => "#{COUCH_ROOT}/test/view_server/run_native_process.es"
-  }
-
-  def self.run_command
-    COMMANDS[LANGUAGE]
-  end
-end
-
-class ExternalRunner < OSProcessRunner
-  def self.run_command
-    "#{COUCH_ROOT}/src/couchdb/couchjs #{COUCH_ROOT}/share/server/echo.js"
-  end
-end
-
-# we could organize this into a design document per language.
-# that would make testing future languages really easy.
-
-functions = {
-  "emit-twice" => {
-    "js" => %{function(doc){emit("foo",doc.a); emit("bar",doc.a)}},
-    "erlang" => <<-ERLANG
-      fun({Doc}) ->
-        A = couch_util:get_value(<<"a">>, Doc, null),
-        Emit(<<"foo">>, A),
-        Emit(<<"bar">>, A)
-      end.
-    ERLANG
-  },
-  "emit-once" => {
-    "js" => <<-JS,
-      function(doc){
-        emit("baz",doc.a)
-      }
-      JS
-    "erlang" => <<-ERLANG
-        fun({Doc}) ->
-            A = couch_util:get_value(<<"a">>, Doc, null),
-            Emit(<<"baz">>, A)
-        end.
-    ERLANG
-  },
-  "reduce-values-length" => {
-    "js" => %{function(keys, values, rereduce) { return values.length; }},
-    "erlang" => %{fun(Keys, Values, ReReduce) -> length(Values) end.}
-  },
-  "reduce-values-sum" => {
-    "js" => %{function(keys, values, rereduce) { return sum(values); }},
-    "erlang" => %{fun(Keys, Values, ReReduce) -> lists:sum(Values) end.}
-  },
-  "validate-forbidden" => {
-    "js" => <<-JS,
-      function(newDoc, oldDoc, userCtx) {
-        if(newDoc.bad)
-          throw({forbidden:"bad doc"}); "foo bar";
-      }
-      JS
-    "erlang" => <<-ERLANG
-      fun({NewDoc}, _OldDoc, _UserCtx) ->
-        case couch_util:get_value(<<"bad">>, NewDoc) of
-            undefined -> 1;
-            _ -> {[{forbidden, <<"bad doc">>}]}
-        end
-      end.
-    ERLANG
-  },
-  "show-simple" => {
-    "js" => <<-JS,
-        function(doc, req) {
-            log("ok");
-            return [doc.title, doc.body].join(' - ');
-        }
-    JS
-    "erlang" => <<-ERLANG
-      fun({Doc}, Req) ->
-            Title = couch_util:get_value(<<"title">>, Doc),
-            Body = couch_util:get_value(<<"body">>, Doc),
-            Resp = <<Title/binary, " - ", Body/binary>>,
-        {[{<<"body">>, Resp}]}
-      end.
-    ERLANG
-  },
-  "show-headers" => {
-    "js" => <<-JS,
-        function(doc, req) {
-          var resp = {"code":200, "headers":{"X-Plankton":"Rusty"}};
-          resp.body = [doc.title, doc.body].join(' - ');
-          return resp;
-        }
-     JS
-    "erlang" => <<-ERLANG
-  fun({Doc}, Req) ->
-        Title = couch_util:get_value(<<"title">>, Doc),
-        Body = couch_util:get_value(<<"body">>, Doc),
-        Resp = <<Title/binary, " - ", Body/binary>>,
-        {[
-        {<<"code">>, 200},
-        {<<"headers">>, {[{<<"X-Plankton">>, <<"Rusty">>}]}},
-        {<<"body">>, Resp}
-      ]}
-  end.
-    ERLANG
-  },
-  "show-sends" => {
-    "js" =>  <<-JS,
-        function(head, req) {
-          start({headers:{"Content-Type" : "text/plain"}});
-          send("first chunk");
-          send('second "chunk"');
-          return "tail";
-        };
-    JS
-    "erlang" => <<-ERLANG
-      fun(Head, Req) ->
-        Resp = {[
-          {<<"headers">>, {[{<<"Content-Type">>, <<"text/plain">>}]}}
-        ]},
-        Start(Resp),
-        Send(<<"first chunk">>),
-        Send(<<"second \\\"chunk\\\"">>),
-        <<"tail">>
-      end.
-    ERLANG
-  },
-  "show-while-get-rows" => {
-    "js" =>  <<-JS,
-        function(head, req) {
-          send("first chunk");
-          send(req.q);
-          var row;
-          log("about to getRow " + typeof(getRow));
-          while(row = getRow()) {
-            send(row.key);
-          };
-          return "tail";
-        };
-    JS
-    "erlang" => <<-ERLANG,
-        fun(Head, {Req}) ->
-            Send(<<"first chunk">>),
-            Send(couch_util:get_value(<<"q">>, Req)),
-            Fun = fun({Row}, _) ->
-                Send(couch_util:get_value(<<"key">>, Row)),
-                {ok, nil}
-            end,
-            {ok, _} = FoldRows(Fun, nil),
-            <<"tail">>
-        end.
-    ERLANG
-  },
-  "show-while-get-rows-multi-send" => {
-    "js" => <<-JS,
-        function(head, req) {
-          send("bacon");
-          var row;
-          log("about to getRow " + typeof(getRow));
-          while(row = getRow()) {
-            send(row.key);
-            send("eggs");
-          };
-          return "tail";
-        };
-    JS
-    "erlang" => <<-ERLANG,
-        fun(Head, Req) ->
-            Send(<<"bacon">>),
-            Fun = fun({Row}, _) ->
-                Send(couch_util:get_value(<<"key">>, Row)),
-                Send(<<"eggs">>),
-                {ok, nil}
-            end,
-            FoldRows(Fun, nil),
-            <<"tail">>
-        end.
-    ERLANG
-  },
-  "list-simple" => {
-    "js" => <<-JS,
-        function(head, req) {
-          send("first chunk");
-          send(req.q);
-          var row;
-          while(row = getRow()) {
-            send(row.key);
-          };
-          return "early";
-        };
-    JS
-    "erlang" => <<-ERLANG,
-        fun(Head, {Req}) ->
-            Send(<<"first chunk">>),
-            Send(couch_util:get_value(<<"q">>, Req)),
-            Fun = fun({Row}, _) ->
-                Send(couch_util:get_value(<<"key">>, Row)),
-                {ok, nil}
-            end,
-            FoldRows(Fun, nil),
-            <<"early">>
-        end.
-    ERLANG
-  },
-  "list-chunky" => {
-    "js" => <<-JS,
-        function(head, req) {
-          send("first chunk");
-          send(req.q);
-          var row, i=0;
-          while(row = getRow()) {
-            send(row.key);
-            i += 1;
-            if (i > 2) {
-              return('early tail');
-            }
-          };
-        };
-    JS
-    "erlang" => <<-ERLANG,
-        fun(Head, {Req}) ->
-            Send(<<"first chunk">>),
-            Send(couch_util:get_value(<<"q">>, Req)),
-            Fun = fun
-                ({Row}, Count) when Count < 2 ->
-                    Send(couch_util:get_value(<<"key">>, Row)),
-                    {ok, Count+1};
-                ({Row}, Count) when Count == 2 ->
-                    Send(couch_util:get_value(<<"key">>, Row)),
-                    {stop, <<"early tail">>}
-            end,
-            {ok, Tail} = FoldRows(Fun, 0),
-            Tail
-        end.
-    ERLANG
-  },
-  "list-old-style" => {
-    "js" => <<-JS,
-        function(head, req, foo, bar) {
-          return "stuff";
-        }
-    JS
-    "erlang" => <<-ERLANG,
-        fun(Head, Req, Foo, Bar) ->
-            <<"stuff">>
-        end.
-    ERLANG
-  },
-  "list-capped" => {
-    "js" => <<-JS,
-        function(head, req) {
-          send("bacon")
-          var row, i = 0;
-          while(row = getRow()) {
-            send(row.key);
-            i += 1;
-            if (i > 2) {
-              return('early');
-            }
-          };
-        }
-    JS
-    "erlang" => <<-ERLANG,
-        fun(Head, Req) ->
-            Send(<<"bacon">>),
-            Fun = fun
-                ({Row}, Count) when Count < 2 ->
-                    Send(couch_util:get_value(<<"key">>, Row)),
-                    {ok, Count+1};
-                ({Row}, Count) when Count == 2 ->
-                    Send(couch_util:get_value(<<"key">>, Row)),
-                    {stop, <<"early">>}
-            end,
-            {ok, Tail} = FoldRows(Fun, 0),
-            Tail
-        end.
-    ERLANG
-  },
-  "list-raw" => {
-    "js" => <<-JS,
-        function(head, req) {
-          // log(this.toSource());
-          // log(typeof send);
-          send("first chunk");
-          send(req.q);
-          var row;
-          while(row = getRow()) {
-            send(row.key);
-          };
-          return "tail";
-        };
-    JS
-    "erlang" => <<-ERLANG,
-        fun(Head, {Req}) ->
-            Send(<<"first chunk">>),
-            Send(couch_util:get_value(<<"q">>, Req)),
-            Fun = fun({Row}, _) ->
-                Send(couch_util:get_value(<<"key">>, Row)),
-                {ok, nil}
-            end,
-            FoldRows(Fun, nil),
-            <<"tail">>
-        end.
-    ERLANG
-  },
-  "filter-basic" => {
-    "js" => <<-JS,
-      function(doc, req) {
-        if (doc.good) {
-          return true;
-        }
-      }
-    JS
-    "erlang" => <<-ERLANG,
-        fun({Doc}, Req) ->
-            couch_util:get_value(<<"good">>, Doc)
-        end.
-    ERLANG
-  },
-  "update-basic" => {
-    "js" => <<-JS,
-    function(doc, req) {
-      doc.world = "hello";
-      var resp = [doc, "hello doc"];
-      return resp;
-    }
-    JS
-    "erlang" => <<-ERLANG,
-        fun({Doc}, Req) ->
-            Doc2 = [{<<"world">>, <<"hello">>}|Doc],
-            [{Doc2}, {[{<<"body">>, <<"hello doc">>}]}]
-        end.
-    ERLANG
-  },
-  "error" => {
-    "js" => <<-JS,
-    function() {
-      throw(["error","error_key","testing"]);
-    }
-    JS
-    "erlang" => <<-ERLANG
-    fun(A, B) ->
-      throw([<<"error">>,<<"error_key">>,<<"testing">>])
-    end.
-    ERLANG
-  },
-  "fatal" => {
-    "js" => <<-JS,
-    function() {
-      throw(["fatal","error_key","testing"]);
-    }
-    JS
-    "erlang" => <<-ERLANG
-    fun(A, B) ->
-      throw([<<"fatal">>,<<"error_key">>,<<"testing">>])
-    end.
-    ERLANG
-  }
-}
-
-def make_ddoc(fun_path, fun_str)
-  doc = {"_id"=>"foo"}
-  d = doc
-  while p = fun_path.shift
-    l = p
-    if !fun_path.empty?
-      d[p] = {}
-      d = d[p]
-    end
-  end
-  d[l] = fun_str
-  doc
-end
-
-describe "query server normal case" do
-  before(:all) do
-    `cd #{COUCH_ROOT} && make`
-    @qs = QueryServerRunner.run
-  end
-  after(:all) do
-    @qs.close
-  end
-  it "should reset" do
-    @qs.run(["reset"]).should == true
-  end
-  it "should not erase ddocs on reset" do
-    @fun = functions["show-simple"][LANGUAGE]
-    @ddoc = make_ddoc(["shows","simple"], @fun)
-    @qs.teach_ddoc(@ddoc)
-    @qs.run(["reset"]).should == true   
-    @qs.ddoc_run(@ddoc, 
-      ["shows","simple"], 
-      [{:title => "Best ever", :body => "Doc body"}, {}]).should ==
-    ["resp", {"body" => "Best ever - Doc body"}] 
-  end
-  
-  it "should run map funs" do
-    @qs.reset!
-    @qs.run(["add_fun", functions["emit-twice"][LANGUAGE]]).should == true
-    @qs.run(["add_fun", functions["emit-once"][LANGUAGE]]).should == true
-    rows = @qs.run(["map_doc", {:a => "b"}])
-    rows[0][0].should == ["foo", "b"]
-    rows[0][1].should == ["bar", "b"]
-    rows[1][0].should == ["baz", "b"]
-  end
-  describe "reduce" do
-    before(:all) do
-      @fun = functions["reduce-values-length"][LANGUAGE]
-      @qs.reset!
-    end
-    it "should reduce" do
-      kvs = (0...10).collect{|i|[i,i*2]}
-      @qs.run(["reduce", [@fun], kvs]).should == [true, [10]]
-    end
-  end
-  describe "rereduce" do
-    before(:all) do
-      @fun = functions["reduce-values-sum"][LANGUAGE]
-      @qs.reset!
-    end
-    it "should rereduce" do
-      vs = (0...10).collect{|i|i}
-      @qs.run(["rereduce", [@fun], vs]).should == [true, [45]]
-    end
-  end
-
-  describe "design docs" do
-    before(:all) do
-      @ddoc = {
-        "_id" => "foo"
-      }
-      @qs.reset!
-    end
-    it "should learn design docs" do
-      @qs.teach_ddoc(@ddoc).should == true
-    end
-  end
-
-  # it "should validate"
-  describe "validation" do
-    before(:all) do
-      @fun = functions["validate-forbidden"][LANGUAGE]
-      @ddoc = make_ddoc(["validate_doc_update"], @fun)
-      @qs.teach_ddoc(@ddoc)
-    end
-    it "should allow good updates" do
-      @qs.ddoc_run(@ddoc, 
-        ["validate_doc_update"], 
-        [{"good" => true}, {}, {}]).should == 1
-    end
-    it "should reject invalid updates" do
-      @qs.ddoc_run(@ddoc, 
-        ["validate_doc_update"], 
-        [{"bad" => true}, {}, {}]).should == {"forbidden"=>"bad doc"}
-    end
-  end
-
-  describe "show" do
-    before(:all) do
-      @fun = functions["show-simple"][LANGUAGE]
-      @ddoc = make_ddoc(["shows","simple"], @fun)
-      @qs.teach_ddoc(@ddoc)
-    end
-    it "should show" do
-      @qs.ddoc_run(@ddoc, 
-        ["shows","simple"], 
-        [{:title => "Best ever", :body => "Doc body"}, {}]).should ==
-      ["resp", {"body" => "Best ever - Doc body"}]
-    end
-  end
-
-  describe "show with headers" do
-    before(:all) do
-      # TODO we can make real ddocs up there. 
-      @fun = functions["show-headers"][LANGUAGE]
-      @ddoc = make_ddoc(["shows","headers"], @fun)
-      @qs.teach_ddoc(@ddoc)
-    end
-    it "should show headers" do
-      @qs.ddoc_run(
-        @ddoc, 
-        ["shows","headers"], 
-        [{:title => "Best ever", :body => "Doc body"}, {}]
-      ).
-      should == ["resp", {"code"=>200,"headers" => {"X-Plankton"=>"Rusty"}, "body" => "Best ever - Doc body"}]
-    end
-  end
-  
-  describe "recoverable error" do
-    before(:all) do
-      @fun = functions["error"][LANGUAGE]
-      @ddoc = make_ddoc(["shows","error"], @fun)
-      @qs.teach_ddoc(@ddoc)
-    end
-    it "should not exit" do
-      @qs.ddoc_run(@ddoc, ["shows","error"],
-        [{"foo"=>"bar"}, {"q" => "ok"}]).
-        should == ["error", "error_key", "testing"]
-      # still running
-      @qs.run(["reset"]).should == true
-    end
-  end
-  
-  describe "changes filter" do
-    before(:all) do
-      @fun = functions["filter-basic"][LANGUAGE]
-      @ddoc = make_ddoc(["filters","basic"], @fun)
-      @qs.teach_ddoc(@ddoc)
-    end
-    it "should only return true for good docs" do
-      @qs.ddoc_run(@ddoc, 
-        ["filters","basic"], 
-        [[{"key"=>"bam", "good" => true}, {"foo" => "bar"}, {"good" => true}], {"req" => "foo"}]
-      ).
-      should == [true, [true, false, true]]
-    end
-  end
-  
-  describe "update" do
-    before(:all) do
-      # in another patch we can remove this duplication
-      # by setting up the design doc for each language ahead of time.
-      @fun = functions["update-basic"][LANGUAGE]
-      @ddoc = make_ddoc(["updates","basic"], @fun)
-      @qs.teach_ddoc(@ddoc)
-    end
-    it "should return a doc and a resp body" do
-      up, doc, resp = @qs.ddoc_run(@ddoc, 
-        ["updates","basic"], 
-        [{"foo" => "gnarly"}, {"method" => "POST"}]
-      )
-      up.should == "up"
-      doc.should == {"foo" => "gnarly", "world" => "hello"}
-      resp["body"].should == "hello doc"
-    end
-  end
-
-# end
-#                    LIST TESTS
-# __END__
-
-  describe "ddoc list" do
-      before(:all) do
-        @ddoc = {
-          "_id" => "foo",
-          "lists" => {
-            "simple" => functions["list-simple"][LANGUAGE],
-            "headers" => functions["show-sends"][LANGUAGE],
-            "rows" => functions["show-while-get-rows"][LANGUAGE],
-            "buffer-chunks" => functions["show-while-get-rows-multi-send"][LANGUAGE],
-            "chunky" => functions["list-chunky"][LANGUAGE]
-          }
-        }
-        @qs.teach_ddoc(@ddoc)
-      end
-      
-      describe "example list" do
-        it "should run normal" do
-          @qs.ddoc_run(@ddoc,
-            ["lists","simple"],
-            [{"foo"=>"bar"}, {"q" => "ok"}]
-          ).should == ["start", ["first chunk", "ok"], {"headers"=>{}}]
-          @qs.run(["list_row", {"key"=>"baz"}]).should ==  ["chunks", ["baz"]]
-          @qs.run(["list_row", {"key"=>"bam"}]).should ==  ["chunks", ["bam"]]
-          @qs.run(["list_row", {"key"=>"foom"}]).should == ["chunks", ["foom"]]
-          @qs.run(["list_row", {"key"=>"fooz"}]).should == ["chunks", ["fooz"]]
-          @qs.run(["list_row", {"key"=>"foox"}]).should == ["chunks", ["foox"]]
-          @qs.run(["list_end"]).should == ["end" , ["early"]]
-        end
-      end
-      
-      describe "headers" do
-        it "should do headers proper" do
-          @qs.ddoc_run(@ddoc, ["lists","headers"], 
-            [{"total_rows"=>1000}, {"q" => "ok"}]
-          ).should == ["start", ["first chunk", 'second "chunk"'], 
-            {"headers"=>{"Content-Type"=>"text/plain"}}]
-          @qs.rrun(["list_end"])
-          @qs.jsgets.should == ["end", ["tail"]]
-        end
-      end
-
-      describe "with rows" do
-        it "should list em" do
-          @qs.ddoc_run(@ddoc, ["lists","rows"], 
-            [{"foo"=>"bar"}, {"q" => "ok"}]).
-            should == ["start", ["first chunk", "ok"], {"headers"=>{}}]
-          @qs.rrun(["list_row", {"key"=>"baz"}])
-          @qs.get_chunks.should == ["baz"]
-          @qs.rrun(["list_row", {"key"=>"bam"}])
-          @qs.get_chunks.should == ["bam"]
-          @qs.rrun(["list_end"])
-          @qs.jsgets.should == ["end", ["tail"]]
-        end
-        it "should work with zero rows" do
-          @qs.ddoc_run(@ddoc, ["lists","rows"],
-            [{"foo"=>"bar"}, {"q" => "ok"}]).
-            should == ["start", ["first chunk", "ok"], {"headers"=>{}}]
-          @qs.rrun(["list_end"])
-          @qs.jsgets.should == ["end", ["tail"]]
-        end
-      end
-      
-      describe "should buffer multiple chunks sent for a single row." do
-        it "should should buffer em" do
-          @qs.ddoc_run(@ddoc, ["lists","buffer-chunks"],
-            [{"foo"=>"bar"}, {"q" => "ok"}]).
-            should == ["start", ["bacon"], {"headers"=>{}}]
-          @qs.rrun(["list_row", {"key"=>"baz"}])
-          @qs.get_chunks.should == ["baz", "eggs"]
-          @qs.rrun(["list_row", {"key"=>"bam"}])
-          @qs.get_chunks.should == ["bam", "eggs"]
-          @qs.rrun(["list_end"])
-          @qs.jsgets.should == ["end", ["tail"]]
-        end
-      end
-      it "should end after 2" do
-        @qs.ddoc_run(@ddoc, ["lists","chunky"],
-          [{"foo"=>"bar"}, {"q" => "ok"}]).
-          should == ["start", ["first chunk", "ok"], {"headers"=>{}}]
-          
-        @qs.run(["list_row", {"key"=>"baz"}]).
-          should ==  ["chunks", ["baz"]]
-
-        @qs.run(["list_row", {"key"=>"bam"}]).
-          should ==  ["chunks", ["bam"]]
-
-        @qs.run(["list_row", {"key"=>"foom"}]).
-          should == ["end", ["foom", "early tail"]]
-        # here's where js has to discard quit properly
-        @qs.run(["reset"]).
-          should == true
-      end
-    end
-  end
-
-
-
-def should_have_exited qs
-  begin
-    qs.run(["reset"])
-    "raise before this (except Erlang)".should == true
-  rescue RuntimeError => e
-    e.message.should == "no response"
-  rescue Errno::EPIPE
-    true.should == true
-  end
-end
-
-describe "query server that exits" do
-  before(:each) do
-    @qs = QueryServerRunner.run
-    @ddoc = {
-      "_id" => "foo",
-      "lists" => {
-        "capped" => functions["list-capped"][LANGUAGE],
-        "raw" => functions["list-raw"][LANGUAGE]
-      },
-      "shows" => {
-        "fatal" => functions["fatal"][LANGUAGE]
-      }
-    }
-    @qs.teach_ddoc(@ddoc)
-  end
-  after(:each) do
-    @qs.close
-  end
-
-  describe "only goes to 2 list" do
-    it "should exit if erlang sends too many rows" do
-      @qs.ddoc_run(@ddoc, ["lists","capped"],
-        [{"foo"=>"bar"}, {"q" => "ok"}]).
-        should == ["start", ["bacon"], {"headers"=>{}}]
-      @qs.run(["list_row", {"key"=>"baz"}]).should ==  ["chunks", ["baz"]]
-      @qs.run(["list_row", {"key"=>"foom"}]).should == ["chunks", ["foom"]]
-      @qs.run(["list_row", {"key"=>"fooz"}]).should == ["end", ["fooz", "early"]]
-      e = @qs.run(["list_row", {"key"=>"foox"}])
-      e[0].should == "error"
-      e[1].should == "unknown_command"
-      should_have_exited @qs
-    end
-  end
-
-  describe "raw list" do
-    it "should exit if it gets a non-row in the middle" do
-      @qs.ddoc_run(@ddoc, ["lists","raw"],
-        [{"foo"=>"bar"}, {"q" => "ok"}]).
-        should == ["start", ["first chunk", "ok"], {"headers"=>{}}]
-      e = @qs.run(["reset"])
-      e[0].should == "error"
-      e[1].should == "list_error"
-      should_have_exited @qs
-    end
-  end
-  
-  describe "fatal error" do
-    it "should exit" do
-      @qs.ddoc_run(@ddoc, ["shows","fatal"],
-        [{"foo"=>"bar"}, {"q" => "ok"}]).
-        should == ["error", "error_key", "testing"]
-      should_have_exited @qs
-    end
-  end
-end
-
-describe "thank you for using the tests" do
-  it "for more info run with QS_TRACE=true or see query_server_spec.rb file header" do
-  end
-end


[10/12] move test -> src/test

Posted by be...@apache.org.
http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/src/test/etap/042-work-queue.t
----------------------------------------------------------------------
diff --git a/src/test/etap/042-work-queue.t b/src/test/etap/042-work-queue.t
new file mode 100755
index 0000000..8594a6f
--- /dev/null
+++ b/src/test/etap/042-work-queue.t
@@ -0,0 +1,500 @@
+#!/usr/bin/env escript
+%% -*- erlang -*-
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+%   http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+main(_) ->
+    test_util:init_code_path(),
+
+    etap:plan(155),
+    case (catch test()) of
+        ok ->
+            etap:end_tests();
+        Other ->
+            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
+            etap:bail(Other)
+    end,
+    ok.
+
+
+test() ->
+    ok = crypto:start(),
+    test_single_consumer_max_item_count(),
+    test_single_consumer_max_size(),
+    test_single_consumer_max_item_count_and_size(),
+    test_multiple_consumers(),
+    ok.
+
+
+test_single_consumer_max_item_count() ->
+    etap:diag("Spawning a queue with 3 max items, 1 producer and 1 consumer"),
+
+    {ok, Q} = couch_work_queue:new([{max_items, 3}]),
+    Producer = spawn_producer(Q),
+    Consumer = spawn_consumer(Q),
+
+    etap:is(couch_work_queue:item_count(Q), 0, "Queue item count is 0"),
+
+    consume(Consumer, 1),
+    etap:is(ping(Consumer), timeout,
+        "Consumer blocked when attempting to dequeue 1 item from empty queue"),
+
+    Item1 = produce(Producer, 10),
+    etap:is(ping(Producer), ok, "Producer not blocked"),
+
+    etap:is(ping(Consumer), ok, "Consumer unblocked"),
+    etap:is(last_consumer_items(Consumer), {ok, [Item1]},
+        "Consumer received the right item"),
+
+    Item2 = produce(Producer, 20),
+    etap:is(ping(Producer), ok, "Producer not blocked with non full queue"),
+    etap:is(couch_work_queue:item_count(Q), 1, "Queue item count is 1"),
+
+    Item3 = produce(Producer, 15),
+    etap:is(ping(Producer), ok, "Producer not blocked with non full queue"),
+    etap:is(couch_work_queue:item_count(Q), 2, "Queue item count is 2"),
+
+    Item4 = produce(Producer, 3),
+    etap:is(couch_work_queue:item_count(Q), 3, "Queue item count is 3"),
+    etap:is(ping(Producer), timeout, "Producer blocked with full queue"),
+
+    consume(Consumer, 2),
+    etap:is(ping(Consumer), ok,
+        "Consumer not blocked when attempting to dequeue 2 items from queue"),
+    etap:is(last_consumer_items(Consumer), {ok, [Item2, Item3]},
+        "Consumer received the right items"),
+    etap:is(couch_work_queue:item_count(Q), 1, "Queue item count is 1"),
+
+    consume(Consumer, 2),
+    etap:is(ping(Consumer), ok,
+        "Consumer not blocked when attempting to dequeue 2 items from queue"),
+    etap:is(last_consumer_items(Consumer), {ok, [Item4]},
+        "Consumer received the right item"),
+    etap:is(couch_work_queue:item_count(Q), 0, "Queue item count is 0"),
+
+    consume(Consumer, 100),
+    etap:is(ping(Consumer), timeout,
+        "Consumer blocked when attempting to dequeue 100 items from empty queue"),
+    etap:is(couch_work_queue:item_count(Q), 0, "Queue item count is 0"),
+
+    Item5 = produce(Producer, 11),
+    etap:is(ping(Producer), ok, "Producer not blocked with empty queue"),
+    etap:is(couch_work_queue:item_count(Q), 0, "Queue item count is 0"),
+
+    Item6 = produce(Producer, 19),
+    etap:is(ping(Producer), ok, "Producer not blocked with non full queue"),
+    etap:is(couch_work_queue:item_count(Q), 1, "Queue item count is 1"),
+
+    Item7 = produce(Producer, 2),
+    etap:is(ping(Producer), ok, "Producer not blocked with non full queue"),
+    etap:is(couch_work_queue:item_count(Q), 2, "Queue item count is 2"),
+
+    Item8 = produce(Producer, 33),
+    etap:is(ping(Producer), timeout, "Producer blocked with full queue"),
+    etap:is(couch_work_queue:item_count(Q), 3, "Queue item count is 3"),
+
+    etap:is(ping(Consumer), ok, "Consumer unblocked"),
+    etap:is(last_consumer_items(Consumer), {ok, [Item5]},
+        "Consumer received the first queued item"),
+    etap:is(couch_work_queue:item_count(Q), 3, "Queue item count is 3"),
+
+    consume(Consumer, all),
+    etap:is(ping(Consumer), ok,
+        "Consumer not blocked when attempting to dequeue all items from queue"),
+    etap:is(last_consumer_items(Consumer), {ok, [Item6, Item7, Item8]},
+        "Consumer received all queued items"),
+
+    etap:is(couch_work_queue:item_count(Q), 0, "Queue item count is 0"),
+
+    etap:is(close_queue(Q), ok, "Closed queue"),
+    consume(Consumer, 1),
+    etap:is(last_consumer_items(Consumer), closed, "Consumer got closed queue"),
+    etap:is(couch_work_queue:item_count(Q), closed, "Queue closed"),
+    etap:is(couch_work_queue:size(Q), closed, "Queue closed"),
+
+    stop(Producer, "producer"),
+    stop(Consumer, "consumer").
+
+
+
+test_single_consumer_max_size() ->
+    etap:diag("Spawning a queue with max size of 160 bytes, "
+        "1 producer and 1 consumer"),
+
+    {ok, Q} = couch_work_queue:new([{max_size, 160}]),
+    Producer = spawn_producer(Q),
+    Consumer = spawn_consumer(Q),
+
+    etap:is(couch_work_queue:item_count(Q), 0, "Queue item count is 0"),
+    etap:is(couch_work_queue:size(Q), 0, "Queue size is 0 bytes"),
+
+    consume(Consumer, 1),
+    etap:is(ping(Consumer), timeout,
+        "Consumer blocked when attempting to dequeue 1 item from empty queue"),
+
+    Item1 = produce(Producer, 50),
+    etap:is(ping(Producer), ok, "Producer not blocked"),
+
+    etap:is(ping(Consumer), ok, "Consumer unblocked"),
+    etap:is(last_consumer_items(Consumer), {ok, [Item1]},
+        "Consumer received the right item"),
+
+    etap:is(couch_work_queue:item_count(Q), 0, "Queue item count is 0"),
+    etap:is(couch_work_queue:size(Q), 0, "Queue size is 0 bytes"),
+
+    Item2 = produce(Producer, 50),
+    etap:is(ping(Producer), ok, "Producer not blocked"),
+    etap:is(couch_work_queue:item_count(Q), 1, "Queue item count is 1"),
+    etap:is(couch_work_queue:size(Q), 50, "Queue size is 50 bytes"),
+
+    Item3 = produce(Producer, 50),
+    etap:is(ping(Producer), ok, "Producer not blocked"),
+    etap:is(couch_work_queue:item_count(Q), 2, "Queue item count is 2"),
+    etap:is(couch_work_queue:size(Q), 100, "Queue size is 100 bytes"),
+
+    Item4 = produce(Producer, 61),
+    etap:is(ping(Producer), timeout, "Producer blocked"),
+    etap:is(couch_work_queue:item_count(Q), 3, "Queue item count is 3"),
+    etap:is(couch_work_queue:size(Q), 161, "Queue size is 161 bytes"),
+
+    consume(Consumer, 1),
+    etap:is(ping(Consumer), ok,
+        "Consumer not blocked when attempting to dequeue 1 item from full queue"),
+    etap:is(last_consumer_items(Consumer), {ok, [Item2]},
+        "Consumer received the right item"),
+    etap:is(couch_work_queue:item_count(Q), 2, "Queue item count is 2"),
+    etap:is(couch_work_queue:size(Q), 111, "Queue size is 111 bytes"),
+
+    Item5 = produce(Producer, 20),
+    etap:is(ping(Producer), ok, "Producer not blocked"),
+    etap:is(couch_work_queue:item_count(Q), 3, "Queue item count is 3"),
+    etap:is(couch_work_queue:size(Q), 131, "Queue size is 131 bytes"),
+
+    Item6 = produce(Producer, 40),
+    etap:is(ping(Producer), timeout, "Producer blocked"),
+    etap:is(couch_work_queue:item_count(Q), 4, "Queue item count is 4"),
+    etap:is(couch_work_queue:size(Q), 171, "Queue size is 171 bytes"),
+
+    etap:is(close_queue(Q), timeout,
+        "Timeout when trying to close non-empty queue"),
+
+    consume(Consumer, 2),
+    etap:is(ping(Consumer), ok,
+        "Consumer not blocked when attempting to dequeue 2 items from full queue"),
+    etap:is(last_consumer_items(Consumer), {ok, [Item3, Item4]},
+        "Consumer received the right items"),
+    etap:is(couch_work_queue:item_count(Q), 2, "Queue item count is 2"),
+    etap:is(couch_work_queue:size(Q), 60, "Queue size is 60 bytes"),
+
+    etap:is(close_queue(Q), timeout,
+        "Timeout when trying to close non-empty queue"),
+
+    consume(Consumer, all),
+    etap:is(ping(Consumer), ok,
+        "Consumer not blocked when attempting to dequeue all items from queue"),
+    etap:is(last_consumer_items(Consumer), {ok, [Item5, Item6]},
+        "Consumer received the right items"),
+
+    etap:is(couch_work_queue:item_count(Q), closed, "Queue closed"),
+    etap:is(couch_work_queue:size(Q), closed, "Queue closed"),
+
+    consume(Consumer, all),
+    etap:is(last_consumer_items(Consumer), closed, "Consumer got closed queue"),
+
+    stop(Producer, "producer"),
+    stop(Consumer, "consumer").
+
+
+test_single_consumer_max_item_count_and_size() ->
+    etap:diag("Spawning a queue with 3 max items, max size of 200 bytes, "
+        "1 producer and 1 consumer"),
+
+    {ok, Q} = couch_work_queue:new([{max_items, 3}, {max_size, 200}]),
+    Producer = spawn_producer(Q),
+    Consumer = spawn_consumer(Q),
+
+    etap:is(couch_work_queue:item_count(Q), 0, "Queue item count is 0"),
+    etap:is(couch_work_queue:size(Q), 0, "Queue size is 0 bytes"),
+
+    Item1 = produce(Producer, 100),
+    etap:is(ping(Producer), ok, "Producer not blocked"),
+    etap:is(couch_work_queue:item_count(Q), 1, "Queue item count is 1"),
+    etap:is(couch_work_queue:size(Q), 100, "Queue size is 100 bytes"),
+
+    Item2 = produce(Producer, 110),
+    etap:is(ping(Producer), timeout,
+        "Producer blocked when queue size >= max_size"),
+    etap:is(couch_work_queue:item_count(Q), 2, "Queue item count is 2"),
+    etap:is(couch_work_queue:size(Q), 210, "Queue size is 210 bytes"),
+
+    consume(Consumer, all),
+    etap:is(ping(Consumer), ok,
+        "Consumer not blocked when attempting to dequeue all items from queue"),
+    etap:is(last_consumer_items(Consumer), {ok, [Item1, Item2]},
+        "Consumer received the right items"),
+    etap:is(couch_work_queue:item_count(Q), 0, "Queue item count is 0"),
+    etap:is(couch_work_queue:size(Q), 0, "Queue size is 0 bytes"),
+
+    etap:is(ping(Producer), ok, "Producer not blocked anymore"),
+
+    Item3 = produce(Producer, 10),
+    etap:is(ping(Producer), ok, "Producer not blocked"),
+    etap:is(couch_work_queue:item_count(Q), 1, "Queue item count is 1"),
+    etap:is(couch_work_queue:size(Q), 10, "Queue size is 10 bytes"),
+
+    Item4 = produce(Producer, 4),
+    etap:is(ping(Producer), ok, "Producer not blocked"),
+    etap:is(couch_work_queue:item_count(Q), 2, "Queue item count is 2"),
+    etap:is(couch_work_queue:size(Q), 14, "Queue size is 14 bytes"),
+
+    Item5 = produce(Producer, 2),
+    etap:is(ping(Producer), timeout,
+        "Producer blocked when queue item count = max_items"),
+    etap:is(couch_work_queue:item_count(Q), 3, "Queue item count is 3"),
+    etap:is(couch_work_queue:size(Q), 16, "Queue size is 16 bytes"),
+
+    consume(Consumer, 1),
+    etap:is(ping(Consumer), ok,
+        "Consumer not blocked when attempting to dequeue 1 item from queue"),
+    etap:is(last_consumer_items(Consumer), {ok, [Item3]},
+       "Consumer received 1 item"),
+    etap:is(couch_work_queue:item_count(Q), 2, "Queue item count is 2"),
+    etap:is(couch_work_queue:size(Q), 6, "Queue size is 6 bytes"),
+
+    etap:is(close_queue(Q), timeout,
+        "Timeout when trying to close non-empty queue"),
+
+    consume(Consumer, 1),
+    etap:is(ping(Consumer), ok,
+        "Consumer not blocked when attempting to dequeue 1 item from queue"),
+    etap:is(last_consumer_items(Consumer), {ok, [Item4]},
+       "Consumer received 1 item"),
+    etap:is(couch_work_queue:item_count(Q), 1, "Queue item count is 1"),
+    etap:is(couch_work_queue:size(Q), 2, "Queue size is 2 bytes"),
+
+    Item6 = produce(Producer, 50),
+    etap:is(ping(Producer), ok,
+        "Producer not blocked when queue is not full and already received"
+        " a close request"),
+    etap:is(couch_work_queue:item_count(Q), 2, "Queue item count is 2"),
+    etap:is(couch_work_queue:size(Q), 52, "Queue size is 52 bytes"),
+
+    consume(Consumer, all),
+    etap:is(ping(Consumer), ok,
+        "Consumer not blocked when attempting to dequeue all items from queue"),
+    etap:is(last_consumer_items(Consumer), {ok, [Item5, Item6]},
+       "Consumer received all queued items"),
+
+    etap:is(couch_work_queue:item_count(Q), closed, "Queue closed"),
+    etap:is(couch_work_queue:size(Q), closed, "Queue closed"),
+
+    consume(Consumer, 1),
+    etap:is(last_consumer_items(Consumer), closed, "Consumer got closed queue"),
+
+    stop(Producer, "producer"),
+    stop(Consumer, "consumer").
+
+
+test_multiple_consumers() ->
+    etap:diag("Spawning a queue with 3 max items, max size of 200 bytes, "
+        "1 producer and 3 consumers"),
+
+    {ok, Q} = couch_work_queue:new(
+        [{max_items, 3}, {max_size, 200}, {multi_workers, true}]),
+    Producer = spawn_producer(Q),
+    Consumer1 = spawn_consumer(Q),
+    Consumer2 = spawn_consumer(Q),
+    Consumer3 = spawn_consumer(Q),
+
+    etap:is(couch_work_queue:item_count(Q), 0, "Queue item count is 0"),
+    etap:is(couch_work_queue:size(Q), 0, "Queue size is 0 bytes"),
+
+    consume(Consumer1, 1),
+    etap:is(ping(Consumer1), timeout,
+        "Consumer 1 blocked when attempting to dequeue 1 item from empty queue"),
+    consume(Consumer2, 2),
+    etap:is(ping(Consumer2), timeout,
+        "Consumer 2 blocked when attempting to dequeue 2 items from empty queue"),
+    consume(Consumer3, 1),
+    etap:is(ping(Consumer3), timeout,
+        "Consumer 3 blocked when attempting to dequeue 1 item from empty queue"),
+
+    Item1 = produce(Producer, 50),
+    etap:is(ping(Producer), ok, "Producer not blocked"),
+    etap:is(couch_work_queue:item_count(Q), 0, "Queue item count is 0"),
+    etap:is(couch_work_queue:size(Q), 0, "Queue size is 0 bytes"),
+
+    Item2 = produce(Producer, 50),
+    etap:is(ping(Producer), ok, "Producer not blocked"),
+    etap:is(couch_work_queue:item_count(Q), 0, "Queue item count is 0"),
+    etap:is(couch_work_queue:size(Q), 0, "Queue size is 0 bytes"),
+
+    Item3 = produce(Producer, 50),
+    etap:is(ping(Producer), ok, "Producer not blocked"),
+    etap:is(couch_work_queue:item_count(Q), 0, "Queue item count is 0"),
+    etap:is(couch_work_queue:size(Q), 0, "Queue size is 0 bytes"),
+
+    etap:is(ping(Consumer1), ok, "Consumer 1 unblocked"),
+    etap:is(last_consumer_items(Consumer1), {ok, [Item1]},
+       "Consumer 1 received 1 item"),
+    etap:is(couch_work_queue:item_count(Q), 0, "Queue item count is 0"),
+    etap:is(couch_work_queue:size(Q), 0, "Queue size is 0 bytes"),
+
+    etap:is(ping(Consumer2), ok, "Consumer 2 unblocked"),
+    etap:is(last_consumer_items(Consumer2), {ok, [Item2]},
+       "Consumer 2 received 1 item"),
+    etap:is(couch_work_queue:item_count(Q), 0, "Queue item count is 0"),
+    etap:is(couch_work_queue:size(Q), 0, "Queue size is 0 bytes"),
+
+    etap:is(ping(Consumer3), ok, "Consumer 3 unblocked"),
+    etap:is(last_consumer_items(Consumer3), {ok, [Item3]},
+       "Consumer 3 received 1 item"),
+    etap:is(couch_work_queue:item_count(Q), 0, "Queue item count is 0"),
+    etap:is(couch_work_queue:size(Q), 0, "Queue size is 0 bytes"),
+
+    consume(Consumer1, 1),
+    etap:is(ping(Consumer1), timeout,
+        "Consumer 1 blocked when attempting to dequeue 1 item from empty queue"),
+    consume(Consumer2, 2),
+    etap:is(ping(Consumer2), timeout,
+        "Consumer 2 blocked when attempting to dequeue 1 item from empty queue"),
+    consume(Consumer3, 1),
+    etap:is(ping(Consumer3), timeout,
+        "Consumer 3 blocked when attempting to dequeue 1 item from empty queue"),
+
+    Item4 = produce(Producer, 50),
+    etap:is(ping(Producer), ok, "Producer not blocked"),
+    etap:is(couch_work_queue:item_count(Q), 0, "Queue item count is 0"),
+    etap:is(couch_work_queue:size(Q), 0, "Queue size is 0 bytes"),
+
+    etap:is(close_queue(Q), ok, "Closed queue"),
+
+    etap:is(ping(Consumer1), ok, "Consumer 1 unblocked"),
+    etap:is(last_consumer_items(Consumer1), {ok, [Item4]},
+       "Consumer 1 received 1 item"),
+
+    etap:is(couch_work_queue:item_count(Q), closed, "Queue closed"),
+    etap:is(couch_work_queue:size(Q), closed, "Queue closed"),
+
+    etap:is(ping(Consumer2), ok, "Consumer 2 unblocked"),
+    etap:is(last_consumer_items(Consumer2), closed,
+        "Consumer 2 received 'closed' atom"),
+
+    etap:is(ping(Consumer3), ok, "Consumer 3 unblocked"),
+    etap:is(last_consumer_items(Consumer3), closed,
+        "Consumer 3 received 'closed' atom"),
+
+    stop(Producer, "producer"),
+    stop(Consumer1, "consumer 1"),
+    stop(Consumer2, "consumer 2"),
+    stop(Consumer3, "consumer 3").
+
+
+close_queue(Q) ->
+    ok = couch_work_queue:close(Q),
+    MonRef = erlang:monitor(process, Q),
+    receive
+    {'DOWN', MonRef, process, Q, _Reason} ->
+         etap:diag("Queue closed")
+    after 3000 ->
+         erlang:demonitor(MonRef),
+         timeout
+    end.
+
+
+spawn_consumer(Q) ->
+    Parent = self(),
+    spawn(fun() -> consumer_loop(Parent, Q, nil) end).
+
+
+consumer_loop(Parent, Q, PrevItem) ->
+    receive
+    {stop, Ref} ->
+        Parent ! {ok, Ref};
+    {ping, Ref} ->
+        Parent ! {pong, Ref},
+        consumer_loop(Parent, Q, PrevItem);
+    {last_item, Ref} ->
+        Parent ! {item, Ref, PrevItem},
+        consumer_loop(Parent, Q, PrevItem);
+    {consume, N} ->
+        Result = couch_work_queue:dequeue(Q, N),
+        consumer_loop(Parent, Q, Result)
+    end.
+
+
+spawn_producer(Q) ->
+    Parent = self(),
+    spawn(fun() -> producer_loop(Parent, Q) end).
+
+
+producer_loop(Parent, Q) ->
+    receive
+    {stop, Ref} ->
+        Parent ! {ok, Ref};
+    {ping, Ref} ->
+        Parent ! {pong, Ref},
+        producer_loop(Parent, Q);
+    {produce, Ref, Size} ->
+        Item = crypto:rand_bytes(Size),
+        Parent ! {item, Ref, Item},
+        ok = couch_work_queue:queue(Q, Item),
+        producer_loop(Parent, Q)
+    end.
+
+
+consume(Consumer, N) ->
+    Consumer ! {consume, N}.
+
+
+last_consumer_items(Consumer) ->
+    Ref = make_ref(),
+    Consumer ! {last_item, Ref},
+    receive
+    {item, Ref, Items} ->
+        Items
+    after 3000 ->
+        timeout
+    end.
+
+
+produce(Producer, Size) ->
+    Ref = make_ref(),
+    Producer ! {produce, Ref, Size},
+    receive
+    {item, Ref, Item} ->
+        Item
+    after 3000 ->
+        etap:bail("Timeout asking producer to produce an item")
+    end.
+
+
+ping(Pid) ->
+    Ref = make_ref(),
+    Pid ! {ping, Ref},
+    receive
+    {pong, Ref} ->
+        ok
+    after 3000 ->
+        timeout
+    end.
+
+
+stop(Pid, Name) ->
+    Ref = make_ref(),
+    Pid ! {stop, Ref},
+    receive
+    {ok, Ref} ->
+        etap:diag("Stopped " ++ Name)
+    after 3000 ->
+        etap:bail("Timeout stopping " ++ Name)
+    end.
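
For readers skimming this part of the diff: the test above exercises a small
producer/consumer surface of couch_work_queue. The sketch below is only a
reading aid; the option names ({max_items, ...}, {max_size, ...}) and the
{ok, Items} | closed return shape of dequeue/2 are taken from the assertions
above, while the module name and everything else is made up for illustration.

-module(work_queue_sketch).
-export([run/0]).

%% Illustrative only; mirrors the calls asserted in 042-work-queue.t.
run() ->
    {ok, Q} = couch_work_queue:new([{max_items, 3}, {max_size, 200}]),
    spawn_link(fun() -> produce(Q, 10) end),
    consume(Q, []).

produce(Q, 0) ->
    couch_work_queue:close(Q);                    % consumers drain, then see 'closed'
produce(Q, N) ->
    ok = couch_work_queue:queue(Q, <<N:32>>),     % blocks while the queue is full
    produce(Q, N - 1).

consume(Q, Acc) ->
    case couch_work_queue:dequeue(Q, all) of      % a positive item count also works
        closed      -> Acc;                       % queue emptied and closed
        {ok, Items} -> consume(Q, Acc ++ Items)
    end.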

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/src/test/etap/050-stream.t
----------------------------------------------------------------------
diff --git a/src/test/etap/050-stream.t b/src/test/etap/050-stream.t
new file mode 100755
index 0000000..0251f00
--- /dev/null
+++ b/src/test/etap/050-stream.t
@@ -0,0 +1,87 @@
+#!/usr/bin/env escript
+%% -*- erlang -*-
+
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+%   http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+main(_) ->
+    test_util:init_code_path(),
+    etap:plan(13),
+    case (catch test()) of
+        ok ->
+            etap:end_tests();
+        Other ->
+            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
+            etap:bail(Other)
+    end,
+    ok.
+
+read_all(Fd, PosList) ->
+    Data = couch_stream:foldl(Fd, PosList, fun(Bin, Acc) -> [Bin, Acc] end, []),
+    iolist_to_binary(Data).
+
+test() ->
+    {ok, Fd} = couch_file:open("test/etap/temp.050", [create,overwrite]),
+    {ok, Stream} = couch_stream:open(Fd),
+
+    etap:is(ok, couch_stream:write(Stream, <<"food">>),
+        "Writing to streams works."),
+
+    etap:is(ok, couch_stream:write(Stream, <<"foob">>),
+        "Consecutive writing to streams works."),
+
+    etap:is(ok, couch_stream:write(Stream, <<>>),
+        "Writing an empty binary does nothing."),
+
+    {Ptrs, Length, _, _, _} = couch_stream:close(Stream),
+    etap:is(Ptrs, [{0, 8}], "Close returns the file pointers."),
+    etap:is(Length, 8, "Close also returns the number of bytes written."),
+    etap:is(<<"foodfoob">>, read_all(Fd, Ptrs), "Returned pointers are valid."),
+
+    % Remember where we expect the pointer to be.
+    {ok, ExpPtr} = couch_file:bytes(Fd),
+    {ok, Stream2} = couch_stream:open(Fd),
+    OneBits = <<1:(8*10)>>,
+    etap:is(ok, couch_stream:write(Stream2, OneBits),
+        "Successfully wrote 79 zero bits and 1 one bit."),
+
+    ZeroBits = <<0:(8*10)>>,
+    etap:is(ok, couch_stream:write(Stream2, ZeroBits),
+        "Successfully wrote 80 0 bits."),
+
+    {Ptrs2, Length2, _, _, _} = couch_stream:close(Stream2),
+    etap:is(Ptrs2, [{ExpPtr, 20}], "Closing stream returns the file pointers."),
+    etap:is(Length2, 20, "Length written is 20 bytes (160 bits)."),
+
+    AllBits = iolist_to_binary([OneBits,ZeroBits]),
+    etap:is(AllBits, read_all(Fd, Ptrs2), "Returned pointers are valid."),
+
+    % Stream more than the 4K chunk size.
+    {ok, ExpPtr2} = couch_file:bytes(Fd),
+    {ok, Stream3} = couch_stream:open(Fd, [{buffer_size, 4096}]),
+    lists:foldl(fun(_, Acc) ->
+        Data = <<"a1b2c">>,
+        couch_stream:write(Stream3, Data),
+        [Data | Acc]
+    end, [], lists:seq(1, 1024)),
+    {Ptrs3, Length3, _, _, _} = couch_stream:close(Stream3),
+
+    % 4095 = 5 * (4096 div 5), i.e. the bytes buffered just below the threshold
+    % + 5 puts us over the threshold
+    % + 4 bytes for the term_to_binary adding a length header
+    % + 1 byte every 4K for tail append headers
+    SecondPtr = ExpPtr2 + 4095 + 5 + 4 + 1,
+    etap:is(Ptrs3, [{ExpPtr2, 4100}, {SecondPtr, 1020}], "Pointers every 4K bytes."),
+    etap:is(Length3, 5120, "Wrote the expected 5K bytes."),
+
+    couch_file:close(Fd),
+    ok.
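
The pointer assertions at the end of this test follow directly from the
constants used in it (a worked breakdown, assuming the header sizes stated
in the comment above):

    total written    = 1024 writes * 5 bytes                  = 5120 bytes
    first chunk      = 819 * 5 + 5 (the write crossing 4096)  = 4100 bytes
    second chunk     = (1024 - 820) * 5                       = 1020 bytes
    SecondPtr offset = 4095 + 5 + 4 + 1                       = 4105 bytes past ExpPtr2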

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/src/test/etap/060-kt-merging.t
----------------------------------------------------------------------
diff --git a/src/test/etap/060-kt-merging.t b/src/test/etap/060-kt-merging.t
new file mode 100755
index 0000000..efbdbf6
--- /dev/null
+++ b/src/test/etap/060-kt-merging.t
@@ -0,0 +1,176 @@
+#!/usr/bin/env escript
+%% -*- erlang -*-
+
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+%   http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+main(_) ->
+    test_util:init_code_path(),
+    etap:plan(16),
+    case (catch test()) of
+        ok ->
+            etap:end_tests();
+        Other ->
+            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
+            etap:bail(Other)
+    end,
+    ok.
+
+test() ->
+    One = {1, {"1","foo",[]}},
+
+    etap:is(
+        {[One], no_conflicts},
+        couch_key_tree:merge([], One, 10),
+        "The empty tree is the identity for merge."
+    ),
+    etap:is(
+        {[One], no_conflicts},
+        couch_key_tree:merge([One], One, 10),
+        "Merging is reflexive."
+    ),
+
+    TwoSibs = [{1, {"1","foo",[]}},
+               {1, {"2","foo",[]}}],
+
+    etap:is(
+        {TwoSibs, no_conflicts},
+        couch_key_tree:merge(TwoSibs, One, 10),
+        "Merging a prefix of a tree with the tree yields the tree."
+    ),
+
+    Three = {1, {"3","foo",[]}},
+    ThreeSibs = [{1, {"1","foo",[]}},
+                 {1, {"2","foo",[]}},
+                 {1, {"3","foo",[]}}],
+
+    etap:is(
+        {ThreeSibs, conflicts},
+        couch_key_tree:merge(TwoSibs, Three, 10),
+        "Merging a third unrelated branch leads to a conflict."
+    ),
+
+
+    TwoChild = {1, {"1","foo", [{"1a", "bar", [{"1aa", "bar", []}]}]}},
+
+    etap:is(
+        {[TwoChild], no_conflicts},
+        couch_key_tree:merge([TwoChild], TwoChild, 10),
+        "Merging two children is still reflexive."
+    ),
+
+    TwoChildSibs = {1, {"1","foo", [{"1a", "bar", []},
+                                     {"1b", "bar", []}]}},
+    etap:is(
+        {[TwoChildSibs], no_conflicts},
+        couch_key_tree:merge([TwoChildSibs], TwoChildSibs, 10),
+        "Merging a tree to itself is itself."),
+
+    TwoChildPlusSibs =
+        {1, {"1","foo", [{"1a", "bar", [{"1aa", "bar", []}]},
+                         {"1b", "bar", []}]}},
+
+    etap:is(
+        {[TwoChildPlusSibs], no_conflicts},
+        couch_key_tree:merge([TwoChild], TwoChildSibs, 10),
+        "Merging tree of uneven length at node 2."),
+
+    Stemmed1b = {2, {"1a", "bar", []}},
+    etap:is(
+        {[TwoChildSibs], no_conflicts},
+        couch_key_tree:merge([TwoChildSibs], Stemmed1b, 10),
+        "Merging a tree with a stem."
+    ),
+
+    TwoChildSibs2 = {1, {"1","foo", [{"1a", "bar", []},
+                                     {"1b", "bar", [{"1bb", "boo", []}]}]}},
+    Stemmed1bb = {3, {"1bb", "boo", []}},
+    etap:is(
+        {[TwoChildSibs2], no_conflicts},
+        couch_key_tree:merge([TwoChildSibs2], Stemmed1bb, 10),
+        "Merging a stem at a deeper level."
+    ),
+
+    StemmedTwoChildSibs2 = [{2,{"1a", "bar", []}},
+                            {2,{"1b", "bar", [{"1bb", "boo", []}]}}],
+
+    etap:is(
+        {StemmedTwoChildSibs2, no_conflicts},
+        couch_key_tree:merge(StemmedTwoChildSibs2, Stemmed1bb, 10),
+        "Merging a stem at a deeper level against paths at deeper levels."
+    ),
+
+    Stemmed1aa = {3, {"1aa", "bar", []}},
+    etap:is(
+        {[TwoChild], no_conflicts},
+        couch_key_tree:merge([TwoChild], Stemmed1aa, 10),
+        "Merging a single tree with a deeper stem."
+    ),
+
+    Stemmed1a = {2, {"1a", "bar", [{"1aa", "bar", []}]}},
+    etap:is(
+        {[TwoChild], no_conflicts},
+        couch_key_tree:merge([TwoChild], Stemmed1a, 10),
+        "Merging a larger stem."
+    ),
+
+    etap:is(
+        {[Stemmed1a], no_conflicts},
+        couch_key_tree:merge([Stemmed1a], Stemmed1aa, 10),
+        "More merging."
+    ),
+
+    OneChild = {1, {"1","foo",[{"1a", "bar", []}]}},
+    Expect1 = [OneChild, Stemmed1aa],
+    etap:is(
+        {Expect1, conflicts},
+        couch_key_tree:merge([OneChild], Stemmed1aa, 10),
+        "Merging should create conflicts."
+    ),
+
+    etap:is(
+        {[TwoChild], no_conflicts},
+        couch_key_tree:merge(Expect1, TwoChild, 10),
+        "Merge should have no conflicts."
+    ),
+
+    %% this test is based on couch-902-test-case2.py
+    %% foo has conflicts from replication at depth two
+    %% foo3 is the current value
+    Foo = {1, {"foo",
+               "val1",
+               [{"foo2","val2",[]},
+                {"foo3", "val3", []}
+               ]}},
+    %% foo now has an attachment added, which leads to foo4 and val4
+    %% off foo3
+    Bar = {1, {"foo",
+               [],
+               [{"foo3",
+                 [],
+                 [{"foo4","val4",[]}
+                  ]}]}},
+    %% this is what the merge returns
+    %% note that it ignores the conflicting branch as there's no match
+    FooBar = {1, {"foo",
+               "val1",
+               [{"foo2","val2",[]},
+                {"foo3", "val3", [{"foo4","val4",[]}]}
+               ]}},
+
+    etap:is(
+      {[FooBar], no_conflicts},
+      couch_key_tree:merge([Foo],Bar,10),
+      "Merging trees with conflicts ought to behave."
+    ),
+
+    ok.

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/src/test/etap/061-kt-missing-leaves.t
----------------------------------------------------------------------
diff --git a/src/test/etap/061-kt-missing-leaves.t b/src/test/etap/061-kt-missing-leaves.t
new file mode 100755
index 0000000..d60b4db
--- /dev/null
+++ b/src/test/etap/061-kt-missing-leaves.t
@@ -0,0 +1,65 @@
+#!/usr/bin/env escript
+%% -*- erlang -*-
+
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+%   http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+main(_) ->
+    test_util:init_code_path(),
+    etap:plan(4),
+    case (catch test()) of
+        ok ->
+            etap:end_tests();
+        Other ->
+            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
+            etap:bail(Other)
+    end,
+    ok.
+
+test() ->
+    TwoChildSibs = [{0, {"1","foo", [{"1a", "bar", []}, {"1b", "bar", []}]}}],
+    Stemmed1 = [{1, {"1a", "bar", [{"1aa", "bar", []}]}}],
+    Stemmed2 = [{2, {"1aa", "bar", []}}],
+
+    etap:is(
+        [],
+        couch_key_tree:find_missing(TwoChildSibs, [{0,"1"}, {1,"1a"}]),
+        "Look for missing keys."
+    ),
+
+    etap:is(
+        [{0, "10"}, {100, "x"}],
+        couch_key_tree:find_missing(
+            TwoChildSibs,
+            [{0,"1"}, {0, "10"}, {1,"1a"}, {100, "x"}]
+        ),
+        "Look for missing keys."
+    ),
+
+    etap:is(
+        [{0, "1"}, {100, "x"}],
+        couch_key_tree:find_missing(
+            Stemmed1,
+            [{0,"1"}, {1,"1a"}, {100, "x"}]
+        ),
+        "Look for missing keys."
+    ),
+    etap:is(
+        [{0, "1"}, {1,"1a"}, {100, "x"}],
+        couch_key_tree:find_missing(
+            Stemmed2,
+            [{0,"1"}, {1,"1a"}, {100, "x"}]
+        ),
+        "Look for missing keys."
+    ),
+
+    ok.

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/src/test/etap/062-kt-remove-leaves.t
----------------------------------------------------------------------
diff --git a/src/test/etap/062-kt-remove-leaves.t b/src/test/etap/062-kt-remove-leaves.t
new file mode 100755
index 0000000..745a00b
--- /dev/null
+++ b/src/test/etap/062-kt-remove-leaves.t
@@ -0,0 +1,69 @@
+#!/usr/bin/env escript
+%% -*- erlang -*-
+
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+%   http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+main(_) ->
+    test_util:init_code_path(),
+    etap:plan(6),
+    case (catch test()) of
+        ok ->
+            etap:end_tests();
+        Other ->
+            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
+            etap:bail(Other)
+    end,
+    ok.
+
+test() ->
+    OneChild = [{0, {"1","foo",[{"1a", "bar", []}]}}],
+    TwoChildSibs = [{0, {"1","foo", [{"1a", "bar", []}, {"1b", "bar", []}]}}],
+    Stemmed = [{1, {"1a", "bar", [{"1aa", "bar", []}]}}],
+
+    etap:is(
+        {TwoChildSibs, []},
+        couch_key_tree:remove_leafs(TwoChildSibs, []),
+        "Removing no leaves has no effect on the tree."
+    ),
+
+    etap:is(
+        {TwoChildSibs, []},
+        couch_key_tree:remove_leafs(TwoChildSibs, [{0, "1"}]),
+        "Removing a non-existant branch has no effect."
+    ),
+
+    etap:is(
+        {OneChild, [{1, "1b"}]},
+        couch_key_tree:remove_leafs(TwoChildSibs, [{1, "1b"}]),
+        "Removing a leaf removes the leaf."
+    ),
+
+    etap:is(
+        {[], [{1, "1b"},{1, "1a"}]},
+        couch_key_tree:remove_leafs(TwoChildSibs, [{1, "1a"}, {1, "1b"}]),
+        "Removing all leaves returns an empty tree."
+    ),
+
+    etap:is(
+        {Stemmed, []},
+        couch_key_tree:remove_leafs(Stemmed, [{1, "1a"}]),
+        "Removing a non-existant node has no effect."
+    ),
+
+    etap:is(
+        {[], [{2, "1aa"}]},
+        couch_key_tree:remove_leafs(Stemmed, [{2, "1aa"}]),
+        "Removing the last leaf returns an empty tree."
+    ),
+
+    ok.

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/src/test/etap/063-kt-get-leaves.t
----------------------------------------------------------------------
diff --git a/src/test/etap/063-kt-get-leaves.t b/src/test/etap/063-kt-get-leaves.t
new file mode 100755
index 0000000..6d4e800
--- /dev/null
+++ b/src/test/etap/063-kt-get-leaves.t
@@ -0,0 +1,98 @@
+#!/usr/bin/env escript
+%% -*- erlang -*-
+
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+%   http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+main(_) ->
+    test_util:init_code_path(),
+    etap:plan(11),
+    case (catch test()) of
+        ok ->
+            etap:end_tests();
+        Other ->
+            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
+            etap:bail(Other)
+    end,
+    ok.
+
+test() ->
+    TwoChildSibs = [{0, {"1","foo", [{"1a", "bar", []}, {"1b", "bar", []}]}}],
+    Stemmed = [{1, {"1a", "bar", [{"1aa", "bar", []}]}}],
+
+    etap:is(
+        {[{"foo", {0, ["1"]}}],[]},
+        couch_key_tree:get(TwoChildSibs, [{0, "1"}]),
+        "extract a subtree."
+    ),
+
+    etap:is(
+        {[{"bar", {1, ["1a", "1"]}}],[]},
+        couch_key_tree:get(TwoChildSibs, [{1, "1a"}]),
+        "extract a subtree."
+    ),
+
+    etap:is(
+        {[],[{0,"x"}]},
+        couch_key_tree:get_key_leafs(TwoChildSibs, [{0, "x"}]),
+        "gather up the leaves."
+    ),
+
+    etap:is(
+        {[{"bar", {1, ["1a","1"]}}],[]},
+        couch_key_tree:get_key_leafs(TwoChildSibs, [{1, "1a"}]),
+        "gather up the leaves."
+    ),
+
+    etap:is(
+        {[{"bar", {1, ["1a","1"]}},{"bar",{1, ["1b","1"]}}],[]},
+        couch_key_tree:get_key_leafs(TwoChildSibs, [{0, "1"}]),
+        "gather up the leaves."
+    ),
+
+    etap:is(
+        {[{0,[{"1", "foo"}]}],[]},
+        couch_key_tree:get_full_key_paths(TwoChildSibs, [{0, "1"}]),
+        "retrieve full key paths."
+    ),
+
+    etap:is(
+        {[{1,[{"1a", "bar"},{"1", "foo"}]}],[]},
+        couch_key_tree:get_full_key_paths(TwoChildSibs, [{1, "1a"}]),
+        "retrieve full key paths."
+    ),
+
+    etap:is(
+        [{2, [{"1aa", "bar"},{"1a", "bar"}]}],
+        couch_key_tree:get_all_leafs_full(Stemmed),
+        "retrieve all leaves."
+    ),
+
+    etap:is(
+        [{1, [{"1a", "bar"},{"1", "foo"}]}, {1, [{"1b", "bar"},{"1", "foo"}]}],
+        couch_key_tree:get_all_leafs_full(TwoChildSibs),
+        "retrieve all the leaves."
+    ),
+
+    etap:is(
+        [{"bar", {2, ["1aa","1a"]}}],
+        couch_key_tree:get_all_leafs(Stemmed),
+        "retrieve all leaves."
+    ),
+
+    etap:is(
+        [{"bar", {1, ["1a", "1"]}}, {"bar", {1, ["1b","1"]}}],
+        couch_key_tree:get_all_leafs(TwoChildSibs),
+        "retrieve all the leaves."
+    ),
+
+    ok.

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/src/test/etap/064-kt-counting.t
----------------------------------------------------------------------
diff --git a/src/test/etap/064-kt-counting.t b/src/test/etap/064-kt-counting.t
new file mode 100755
index 0000000..f182d28
--- /dev/null
+++ b/src/test/etap/064-kt-counting.t
@@ -0,0 +1,46 @@
+#!/usr/bin/env escript
+%% -*- erlang -*-
+
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+%   http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+main(_) ->
+    test_util:init_code_path(),
+    etap:plan(4),
+    case (catch test()) of
+        ok ->
+            etap:end_tests();
+        Other ->
+            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
+            etap:bail(Other)
+    end,
+    ok.
+
+test() ->
+    EmptyTree = [],
+    One = [{0, {"1","foo",[]}}],
+    TwoChildSibs = [{0, {"1","foo", [{"1a", "bar", []}, {"1b", "bar", []}]}}],
+    Stemmed = [{2, {"1bb", "boo", []}}],
+
+    etap:is(0, couch_key_tree:count_leafs(EmptyTree),
+        "Empty trees have no leaves."),
+
+    etap:is(1, couch_key_tree:count_leafs(One),
+        "Single node trees have a single leaf."),
+
+    etap:is(2, couch_key_tree:count_leafs(TwoChildSibs),
+        "Two children siblings counted as two leaves."),
+
+    etap:is(1, couch_key_tree:count_leafs(Stemmed),
+        "Stemming does not affect leaf counting."),
+
+    ok.

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/src/test/etap/065-kt-stemming.t
----------------------------------------------------------------------
diff --git a/src/test/etap/065-kt-stemming.t b/src/test/etap/065-kt-stemming.t
new file mode 100755
index 0000000..6e781c1
--- /dev/null
+++ b/src/test/etap/065-kt-stemming.t
@@ -0,0 +1,42 @@
+#!/usr/bin/env escript
+%% -*- erlang -*-
+
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+%   http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+main(_) ->
+    test_util:init_code_path(),
+    etap:plan(3),
+    case (catch test()) of
+        ok ->
+            etap:end_tests();
+        Other ->
+            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
+            etap:bail(Other)
+    end,
+    ok.
+
+test() ->
+    TwoChild = [{0, {"1","foo", [{"1a", "bar", [{"1aa", "bar", []}]}]}}],
+    Stemmed1 = [{1, {"1a", "bar", [{"1aa", "bar", []}]}}],
+    Stemmed2 = [{2, {"1aa", "bar", []}}],
+
+    etap:is(TwoChild, couch_key_tree:stem(TwoChild, 3),
+        "Stemming more levels than what exists does nothing."),
+
+    etap:is(Stemmed1, couch_key_tree:stem(TwoChild, 2),
+        "Stemming with a depth of two returns the deepest two nodes."),
+
+    etap:is(Stemmed2, couch_key_tree:stem(TwoChild, 1),
+        "Stemming to a depth of one returns the deepest node."),
+
+    ok.
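
The key-tree tests in 060 through 065 all share one implicit data shape: a
revision tree is a list of {StartDepth, {Key, Value, ChildTrees}} tuples. As
a reading aid, the shell snippet below restates that shape and two of the
calls exercised above; it assumes couch_key_tree is on the code path, and the
example values are made up.

%% A linear tree 1 -> 1a -> 1aa rooted at depth 0.
Tree = [{0, {"1", "foo", [{"1a", "bar", [{"1aa", "bar", []}]}]}}].

%% stem/2 keeps only the deepest N generations of every path.
[{1, {"1a", "bar", [{"1aa", "bar", []}]}}] = couch_key_tree:stem(Tree, 2).

%% Leaves are reported as {Value, {LeafDepth, LeafToRootKeyPath}}.
1 = couch_key_tree:count_leafs(Tree).
[{"bar", {2, ["1aa", "1a", "1"]}}] = couch_key_tree:get_all_leafs(Tree).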

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/src/test/etap/070-couch-db.t
----------------------------------------------------------------------
diff --git a/src/test/etap/070-couch-db.t b/src/test/etap/070-couch-db.t
new file mode 100755
index 0000000..787d6c6
--- /dev/null
+++ b/src/test/etap/070-couch-db.t
@@ -0,0 +1,73 @@
+#!/usr/bin/env escript
+%% -*- erlang -*-
+
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+%   http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+main(_) ->
+    test_util:init_code_path(),
+
+    etap:plan(4),
+    case (catch test()) of
+        ok ->
+            etap:end_tests();
+        Other ->
+            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
+            etap:bail(Other)
+    end,
+    ok.
+
+test() ->
+
+    couch_server_sup:start_link(test_util:config_files()),
+
+    couch_db:create(<<"etap-test-db">>, []),
+    {ok, AllDbs} = couch_server:all_databases(),
+    etap:ok(lists:member(<<"etap-test-db">>, AllDbs), "Database was created."),
+
+    couch_server:delete(<<"etap-test-db">>, []),
+    {ok, AllDbs2} = couch_server:all_databases(),
+    etap:ok(not lists:member(<<"etap-test-db">>, AllDbs2),
+        "Database was deleted."),
+
+    gen_server:call(couch_server, {set_max_dbs_open, 3}),
+    MkDbName = fun(Int) -> list_to_binary("lru-" ++ integer_to_list(Int)) end,
+
+    lists:foreach(fun(Int) ->
+        {ok, TestDbs} = couch_server:all_databases(),
+        ok = case lists:member(MkDbName(Int), TestDbs) of
+            true -> couch_server:delete(MkDbName(Int), []);
+            _ -> ok
+        end,
+        {ok, Db} = couch_db:create(MkDbName(Int), []),
+        ok = couch_db:close(Db)
+    end, lists:seq(1, 6)),
+
+    {ok, AllDbs3} = couch_server:all_databases(),
+    NumCreated = lists:foldl(fun(Int, Acc) ->
+        true = lists:member(MkDbName(Int), AllDbs3),
+        Acc+1
+    end, 0, lists:seq(1, 6)),
+    etap:is(6, NumCreated, "Created all databases."),
+
+    lists:foreach(fun(Int) ->
+        ok = couch_server:delete(MkDbName(Int), [])
+    end, lists:seq(1, 6)),
+
+    {ok, AllDbs4} = couch_server:all_databases(),
+    NumDeleted = lists:foldl(fun(Int, Acc) ->
+        false = lists:member(MkDbName(Int), AllDbs4),
+        Acc+1
+    end, 0, lists:seq(1, 6)),
+    etap:is(6, NumDeleted, "Deleted all databases."),
+
+    ok.

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/src/test/etap/072-cleanup.t
----------------------------------------------------------------------
diff --git a/src/test/etap/072-cleanup.t b/src/test/etap/072-cleanup.t
new file mode 100755
index 0000000..9cbcdfa
--- /dev/null
+++ b/src/test/etap/072-cleanup.t
@@ -0,0 +1,126 @@
+#!/usr/bin/env escript
+%% -*- erlang -*-
+
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+%   http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+-define(TEST_DB, <<"etap-test-db">>).
+
+-record(user_ctx, {
+    name = null,
+    roles = [],
+    handler
+}).
+
+-define(ADMIN_USER, #user_ctx{roles=[<<"_admin">>]}).
+
+main(_) ->
+    test_util:init_code_path(),
+
+    etap:plan(7),
+    try test() of
+        ok ->
+            etap:end_tests()
+    catch
+        Other ->
+            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
+            timer:sleep(1000),
+            etap:bail(Other)
+    end,
+    ok.
+
+test() ->
+
+    {ok, _} = couch_server_sup:start_link(test_util:config_files()),
+    couch_server:delete(?TEST_DB, []),
+    timer:sleep(1000),
+
+    couch_db:create(?TEST_DB, []),
+
+    {ok, AllDbs} = couch_server:all_databases(),
+    etap:ok(lists:member(?TEST_DB, AllDbs), "Database was created."),
+
+    FooRev = create_design_doc(<<"_design/foo">>, <<"bar">>),
+    query_view("foo", "bar"),
+
+    BoozRev = create_design_doc(<<"_design/booz">>, <<"baz">>),
+    query_view("booz", "baz"),
+
+    {ok, _Db} = couch_db:open(?TEST_DB, [{user_ctx, ?ADMIN_USER}]),
+    view_cleanup(),
+    etap:is(count_index_files(), 2,
+        "Two index files before any deletions."),
+
+    delete_design_doc(<<"_design/foo">>, FooRev),
+    view_cleanup(),
+    etap:is(count_index_files(), 1,
+        "One index file after first deletion and cleanup."),
+
+    delete_design_doc(<<"_design/booz">>, BoozRev),
+    view_cleanup(),
+    etap:is(count_index_files(), 0,
+        "No index files after second deletion and cleanup."),
+
+    couch_server:delete(?TEST_DB, []),
+    {ok, AllDbs2} = couch_server:all_databases(),
+    etap:ok(not lists:member(?TEST_DB, AllDbs2),
+        "Database was deleted."),
+    ok.
+
+create_design_doc(DDName, ViewName) ->
+    {ok, Db} = couch_db:open(?TEST_DB, [{user_ctx, ?ADMIN_USER}]),
+    DDoc = couch_doc:from_json_obj({[
+        {<<"_id">>, DDName},
+        {<<"language">>, <<"javascript">>},
+        {<<"views">>, {[
+            {ViewName, {[
+                {<<"map">>, <<"function(doc) { emit(doc.value, 1); }">>}
+            ]}}
+        ]}}
+    ]}),
+    {ok, Rev} = couch_db:update_doc(Db, DDoc, []),
+    couch_db:ensure_full_commit(Db),
+    couch_db:close(Db),
+    Rev.
+
+delete_design_doc(DDName, Rev) ->
+    {ok, Db} = couch_db:open(?TEST_DB, [{user_ctx, ?ADMIN_USER}]),
+    DDoc = couch_doc:from_json_obj({[
+        {<<"_id">>, DDName},
+        {<<"_rev">>, couch_doc:rev_to_str(Rev)},
+        {<<"_deleted">>, true}
+    ]}),
+    {ok, _} = couch_db:update_doc(Db, DDoc, [Rev]),
+    couch_db:close(Db).
+
+db_url() ->
+    Addr = couch_config:get("httpd", "bind_address", "127.0.0.1"),
+    Port = integer_to_list(mochiweb_socket_server:get(couch_httpd, port)),
+    "http://" ++ Addr ++ ":" ++ Port ++ "/" ++
+        binary_to_list(?TEST_DB).
+
+query_view(DDoc, View) ->
+    {ok, Code, _Headers, _Body} = test_util:request(
+        db_url() ++ "/_design/" ++ DDoc ++ "/_view/" ++ View, [], get),
+    etap:is(Code, 200, "Built view index for " ++ DDoc ++ "."),
+    ok.
+
+view_cleanup() ->
+    {ok, Db} = couch_db:open(?TEST_DB, [{user_ctx, ?ADMIN_USER}]),
+    couch_mrview:cleanup(Db),
+    couch_db:close(Db).
+
+count_index_files() ->
+    % call server to fetch the index files
+    RootDir = couch_config:get("couchdb", "view_index_dir"),
+    length(filelib:wildcard(RootDir ++ "/." ++
+        binary_to_list(?TEST_DB) ++ "_design"++"/mrview/*")).

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/src/test/etap/073-changes.t
----------------------------------------------------------------------
diff --git a/src/test/etap/073-changes.t b/src/test/etap/073-changes.t
new file mode 100755
index 0000000..d632c2f
--- /dev/null
+++ b/src/test/etap/073-changes.t
@@ -0,0 +1,558 @@
+#!/usr/bin/env escript
+%% -*- erlang -*-
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+%   http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+% Verify the _changes feed: filtering by doc ids (normal, with since, and
+% continuous feeds), the _design filter for design documents only, and the
+% heartbeat option.
+
+-record(user_ctx, {
+    name = null,
+    roles = [],
+    handler
+}).
+
+-record(changes_args, {
+    feed = "normal",
+    dir = fwd,
+    since = 0,
+    limit = 1000000000000000,
+    style = main_only,
+    heartbeat,
+    timeout,
+    filter = "",
+    filter_fun,
+    filter_args = [],
+    include_docs = false,
+    doc_options = [],
+    conflicts = false,
+    db_open_options = []
+}).
+
+-record(row, {
+    id,
+    seq,
+    deleted = false
+}).
+
+
+test_db_name() -> <<"couch_test_changes">>.
+
+
+main(_) ->
+    test_util:init_code_path(),
+
+    etap:plan(43),
+    case (catch test()) of
+        ok ->
+            etap:end_tests();
+        Other ->
+            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
+            etap:bail(Other)
+    end,
+    ok.
+
+
+test() ->
+    couch_server_sup:start_link(test_util:config_files()),
+
+    test_by_doc_ids(),
+    test_by_doc_ids_with_since(),
+    test_by_doc_ids_continuous(),
+    test_design_docs_only(),
+    test_heartbeat(),
+
+    couch_server_sup:stop(),
+    ok.
+
+
+test_by_doc_ids() ->
+    {ok, Db} = create_db(test_db_name()),
+
+    {ok, _Rev1} = save_doc(Db, {[{<<"_id">>, <<"doc1">>}]}),
+    {ok, _Rev2} = save_doc(Db, {[{<<"_id">>, <<"doc2">>}]}),
+    {ok, Rev3} = save_doc(Db, {[{<<"_id">>, <<"doc3">>}]}),
+    {ok, _Rev4} = save_doc(Db, {[{<<"_id">>, <<"doc4">>}]}),
+    {ok, _Rev5} = save_doc(Db, {[{<<"_id">>, <<"doc5">>}]}),
+    {ok, _Rev3_2} = save_doc(Db, {[{<<"_id">>, <<"doc3">>}, {<<"_rev">>, Rev3}]}),
+    {ok, _Rev6} = save_doc(Db, {[{<<"_id">>, <<"doc6">>}]}),
+    {ok, _Rev7} = save_doc(Db, {[{<<"_id">>, <<"doc7">>}]}),
+    {ok, _Rev8} = save_doc(Db, {[{<<"_id">>, <<"doc8">>}]}),
+
+    etap:diag("Folding changes in ascending order with _doc_ids filter"),
+    ChangesArgs = #changes_args{
+        filter = "_doc_ids"
+    },
+    DocIds = [<<"doc3">>, <<"doc4">>, <<"doc9999">>],
+    Req = {json_req, {[{<<"doc_ids">>, DocIds}]}},
+    Consumer = spawn_consumer(test_db_name(), ChangesArgs, Req),
+
+    {Rows, LastSeq} = wait_finished(Consumer),
+    {ok, Db2} = couch_db:open_int(test_db_name(), []),
+    UpSeq = couch_db:get_update_seq(Db2),
+    couch_db:close(Db2),
+    etap:is(length(Rows), 2, "Received 2 changes rows"),
+    etap:is(LastSeq, UpSeq, "LastSeq is same as database update seq number"),
+    [#row{seq = Seq1, id = Id1}, #row{seq = Seq2, id = Id2}] = Rows,
+    etap:is(Id1, <<"doc4">>, "First row is for doc doc4"),
+    etap:is(Seq1, 4, "First row has seq 4"),
+    etap:is(Id2, <<"doc3">>, "Second row is for doc doc3"),
+    etap:is(Seq2, 6, "Second row has seq 6"),
+
+    stop(Consumer),
+    etap:diag("Folding changes in descending order with _doc_ids filter"),
+    ChangesArgs2 = #changes_args{
+        filter = "_doc_ids",
+        dir = rev
+    },
+    Consumer2 = spawn_consumer(test_db_name(), ChangesArgs2, Req),
+
+    {Rows2, LastSeq2} = wait_finished(Consumer2),
+    etap:is(length(Rows2), 2, "Received 2 changes rows"),
+    etap:is(LastSeq2, 4, "LastSeq is 4"),
+    [#row{seq = Seq1_2, id = Id1_2}, #row{seq = Seq2_2, id = Id2_2}] = Rows2,
+    etap:is(Id1_2, <<"doc3">>, "First row is for doc doc3"),
+    etap:is(Seq1_2, 6, "First row has seq 6"),
+    etap:is(Id2_2, <<"doc4">>, "Second row is for doc doc4"),
+    etap:is(Seq2_2, 4, "Second row has seq 4"),
+
+    stop(Consumer2),
+    delete_db(Db).
+
+
+test_by_doc_ids_with_since() ->
+    {ok, Db} = create_db(test_db_name()),
+
+    {ok, _Rev1} = save_doc(Db, {[{<<"_id">>, <<"doc1">>}]}),
+    {ok, _Rev2} = save_doc(Db, {[{<<"_id">>, <<"doc2">>}]}),
+    {ok, Rev3} = save_doc(Db, {[{<<"_id">>, <<"doc3">>}]}),
+    {ok, _Rev4} = save_doc(Db, {[{<<"_id">>, <<"doc4">>}]}),
+    {ok, _Rev5} = save_doc(Db, {[{<<"_id">>, <<"doc5">>}]}),
+    {ok, Rev3_2} = save_doc(Db, {[{<<"_id">>, <<"doc3">>}, {<<"_rev">>, Rev3}]}),
+    {ok, _Rev6} = save_doc(Db, {[{<<"_id">>, <<"doc6">>}]}),
+    {ok, _Rev7} = save_doc(Db, {[{<<"_id">>, <<"doc7">>}]}),
+    {ok, _Rev8} = save_doc(Db, {[{<<"_id">>, <<"doc8">>}]}),
+
+    ChangesArgs = #changes_args{
+        filter = "_doc_ids",
+        since = 5
+    },
+    DocIds = [<<"doc3">>, <<"doc4">>, <<"doc9999">>],
+    Req = {json_req, {[{<<"doc_ids">>, DocIds}]}},
+    Consumer = spawn_consumer(test_db_name(), ChangesArgs, Req),
+
+    {Rows, LastSeq} = wait_finished(Consumer),
+    {ok, Db2} = couch_db:open_int(test_db_name(), []),
+    UpSeq = couch_db:get_update_seq(Db2),
+    couch_db:close(Db2),
+    etap:is(LastSeq, UpSeq, "LastSeq is same as database update seq number"),
+    etap:is(length(Rows), 1, "Received 1 changes rows"),
+    [#row{seq = Seq1, id = Id1}] = Rows,
+    etap:is(Id1, <<"doc3">>, "First row is for doc doc3"),
+    etap:is(Seq1, 6, "First row has seq 6"),
+
+    stop(Consumer),
+
+    ChangesArgs2 = #changes_args{
+        filter = "_doc_ids",
+        since = 6
+    },
+    Consumer2 = spawn_consumer(test_db_name(), ChangesArgs2, Req),
+
+    {Rows2, LastSeq2} = wait_finished(Consumer2),
+    {ok, Db3} = couch_db:open_int(test_db_name(), []),
+    UpSeq2 = couch_db:get_update_seq(Db3),
+    couch_db:close(Db3),
+    etap:is(LastSeq2, UpSeq2, "LastSeq is same as database update seq number"),
+    etap:is(length(Rows2), 0, "Received 0 change rows"),
+
+    stop(Consumer2),
+
+    {ok, _Rev3_3} = save_doc(
+        Db,
+        {[{<<"_id">>, <<"doc3">>}, {<<"_deleted">>, true}, {<<"_rev">>, Rev3_2}]}),
+
+    ChangesArgs3 = #changes_args{
+        filter = "_doc_ids",
+        since = 9
+    },
+    Consumer3 = spawn_consumer(test_db_name(), ChangesArgs3, Req),
+
+    {Rows3, LastSeq3} = wait_finished(Consumer3),
+    {ok, Db4} = couch_db:open_int(test_db_name(), []),
+    UpSeq3 = couch_db:get_update_seq(Db4),
+    couch_db:close(Db4),
+    etap:is(LastSeq3, UpSeq3, "LastSeq is same as database update seq number"),
+    etap:is(length(Rows3), 1, "Received 1 changes rows"),
+    etap:is(
+        [#row{seq = LastSeq3, id = <<"doc3">>, deleted = true}],
+        Rows3,
+        "Received row with doc3 deleted"),
+
+    stop(Consumer3),
+
+    delete_db(Db).
+
+
+test_by_doc_ids_continuous() ->
+    {ok, Db} = create_db(test_db_name()),
+
+    {ok, _Rev1} = save_doc(Db, {[{<<"_id">>, <<"doc1">>}]}),
+    {ok, _Rev2} = save_doc(Db, {[{<<"_id">>, <<"doc2">>}]}),
+    {ok, Rev3} = save_doc(Db, {[{<<"_id">>, <<"doc3">>}]}),
+    {ok, Rev4} = save_doc(Db, {[{<<"_id">>, <<"doc4">>}]}),
+    {ok, _Rev5} = save_doc(Db, {[{<<"_id">>, <<"doc5">>}]}),
+    {ok, Rev3_2} = save_doc(Db, {[{<<"_id">>, <<"doc3">>}, {<<"_rev">>, Rev3}]}),
+    {ok, _Rev6} = save_doc(Db, {[{<<"_id">>, <<"doc6">>}]}),
+    {ok, _Rev7} = save_doc(Db, {[{<<"_id">>, <<"doc7">>}]}),
+    {ok, _Rev8} = save_doc(Db, {[{<<"_id">>, <<"doc8">>}]}),
+
+    ChangesArgs = #changes_args{
+        filter = "_doc_ids",
+        feed = "continuous"
+    },
+    DocIds = [<<"doc3">>, <<"doc4">>, <<"doc9999">>],
+    Req = {json_req, {[{<<"doc_ids">>, DocIds}]}},
+    Consumer = spawn_consumer(test_db_name(), ChangesArgs, Req),
+
+    pause(Consumer),
+    Rows = get_rows(Consumer),
+
+    etap:is(length(Rows), 2, "Received 2 changes rows"),
+    [#row{seq = Seq1, id = Id1}, #row{seq = Seq2, id = Id2}] = Rows,
+    etap:is(Id1, <<"doc4">>, "First row is for doc doc4"),
+    etap:is(Seq1, 4, "First row has seq 4"),
+    etap:is(Id2, <<"doc3">>, "Second row is for doc doc3"),
+    etap:is(Seq2, 6, "Second row has seq 6"),
+
+    clear_rows(Consumer),
+    {ok, _Rev9} = save_doc(Db, {[{<<"_id">>, <<"doc9">>}]}),
+    {ok, _Rev10} = save_doc(Db, {[{<<"_id">>, <<"doc10">>}]}),
+    unpause(Consumer),
+    pause(Consumer),
+    etap:is(get_rows(Consumer), [], "No new rows"),
+
+    {ok, Rev4_2} = save_doc(Db, {[{<<"_id">>, <<"doc4">>}, {<<"_rev">>, Rev4}]}),
+    {ok, _Rev11} = save_doc(Db, {[{<<"_id">>, <<"doc11">>}]}),
+    {ok, _Rev4_3} = save_doc(Db, {[{<<"_id">>, <<"doc4">>}, {<<"_rev">>, Rev4_2}]}),
+    {ok, _Rev12} = save_doc(Db, {[{<<"_id">>, <<"doc12">>}]}),
+    {ok, Rev3_3} = save_doc(Db, {[{<<"_id">>, <<"doc3">>}, {<<"_rev">>, Rev3_2}]}),
+    unpause(Consumer),
+    pause(Consumer),
+
+    NewRows = get_rows(Consumer),
+    etap:is(length(NewRows), 2, "Received 2 new rows"),
+    [Row14, Row16] = NewRows,
+    etap:is(Row14#row.seq, 14, "First row has seq 14"),
+    etap:is(Row14#row.id, <<"doc4">>, "First row is for doc doc4"),
+    etap:is(Row16#row.seq, 16, "Second row has seq 16"),
+    etap:is(Row16#row.id, <<"doc3">>, "Second row is for doc doc3"),
+
+    clear_rows(Consumer),
+    {ok, _Rev3_4} = save_doc(Db, {[{<<"_id">>, <<"doc3">>}, {<<"_rev">>, Rev3_3}]}),
+    unpause(Consumer),
+    pause(Consumer),
+    etap:is(get_rows(Consumer), [#row{seq = 17, id = <<"doc3">>}],
+        "Got row for seq 17, doc doc3"),
+
+    unpause(Consumer),
+    stop(Consumer),
+    delete_db(Db).
+
+
+test_design_docs_only() ->
+    {ok, Db} = create_db(test_db_name()),
+
+    {ok, _Rev1} = save_doc(Db, {[{<<"_id">>, <<"doc1">>}]}),
+    {ok, _Rev2} = save_doc(Db, {[{<<"_id">>, <<"doc2">>}]}),
+    {ok, Rev3} = save_doc(Db, {[{<<"_id">>, <<"_design/foo">>}]}),
+
+    ChangesArgs = #changes_args{
+        filter = "_design"
+    },
+    Consumer = spawn_consumer(test_db_name(), ChangesArgs, {json_req, null}),
+
+    {Rows, LastSeq} = wait_finished(Consumer),
+    {ok, Db2} = couch_db:open_int(test_db_name(), []),
+    UpSeq = couch_db:get_update_seq(Db2),
+    couch_db:close(Db2),
+
+    etap:is(LastSeq, UpSeq, "LastSeq is same as database update seq number"),
+    etap:is(length(Rows), 1, "Received 1 changes row"),
+    etap:is(Rows, [#row{seq = 3, id = <<"_design/foo">>}], "Received row with ddoc"),
+
+    stop(Consumer),
+
+    {ok, Db3} = couch_db:open_int(
+        test_db_name(), [{user_ctx, #user_ctx{roles = [<<"_admin">>]}}]),
+    {ok, _Rev3_2} = save_doc(
+        Db3,
+        {[{<<"_id">>, <<"_design/foo">>}, {<<"_rev">>, Rev3},
+            {<<"_deleted">>, true}]}),
+
+    Consumer2 = spawn_consumer(test_db_name(), ChangesArgs, {json_req, null}),
+
+    {Rows2, LastSeq2} = wait_finished(Consumer2),
+    UpSeq2 = UpSeq + 1,
+    couch_db:close(Db3),
+
+    etap:is(LastSeq2, UpSeq2, "LastSeq is same as database update seq number"),
+    etap:is(length(Rows2), 1, "Received 1 changes row"),
+    etap:is(
+        Rows2,
+        [#row{seq = 4, id = <<"_design/foo">>, deleted = true}],
+        "Received row with deleted ddoc"),
+
+    stop(Consumer2),
+    delete_db(Db).
+
+test_heartbeat() ->
+    {ok, Db} = create_db(test_db_name()),
+
+    {ok, _} = save_doc(Db, {[
+        {<<"_id">>, <<"_design/foo">>},
+        {<<"language">>, <<"javascript">>},
+            {<<"filters">>, {[
+                {<<"foo">>, <<"function(doc) { if ((doc._id == 'doc10') ||
+                                                  (doc._id == 'doc11') ||
+                                                  (doc._id == 'doc12')) {
+                                                return true;
+                                               } else {
+                                                  return false;
+                                               }}">>
+            }]}}
+    ]}),
+
+    ChangesArgs = #changes_args{
+        filter = "foo/foo",
+        feed = "continuous",
+        timeout = 10000,
+        heartbeat = 1000
+    },
+    Consumer = spawn_consumer(test_db_name(), ChangesArgs, {json_req, null}),
+
+    {ok, _Rev1} = save_doc(Db, {[{<<"_id">>, <<"doc1">>}]}),
+    timer:sleep(200),
+    {ok, _Rev2} = save_doc(Db, {[{<<"_id">>, <<"doc2">>}]}),
+    timer:sleep(200),
+    {ok, _Rev3} = save_doc(Db, {[{<<"_id">>, <<"doc3">>}]}),
+    timer:sleep(200),
+    {ok, _Rev4} = save_doc(Db, {[{<<"_id">>, <<"doc4">>}]}),
+    timer:sleep(200),
+    {ok, _Rev5} = save_doc(Db, {[{<<"_id">>, <<"doc5">>}]}),
+    timer:sleep(200),
+    {ok, _Rev6} = save_doc(Db, {[{<<"_id">>, <<"doc6">>}]}),
+    timer:sleep(200),
+    {ok, _Rev7} = save_doc(Db, {[{<<"_id">>, <<"doc7">>}]}),
+    timer:sleep(200),
+    {ok, _Rev8} = save_doc(Db, {[{<<"_id">>, <<"doc8">>}]}),
+    timer:sleep(200),
+    {ok, _Rev9} = save_doc(Db, {[{<<"_id">>, <<"doc9">>}]}),
+    Heartbeats = get_heartbeats(Consumer),
+    etap:is(Heartbeats, 2, "Received 2 heartbeats now"),
+    {ok, _Rev10} = save_doc(Db, {[{<<"_id">>, <<"doc10">>}]}),
+    timer:sleep(200),
+    {ok, _Rev11} = save_doc(Db, {[{<<"_id">>, <<"doc11">>}]}),
+    timer:sleep(200),
+    {ok, _Rev12} = save_doc(Db, {[{<<"_id">>, <<"doc12">>}]}),
+    Heartbeats2 = get_heartbeats(Consumer),
+    etap:is(Heartbeats2, 3, "Received 3 heartbeats now"),
+    Rows = get_rows(Consumer),
+    etap:is(length(Rows), 3, "Received 3 changes rows"),
+
+    {ok, _Rev13} = save_doc(Db, {[{<<"_id">>, <<"doc13">>}]}),
+    timer:sleep(200),
+    {ok, _Rev14} = save_doc(Db, {[{<<"_id">>, <<"doc14">>}]}),
+    timer:sleep(200),
+    Heartbeats3 = get_heartbeats(Consumer),
+    etap:is(Heartbeats3, 6, "Received 6 heartbeats now"),
+    stop(Consumer),
+    couch_db:close(Db),
+    delete_db(Db).
+
+
+save_doc(Db, Json) ->
+    Doc = couch_doc:from_json_obj(Json),
+    {ok, Rev} = couch_db:update_doc(Db, Doc, []),
+    {ok, couch_doc:rev_to_str(Rev)}.
+
+
+get_rows(Consumer) ->
+    Ref = make_ref(),
+    Consumer ! {get_rows, Ref},
+    receive
+    {rows, Ref, Rows} ->
+        Rows
+    after 3000 ->
+        etap:bail("Timeout getting rows from consumer")
+    end.
+
+get_heartbeats(Consumer) ->
+    Ref = make_ref(),
+    Consumer ! {get_heartbeats, Ref},
+    receive
+    {heartbeats, Ref, HeartBeats} ->
+        HeartBeats
+    after 3000 ->
+        etap:bail("Timeout getting heartbeats from consumer")
+    end.
+
+
+clear_rows(Consumer) ->
+    Ref = make_ref(),
+    Consumer ! {reset, Ref},
+    receive
+    {ok, Ref} ->
+        ok
+    after 3000 ->
+        etap:bail("Timeout clearing consumer rows")
+    end.
+
+
+stop(Consumer) ->
+    Ref = make_ref(),
+    Consumer ! {stop, Ref},
+    receive
+    {ok, Ref} ->
+        ok
+    after 3000 ->
+        etap:bail("Timeout stopping consumer")
+    end.
+
+
+pause(Consumer) ->
+    Ref = make_ref(),
+    Consumer ! {pause, Ref},
+    receive
+    {paused, Ref} ->
+        ok
+    after 3000 ->
+        etap:bail("Timeout pausing consumer")
+    end.
+
+
+unpause(Consumer) ->
+    Ref = make_ref(),
+    Consumer ! {continue, Ref},
+    receive
+    {ok, Ref} ->
+        ok
+    after 3000 ->
+        etap:bail("Timeout unpausing consumer")
+    end.
+
+
+wait_finished(_Consumer) ->
+    receive
+    {consumer_finished, Rows, LastSeq} ->
+        {Rows, LastSeq}
+    after 30000 ->
+        etap:bail("Timeout waiting for consumer to finish")
+    end.
+
+
+spawn_consumer(DbName, ChangesArgs0, Req) ->
+    Parent = self(),
+    spawn(fun() ->
+        put(heartbeat_count, 0),
+        Callback = fun({change, {Change}, _}, _, Acc) ->
+            Id = couch_util:get_value(<<"id">>, Change),
+            Seq = couch_util:get_value(<<"seq">>, Change),
+            Del = couch_util:get_value(<<"deleted">>, Change, false),
+            [#row{id = Id, seq = Seq, deleted = Del} | Acc];
+        ({stop, LastSeq}, _, Acc) ->
+            Parent ! {consumer_finished, lists:reverse(Acc), LastSeq},
+            stop_loop(Parent, Acc);
+        (timeout, _, Acc) ->
+            put(heartbeat_count, get(heartbeat_count) + 1),
+            maybe_pause(Parent, Acc);
+        (_, _, Acc) ->
+            maybe_pause(Parent, Acc)
+        end,
+        {ok, Db} = couch_db:open_int(DbName, []),
+        ChangesArgs = case (ChangesArgs0#changes_args.timeout =:= undefined)
+            andalso (ChangesArgs0#changes_args.heartbeat =:= undefined) of
+        true ->
+            ChangesArgs0#changes_args{timeout = 10, heartbeat = 10};
+        false ->
+            ChangesArgs0
+        end,
+        FeedFun = couch_changes:handle_changes(ChangesArgs, Req, Db),
+        try
+            FeedFun({Callback, []})
+        catch throw:{stop, _} ->
+            ok
+        end,
+        catch couch_db:close(Db)
+    end).
+
+
+maybe_pause(Parent, Acc) ->
+    receive
+    {get_rows, Ref} ->
+        Parent ! {rows, Ref, lists:reverse(Acc)},
+        maybe_pause(Parent, Acc);
+    {get_heartbeats, Ref} ->
+        Parent ! {heartbeats, Ref, get(heartbeat_count)},
+        maybe_pause(Parent, Acc);
+    {reset, Ref} ->
+        Parent ! {ok, Ref},
+        maybe_pause(Parent, []);
+    {pause, Ref} ->
+        Parent ! {paused, Ref},
+        pause_loop(Parent, Acc);
+    {stop, Ref} ->
+        Parent ! {ok, Ref},
+        throw({stop, Acc})
+    after 0 ->
+        Acc
+    end.
+
+
+pause_loop(Parent, Acc) ->
+    receive
+    {stop, Ref} ->
+        Parent ! {ok, Ref},
+        throw({stop, Acc});
+    {reset, Ref} ->
+        Parent ! {ok, Ref},
+        pause_loop(Parent, []);
+    {continue, Ref} ->
+        Parent ! {ok, Ref},
+        Acc;
+    {get_rows, Ref} ->
+        Parent ! {rows, Ref, lists:reverse(Acc)},
+        pause_loop(Parent, Acc)
+    end.
+
+
+stop_loop(Parent, Acc) ->
+    receive
+    {get_rows, Ref} ->
+        Parent ! {rows, Ref, lists:reverse(Acc)},
+        stop_loop(Parent, Acc);
+    {stop, Ref} ->
+        Parent ! {ok, Ref},
+        Acc
+    end.
+
+
+create_db(DbName) ->
+    couch_db:create(
+        DbName,
+        [{user_ctx, #user_ctx{roles = [<<"_admin">>]}}, overwrite]).
+
+
+delete_db(Db) ->
+    ok = couch_server:delete(
+        couch_db:name(Db), [{user_ctx, #user_ctx{roles = [<<"_admin">>]}}]).
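
For reference, a minimal usage sketch of the helpers defined in this test file
(spawn_consumer/3, pause/1, get_rows/1, unpause/1, stop/1, plus the
#changes_args{} and #row{} records). It is illustrative only and not part of
the commit; the document id <<"docA">> is an arbitrary example.

    % Hedged sketch: read a _doc_ids-filtered continuous feed through the
    % consumer helpers above.
    example_doc_ids_consumer() ->
        {ok, Db} = create_db(test_db_name()),
        {ok, _Rev} = save_doc(Db, {[{<<"_id">>, <<"docA">>}]}),
        ChangesArgs = #changes_args{filter = "_doc_ids", feed = "continuous"},
        Req = {json_req, {[{<<"doc_ids">>, [<<"docA">>]}]}},
        Consumer = spawn_consumer(test_db_name(), ChangesArgs, Req),
        pause(Consumer),
        % With a single matching doc the feed yields one row,
        % #row{seq = 1, id = <<"docA">>}.
        Rows = get_rows(Consumer),
        unpause(Consumer),
        stop(Consumer),
        delete_db(Db),
        Rows.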

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/src/test/etap/074-doc-update-conflicts.t
----------------------------------------------------------------------
diff --git a/src/test/etap/074-doc-update-conflicts.t b/src/test/etap/074-doc-update-conflicts.t
new file mode 100755
index 0000000..09d0633
--- /dev/null
+++ b/src/test/etap/074-doc-update-conflicts.t
@@ -0,0 +1,218 @@
+#!/usr/bin/env escript
+%% -*- erlang -*-
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+%   http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+-record(user_ctx, {
+    name = null,
+    roles = [],
+    handler
+}).
+
+-define(i2l(I), integer_to_list(I)).
+
+test_db_name() -> <<"couch_test_update_conflicts">>.
+
+
+main(_) ->
+    test_util:init_code_path(),
+
+    etap:plan(35),
+    case (catch test()) of
+        ok ->
+            etap:end_tests();
+        Other ->
+            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
+            etap:bail(Other)
+    end,
+    ok.
+
+
+test() ->
+    couch_server_sup:start_link(test_util:config_files()),
+    couch_config:set("couchdb", "delayed_commits", "true", false),
+
+    lists:foreach(
+        fun(NumClients) -> test_concurrent_doc_update(NumClients) end,
+        [100, 500, 1000, 2000, 5000]),
+
+    test_bulk_delete_create(),
+
+    couch_server_sup:stop(),
+    ok.
+
+
+% Verify that if multiple clients try to update the same document
+% simultaneously, only one of them will get success response and all
+% the other ones will get a conflict error. Also validate that the
+% client which got the success response got its document version
+% persisted into the database.
+test_concurrent_doc_update(NumClients) ->
+    {ok, Db} = create_db(test_db_name()),
+    Doc = couch_doc:from_json_obj({[
+        {<<"_id">>, <<"foobar">>},
+        {<<"value">>, 0}
+    ]}),
+    {ok, Rev} = couch_db:update_doc(Db, Doc, []),
+    ok = couch_db:close(Db),
+    RevStr = couch_doc:rev_to_str(Rev),
+    etap:diag("Created first revision of test document"),
+
+    etap:diag("Spawning " ++ ?i2l(NumClients) ++
+        " clients to update the document"),
+    Clients = lists:map(
+        fun(Value) ->
+            ClientDoc = couch_doc:from_json_obj({[
+                {<<"_id">>, <<"foobar">>},
+                {<<"_rev">>, RevStr},
+                {<<"value">>, Value}
+            ]}),
+            Pid = spawn_client(ClientDoc),
+            {Value, Pid, erlang:monitor(process, Pid)}
+        end,
+        lists:seq(1, NumClients)),
+
+    lists:foreach(fun({_, Pid, _}) -> Pid ! go end, Clients),
+    etap:diag("Waiting for clients to finish"),
+
+    {NumConflicts, SavedValue} = lists:foldl(
+        fun({Value, Pid, MonRef}, {AccConflicts, AccValue}) ->
+            receive
+            {'DOWN', MonRef, process, Pid, {ok, _NewRev}} ->
+                {AccConflicts, Value};
+            {'DOWN', MonRef, process, Pid, conflict} ->
+                {AccConflicts + 1, AccValue};
+            {'DOWN', MonRef, process, Pid, Error} ->
+                etap:bail("Client " ++ ?i2l(Value) ++
+                    " got update error: " ++ couch_util:to_list(Error))
+            after 60000 ->
+                etap:bail("Timeout waiting for client " ++ ?i2l(Value) ++ " to die")
+            end
+        end,
+        {0, nil},
+        Clients),
+
+    etap:diag("Verifying client results"),
+    etap:is(
+        NumConflicts,
+        NumClients - 1,
+        "Got " ++ ?i2l(NumClients - 1) ++ " client conflicts"),
+
+    {ok, Db2} = couch_db:open_int(test_db_name(), []),
+    {ok, Leaves} = couch_db:open_doc_revs(Db2, <<"foobar">>, all, []),
+    ok = couch_db:close(Db2),
+    etap:is(length(Leaves), 1, "Only one document revision was persisted"),
+    [{ok, Doc2}] = Leaves,
+    {JsonDoc} = couch_doc:to_json_obj(Doc2, []),
+    etap:is(
+        couch_util:get_value(<<"value">>, JsonDoc),
+        SavedValue,
+        "Persisted doc has the right value"),
+
+    ok = timer:sleep(1000),
+    etap:diag("Restarting the server"),
+    couch_server_sup:stop(),
+    ok = timer:sleep(1000),
+    couch_server_sup:start_link(test_util:config_files()),
+
+    {ok, Db3} = couch_db:open_int(test_db_name(), []),
+    {ok, Leaves2} = couch_db:open_doc_revs(Db3, <<"foobar">>, all, []),
+    ok = couch_db:close(Db3),
+    etap:is(length(Leaves2), 1, "Only one document revision was persisted"),
+    [{ok, Doc3}] = Leaves2,
+    etap:is(Doc3, Doc2, "Got same document after server restart"),
+
+    delete_db(Db3).
+
+
+% COUCHDB-188
+test_bulk_delete_create() ->
+    {ok, Db} = create_db(test_db_name()),
+    Doc = couch_doc:from_json_obj({[
+        {<<"_id">>, <<"foobar">>},
+        {<<"value">>, 0}
+    ]}),
+    {ok, Rev} = couch_db:update_doc(Db, Doc, []),
+
+    DeletedDoc = couch_doc:from_json_obj({[
+        {<<"_id">>, <<"foobar">>},
+        {<<"_rev">>, couch_doc:rev_to_str(Rev)},
+        {<<"_deleted">>, true}
+    ]}),
+    NewDoc = couch_doc:from_json_obj({[
+        {<<"_id">>, <<"foobar">>},
+        {<<"value">>, 666}
+    ]}),
+
+    {ok, Results} = couch_db:update_docs(Db, [DeletedDoc, NewDoc], []),
+    ok = couch_db:close(Db),
+
+    etap:is(length([ok || {ok, _} <- Results]), 2,
+        "Deleted and non-deleted versions got an ok reply"),
+
+    [{ok, Rev1}, {ok, Rev2}] = Results,
+    {ok, Db2} = couch_db:open_int(test_db_name(), []),
+
+    {ok, [{ok, Doc1}]} = couch_db:open_doc_revs(
+        Db2, <<"foobar">>, [Rev1], [conflicts, deleted_conflicts]),
+    {ok, [{ok, Doc2}]} = couch_db:open_doc_revs(
+        Db2, <<"foobar">>, [Rev2], [conflicts, deleted_conflicts]),
+    ok = couch_db:close(Db2),
+
+    {Doc1Props} = couch_doc:to_json_obj(Doc1, []),
+    {Doc2Props} = couch_doc:to_json_obj(Doc2, []),
+
+    etap:is(couch_util:get_value(<<"_deleted">>, Doc1Props), true,
+        "Document was deleted"),
+    etap:is(couch_util:get_value(<<"_deleted">>, Doc2Props), undefined,
+        "New document not flagged as deleted"),
+    etap:is(couch_util:get_value(<<"value">>, Doc2Props), 666,
+        "New leaf revision has the right value"),
+    etap:is(couch_util:get_value(<<"_conflicts">>, Doc1Props), undefined,
+        "Deleted document has no conflicts"),
+    etap:is(couch_util:get_value(<<"_deleted_conflicts">>, Doc1Props), undefined,
+        "Deleted document has no deleted conflicts"),
+    etap:is(couch_util:get_value(<<"_conflicts">>, Doc2Props), undefined,
+        "New leaf revision doesn't have conflicts"),
+    etap:is(couch_util:get_value(<<"_deleted_conflicts">>, Doc2Props), undefined,
+        "New leaf revision doesn't have deleted conflicts"),
+
+    etap:is(element(1, Rev1), 2, "Deleted revision has position 2"),
+    etap:is(element(1, Rev2), 1, "New leaf revision has position 1"),
+
+    delete_db(Db2).
+
+
+spawn_client(Doc) ->
+    spawn(fun() ->
+        {ok, Db} = couch_db:open_int(test_db_name(), []),
+        receive go -> ok end,
+        erlang:yield(),
+        Result = try
+            couch_db:update_doc(Db, Doc, [])
+        catch _:Error ->
+            Error
+        end,
+        ok = couch_db:close(Db),
+        exit(Result)
+    end).
+
+
+create_db(DbName) ->
+    couch_db:create(
+        DbName,
+        [{user_ctx, #user_ctx{roles = [<<"_admin">>]}}, overwrite]).
+
+
+delete_db(Db) ->
+    ok = couch_server:delete(
+        couch_db:name(Db), [{user_ctx, #user_ctx{roles = [<<"_admin">>]}}]).
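
As a small complement to the concurrency test above, the sketch below
(illustrative only, not part of the commit) shows the behaviour it relies on
in isolation: updating with a stale _rev makes couch_db:update_doc/3 throw
conflict, which is exactly what the spawned clients exit with. It reuses
create_db/1 and delete_db/1 from this file.

    % Hedged sketch: one stale-rev update yields the 'conflict' the clients
    % above report to the parent process.
    stale_rev_conflict() ->
        {ok, Db} = create_db(test_db_name()),
        Doc0 = couch_doc:from_json_obj({[{<<"_id">>, <<"foobar">>}, {<<"value">>, 0}]}),
        {ok, Rev} = couch_db:update_doc(Db, Doc0, []),
        RevStr = couch_doc:rev_to_str(Rev),
        Doc1 = couch_doc:from_json_obj({[
            {<<"_id">>, <<"foobar">>}, {<<"_rev">>, RevStr}, {<<"value">>, 1}]}),
        {ok, _Rev2} = couch_db:update_doc(Db, Doc1, []),
        % RevStr is now stale, so this update must be rejected.
        StaleDoc = couch_doc:from_json_obj({[
            {<<"_id">>, <<"foobar">>}, {<<"_rev">>, RevStr}, {<<"value">>, 2}]}),
        Result = try
            couch_db:update_doc(Db, StaleDoc, [])
        catch throw:conflict ->
            conflict
        end,
        ok = couch_db:close(Db),
        delete_db(Db),
        Result.   % expected to be 'conflict'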

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/src/test/etap/075-auth-cache.t
----------------------------------------------------------------------
diff --git a/src/test/etap/075-auth-cache.t b/src/test/etap/075-auth-cache.t
new file mode 100755
index 0000000..623884b
--- /dev/null
+++ b/src/test/etap/075-auth-cache.t
@@ -0,0 +1,276 @@
+#!/usr/bin/env escript
+%% -*- erlang -*-
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+%   http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+-record(user_ctx, {
+    name = null,
+    roles = [],
+    handler
+}).
+
+-record(db, {
+    main_pid = nil,
+    update_pid = nil,
+    compactor_pid = nil,
+    instance_start_time, % number of microsecs since jan 1 1970 as a binary string
+    fd,
+    updater_fd,
+    fd_ref_counter,
+    header,
+    committed_update_seq,
+    fulldocinfo_by_id_btree,
+    docinfo_by_seq_btree,
+    local_docs_btree,
+    update_seq,
+    name,
+    filepath,
+    validate_doc_funs = [],
+    security = [],
+    security_ptr = nil,
+    user_ctx = #user_ctx{},
+    waiting_delayed_commit = nil,
+    revs_limit = 1000,
+    fsync_options = [],
+    options = [],
+    compression,
+    before_doc_update = nil, % nil | fun(Doc, Db) -> NewDoc
+    after_doc_read = nil     % nil | fun(Doc, Db) -> NewDoc
+}).
+
+auth_db_name() -> <<"couch_test_auth_db">>.
+auth_db_2_name() -> <<"couch_test_auth_db_2">>.
+salt() -> <<"SALT">>.
+
+
+main(_) ->
+    test_util:init_code_path(),
+
+    etap:plan(19),
+    case (catch test()) of
+        ok ->
+            etap:end_tests();
+        Other ->
+            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
+            etap:bail(Other)
+    end,
+    ok.
+
+
+test() ->
+    couch_server_sup:start_link(test_util:config_files()),
+    OrigName = couch_config:get("couch_httpd_auth", "authentication_db"),
+    couch_config:set(
+        "couch_httpd_auth", "authentication_db",
+        binary_to_list(auth_db_name()), false),
+    delete_db(auth_db_name()),
+    delete_db(auth_db_2_name()),
+
+    test_auth_db_crash(),
+
+    couch_config:set("couch_httpd_auth", "authentication_db", OrigName, false),
+    delete_db(auth_db_name()),
+    delete_db(auth_db_2_name()),
+    couch_server_sup:stop(),
+    ok.
+
+
+test_auth_db_crash() ->
+    Creds0 = couch_auth_cache:get_user_creds("joe"),
+    etap:is(Creds0, nil, "Got nil when getting joe's credentials"),
+
+    etap:diag("Adding first version of Joe's user doc"),
+    PasswordHash1 = hash_password("pass1"),
+    {ok, Rev1} = update_user_doc(auth_db_name(), "joe", "pass1"),
+
+    Creds1 = couch_auth_cache:get_user_creds("joe"),
+    etap:is(is_list(Creds1), true, "Got joe's credentials from cache"),
+    etap:is(couch_util:get_value(<<"password_sha">>, Creds1), PasswordHash1,
+            "Cached credentials have the right password"),
+
+    etap:diag("Updating Joe's user doc password"),
+    PasswordHash2 = hash_password("pass2"),
+    {ok, _Rev2} = update_user_doc(auth_db_name(), "joe", "pass2", Rev1),
+
+    Creds2 = couch_auth_cache:get_user_creds("joe"),
+    etap:is(is_list(Creds2), true, "Got joe's credentials from cache"),
+    etap:is(couch_util:get_value(<<"password_sha">>, Creds2), PasswordHash2,
+            "Cached credentials have the new password"),
+
+    etap:diag("Shutting down the auth database process"),
+    shutdown_db(auth_db_name()),
+
+    {ok, UpdateRev} = get_doc_rev(auth_db_name(), "joe"),
+    PasswordHash3 = hash_password("pass3"),
+    {ok, _Rev3} = update_user_doc(auth_db_name(), "joe", "pass3", UpdateRev),
+
+    etap:is(get_user_doc_password_sha(auth_db_name(), "joe"),
+            PasswordHash3,
+            "Latest Joe's doc revision has the new password hash"),
+
+    Creds3 = couch_auth_cache:get_user_creds("joe"),
+    etap:is(is_list(Creds3), true, "Got joe's credentials from cache"),
+    etap:is(couch_util:get_value(<<"password_sha">>, Creds3), PasswordHash3,
+            "Cached credentials have the new password"),
+
+    etap:diag("Deleting Joe's user doc"),
+    delete_user_doc(auth_db_name(), "joe"),
+    Creds4 = couch_auth_cache:get_user_creds("joe"),
+    etap:is(nil, Creds4,
+            "Joe's credentials not found in cache after user doc was deleted"),
+
+    etap:diag("Adding new user doc for Joe"),
+    PasswordHash5 = hash_password("pass5"),
+    {ok, _NewRev1} = update_user_doc(auth_db_name(), "joe", "pass5"),
+
+    Creds5 = couch_auth_cache:get_user_creds("joe"),
+    etap:is(is_list(Creds5), true, "Got joe's credentials from cache"),
+    etap:is(couch_util:get_value(<<"password_sha">>, Creds5), PasswordHash5,
+            "Cached credentials have the right password"),
+
+    full_commit(auth_db_name()),
+
+    etap:diag("Changing the auth database"),
+    couch_config:set(
+        "couch_httpd_auth", "authentication_db",
+        binary_to_list(auth_db_2_name()), false),
+    ok = timer:sleep(500),
+
+    Creds6 = couch_auth_cache:get_user_creds("joe"),
+    etap:is(nil, Creds6,
+            "Joe's credentials not found in cache after auth database changed"),
+
+    etap:diag("Adding first version of Joe's user doc to new auth database"),
+    PasswordHash7 = hash_password("pass7"),
+    {ok, _} = update_user_doc(auth_db_2_name(), "joe", "pass7"),
+
+    Creds7 = couch_auth_cache:get_user_creds("joe"),
+    etap:is(is_list(Creds7), true, "Got joe's credentials from cache"),
+    etap:is(couch_util:get_value(<<"password_sha">>, Creds7), PasswordHash7,
+            "Cached credentials have the right password"),
+
+    etap:diag("Shutting down the auth database process"),
+    shutdown_db(auth_db_2_name()),
+
+    {ok, UpdateRev2} = get_doc_rev(auth_db_2_name(), "joe"),
+    PasswordHash8 = hash_password("pass8"),
+    {ok, _Rev8} = update_user_doc(auth_db_2_name(), "joe", "pass8", UpdateRev2),
+
+    etap:is(get_user_doc_password_sha(auth_db_2_name(), "joe"),
+            PasswordHash8,
+            "Latest Joe's doc revision has the new password hash"),
+
+    Creds8 = couch_auth_cache:get_user_creds("joe"),
+    etap:is(is_list(Creds8), true, "Got joe's credentials from cache"),
+    etap:is(couch_util:get_value(<<"password_sha">>, Creds8), PasswordHash8,
+            "Cached credentials have the new password"),
+
+    etap:diag("Changing the auth database again"),
+    couch_config:set(
+        "couch_httpd_auth", "authentication_db",
+        binary_to_list(auth_db_name()), false),
+    ok = timer:sleep(500),
+
+    Creds9 = couch_auth_cache:get_user_creds("joe"),
+    etap:is(Creds9, Creds5,
+            "Got same credentials as before the firt auth database change"),
+    etap:is(couch_util:get_value(<<"password_sha">>, Creds9), PasswordHash5,
+            "Cached credentials have the right password"),
+    ok.
+
+
+update_user_doc(DbName, UserName, Password) ->
+    update_user_doc(DbName, UserName, Password, nil).
+
+update_user_doc(DbName, UserName, Password, Rev) ->
+    User = iolist_to_binary(UserName),
+    Doc = couch_doc:from_json_obj({[
+        {<<"_id">>, <<"org.couchdb.user:", User/binary>>},
+        {<<"name">>, User},
+        {<<"type">>, <<"user">>},
+        {<<"salt">>, salt()},
+        {<<"password_sha">>, hash_password(Password)},
+        {<<"roles">>, []}
+    ] ++ case Rev of
+        nil -> [];
+        _ ->   [{<<"_rev">>, Rev}]
+    end}),
+    {ok, AuthDb} = open_auth_db(DbName),
+    {ok, NewRev} = couch_db:update_doc(AuthDb, Doc, []),
+    ok = couch_db:close(AuthDb),
+    {ok, couch_doc:rev_to_str(NewRev)}.
+
+
+hash_password(Password) ->
+    list_to_binary(
+        couch_util:to_hex(crypto:sha(iolist_to_binary([Password, salt()])))).
+
+
+shutdown_db(DbName) ->
+    {ok, AuthDb} = open_auth_db(DbName),
+    ok = couch_db:close(AuthDb),
+    couch_util:shutdown_sync(AuthDb#db.main_pid),
+    ok = timer:sleep(1000).
+
+
+get_doc_rev(DbName, UserName) ->
+    DocId = iolist_to_binary([<<"org.couchdb.user:">>, UserName]),
+    {ok, AuthDb} = open_auth_db(DbName),
+    UpdateRev =
+    case couch_db:open_doc(AuthDb, DocId, []) of
+    {ok, Doc} ->
+        {Props} = couch_doc:to_json_obj(Doc, []),
+        couch_util:get_value(<<"_rev">>, Props);
+    {not_found, missing} ->
+        nil
+    end,
+    ok = couch_db:close(AuthDb),
+    {ok, UpdateRev}.
+
+
+get_user_doc_password_sha(DbName, UserName) ->
+    DocId = iolist_to_binary([<<"org.couchdb.user:">>, UserName]),
+    {ok, AuthDb} = open_auth_db(DbName),
+    {ok, Doc} = couch_db:open_doc(AuthDb, DocId, []),
+    ok = couch_db:close(AuthDb),
+    {Props} = couch_doc:to_json_obj(Doc, []),
+    couch_util:get_value(<<"password_sha">>, Props).
+
+
+delete_user_doc(DbName, UserName) ->
+    DocId = iolist_to_binary([<<"org.couchdb.user:">>, UserName]),
+    {ok, AuthDb} = open_auth_db(DbName),
+    {ok, Doc} = couch_db:open_doc(AuthDb, DocId, []),
+    {Props} = couch_doc:to_json_obj(Doc, []),
+    DeletedDoc = couch_doc:from_json_obj({[
+        {<<"_id">>, DocId},
+        {<<"_rev">>, couch_util:get_value(<<"_rev">>, Props)},
+        {<<"_deleted">>, true}
+    ]}),
+    {ok, _} = couch_db:update_doc(AuthDb, DeletedDoc, []),
+    ok = couch_db:close(AuthDb).
+
+
+full_commit(DbName) ->
+    {ok, AuthDb} = open_auth_db(DbName),
+    {ok, _} = couch_db:ensure_full_commit(AuthDb),
+    ok = couch_db:close(AuthDb).
+
+
+open_auth_db(DbName) ->
+    couch_db:open_int(
+        DbName, [{user_ctx, #user_ctx{roles = [<<"_admin">>]}}]).
+
+
+delete_db(Name) ->
+    couch_server:delete(
+        Name, [{user_ctx, #user_ctx{roles = [<<"_admin">>]}}]).
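
The sketch below (illustrative only, not part of the commit) condenses the
cache round-trip the assertions above repeat for several passwords; it uses
only helpers defined in this file plus couch_auth_cache:get_user_creds/1, and
the user name "jane" is an arbitrary example.

    % Hedged sketch: write a user doc via the helper above, then read the
    % salted SHA-1 back through the auth cache.
    example_cache_roundtrip() ->
        {ok, _Rev} = update_user_doc(auth_db_name(), "jane", "secret"),
        Creds = couch_auth_cache:get_user_creds("jane"),
        true = is_list(Creds),
        ExpectedHash = hash_password("secret"),
        ExpectedHash = couch_util:get_value(<<"password_sha">>, Creds),
        ok.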


[06/12] move test -> src/test

Posted by be...@apache.org.
http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/src/test/view_server/run_native_process.es
----------------------------------------------------------------------
diff --git a/src/test/view_server/run_native_process.es b/src/test/view_server/run_native_process.es
new file mode 100755
index 0000000..fcf16d7
--- /dev/null
+++ b/src/test/view_server/run_native_process.es
@@ -0,0 +1,59 @@
+#! /usr/bin/env escript
+
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+%   http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+read() ->
+    case io:get_line('') of
+        eof -> stop;
+        Data -> couch_util:json_decode(Data)
+    end.
+
+send(Data) when is_binary(Data) ->
+    send(binary_to_list(Data));
+send(Data) when is_list(Data) ->
+    io:format(Data ++ "\n", []).
+
+write(Data) ->
+    % log("~p", [Data]),
+    case (catch couch_util:json_encode(Data)) of
+        % when testing, this is what prints your errors
+        {json_encode, Error} -> write({[{<<"error">>, Error}]});
+        Json -> send(Json)
+    end.
+
+% log(Mesg) ->
+%    log(Mesg, []).
+% log(Mesg, Params) ->
+%    io:format(standard_error, Mesg, Params).
+% jlog(Mesg) ->
+%     write([<<"log">>, list_to_binary(io_lib:format("~p",[Mesg]))]).
+
+loop(Pid) ->
+    case read() of
+        stop -> ok;
+        Json ->
+            case (catch couch_native_process:prompt(Pid, Json)) of
+                {error, Reason} ->
+                    ok = write([error, Reason, Reason]);
+                Resp ->
+                    ok = write(Resp),
+                    loop(Pid)
+            end
+    end.
+
+main([]) ->
+    code:add_pathz("src/couchdb"),
+    code:add_pathz("src/mochiweb"),
+    {ok, Pid} = couch_native_process:start_link(),
+    loop(Pid).
+
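
For context, the loop above simply forwards decoded JSON lines to
couch_native_process:prompt/2. The sketch below is a guess at a typical
session using the usual view-server command names ("reset", "add_fun",
"map_doc"); those command names and the Emit binding in the map fun are
assumptions about couch_native_process, not something this script defines.

    % Hedged sketch of driving the native process directly, bypassing stdin.
    example_session() ->
        {ok, Pid} = couch_native_process:start_link(),
        _ = couch_native_process:prompt(Pid, [<<"reset">>]),
        MapSrc = <<"fun({Doc}) -> Emit(couch_util:get_value(<<\"_id\">>, Doc), null) end.">>,
        _ = couch_native_process:prompt(Pid, [<<"add_fun">>, MapSrc]),
        couch_native_process:prompt(Pid, [<<"map_doc">>, {[{<<"_id">>, <<"x">>}]}]).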

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/test/Makefile.am
----------------------------------------------------------------------
diff --git a/test/Makefile.am b/test/Makefile.am
deleted file mode 100644
index 7c70a5a..0000000
--- a/test/Makefile.am
+++ /dev/null
@@ -1,15 +0,0 @@
-## Licensed under the Apache License, Version 2.0 (the "License"); you may not
-## use this file except in compliance with the License. You may obtain a copy of
-## the License at
-##
-##   http://www.apache.org/licenses/LICENSE-2.0
-##
-## Unless required by applicable law or agreed to in writing, software
-## distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-## WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-## License for the specific language governing permissions and limitations under
-## the License.
-
-SUBDIRS = bench etap javascript view_server
-EXTRA_DIST = random_port.ini
-

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/test/bench/Makefile.am
----------------------------------------------------------------------
diff --git a/test/bench/Makefile.am b/test/bench/Makefile.am
deleted file mode 100644
index ce39c4b..0000000
--- a/test/bench/Makefile.am
+++ /dev/null
@@ -1,22 +0,0 @@
-## Licensed under the Apache License, Version 2.0 (the "License"); you may not
-## use this file except in compliance with the License. You may obtain a copy of
-## the License at
-##
-##   http://www.apache.org/licenses/LICENSE-2.0
-##
-## Unless required by applicable law or agreed to in writing, software
-## distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-## WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-## License for the specific language governing permissions and limitations under
-## the License.
-
-EXTRA_DIST = benchbulk.sh bench_marks.js run.tpl
-
-noinst_SCRIPTS = run
-CLEANFILES = run
-
-run: run.tpl
-	sed -e "s|%abs_top_srcdir%|$(abs_top_srcdir)|" \
-		-e "s|%abs_top_builddir%|$(abs_top_builddir)|" \
-	< $< > $@
-	chmod +x $@

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/test/bench/bench_marks.js
----------------------------------------------------------------------
diff --git a/test/bench/bench_marks.js b/test/bench/bench_marks.js
deleted file mode 100644
index 4025adb..0000000
--- a/test/bench/bench_marks.js
+++ /dev/null
@@ -1,103 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-var NUM_DOCS = 2000;
-var NUM_BATCHES = 20;
-
-var init = function() {
-  var db = new CouchDB("bench_mark_db", {"X-Couch-Full-Commit": "false"});
-  db.deleteDb();
-  db.createDb();
-  return db;
-};
-
-var timeit = function(func) {
-  var startTime = (new Date()).getTime();
-  func();
-  return ((new Date()).getTime() - startTime) / 1000;
-};
-
-var report = function(name, rate) {
-  rate = Math.round(parseFloat(rate) * 100) / 100;
-  console.log("" + name + ": " + rate + " docs/second");
-};
-
-var makeDocs = function(n) {
-  docs = [];
-  for (var i=0; i < n; i++) {
-    docs.push({"foo":"bar"});
-  };
-  return docs;
-};
-
-var couchTests = {};
-
-couchTests.single_doc_insert = function() {
-  var db = init();
-  var len = timeit(function() {
-    for(var i = 0; i < NUM_DOCS; i++) {
-      db.save({"foo": "bar"});
-    }
-  });
-  report("Single doc inserts", NUM_DOCS/len);
-};
-
-couchTests.batch_ok_doc_insert = function() {
-  var db = init();
-  var len = timeit(function() {
-    for(var i = 0; i < NUM_DOCS; i++) {
-      db.save({"foo":"bar"}, {"batch":"ok"});
-    }
-  });
-  report("Single doc inserts with batch=ok", NUM_DOCS/len);
-};
-
-couchTests.bulk_doc_100 = function() {
-  var db = init();
-  var len = timeit(function() {
-    for(var i = 0; i < NUM_BATCHES; i++) {
-      db.bulkSave(makeDocs(100));
-    }
-  });
-  report("Bulk docs - 100", (NUM_BATCHES*100)/len);
-};
-      
-couchTests.bulk_doc_1000 = function() {
-  var db = init();
-  var len = timeit(function() {
-    for(var i = 0; i < NUM_BATCHES; i++) {
-      db.bulkSave(makeDocs(1000));
-    }
-  });
-  report("Bulk docs - 1000", (NUM_BATCHES*1000)/len);
-};
-
-
-couchTests.bulk_doc_5000 = function() {
-  var db = init();
-  var len = timeit(function() {
-    for(var i = 0; i < NUM_BATCHES; i++) {
-      db.bulkSave(makeDocs(5000));
-    }
-  });
-  report("Bulk docs - 5000", (NUM_BATCHES*5000)/len);
-};
-
-couchTests.bulk_doc_10000 = function() {
-  var db = init();
-  var len = timeit(function() {
-    for(var i = 0; i < NUM_BATCHES; i++) {
-      db.bulkSave(makeDocs(10000));
-    }
-  });
-  report("Bulk docs - 10000", (NUM_BATCHES*10000)/len);
-};

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/test/bench/benchbulk.sh
----------------------------------------------------------------------
diff --git a/test/bench/benchbulk.sh b/test/bench/benchbulk.sh
deleted file mode 100755
index 55c72e4..0000000
--- a/test/bench/benchbulk.sh
+++ /dev/null
@@ -1,69 +0,0 @@
-#!/bin/sh -e
-# Licensed under the Apache License, Version 2.0 (the "License"); you may not
-# use this file except in compliance with the License. You may obtain a copy of
-# the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations under
-# the License.
-#
-
-# usage: time benchbulk.sh
-# it takes about 30 seconds to run on my old MacBook with bulksize 1000
-
-BULKSIZE=100
-DOCSIZE=10
-INSERTS=10
-ROUNDS=10
-DBURL="http://127.0.0.1:5984/benchbulk"
-POSTURL="$DBURL/_bulk_docs"
-
-function make_bulk_docs() {
-  ROW=0
-  SIZE=$(($1-1))
-  START=$2
-  BODYSIZE=$3  
-  
-  BODY=$(printf "%0${BODYSIZE}d")
-
-  echo '{"docs":['
-  while [ $ROW -lt $SIZE ]; do
-    printf '{"_id":"%020d", "body":"'$BODY'"},' $(($ROW + $START))
-    let ROW=ROW+1
-  done
-  printf '{"_id":"%020d", "body":"'$BODY'"}' $(($ROW + $START))
-  echo ']}'
-}
-
-echo "Making $INSERTS bulk inserts of $BULKSIZE docs each"
-
-echo "Attempt to delete db at $DBURL"
-curl -X DELETE $DBURL -w\\n
-
-echo "Attempt to create db at $DBURL"
-curl -X PUT $DBURL -w\\n
-
-echo "Running $ROUNDS rounds of $INSERTS concurrent inserts to $POSTURL"
-RUN=0
-while [ $RUN -lt $ROUNDS ]; do
-
-  POSTS=0
-  while [ $POSTS -lt $INSERTS ]; do
-    STARTKEY=$[ POSTS * BULKSIZE + RUN * BULKSIZE * INSERTS ]
-    echo "startkey $STARTKEY bulksize $BULKSIZE"
-    DOCS=$(make_bulk_docs $BULKSIZE $STARTKEY $DOCSIZE)
-    # echo $DOCS
-    echo $DOCS | curl -T - -H Content-Type:application/json -X POST $POSTURL -w%{http_code}\ %{time_total}\ sec\\n >/dev/null 2>&1 &
-    let POSTS=POSTS+1
-  done
-
-  echo "waiting"
-  wait
-  let RUN=RUN+1
-done
-
-curl $DBURL -w\\n

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/test/bench/run.tpl
----------------------------------------------------------------------
diff --git a/test/bench/run.tpl b/test/bench/run.tpl
deleted file mode 100755
index 9307863..0000000
--- a/test/bench/run.tpl
+++ /dev/null
@@ -1,28 +0,0 @@
-#!/bin/sh -e
-
-# Licensed under the Apache License, Version 2.0 (the "License"); you may not
-# use this file except in compliance with the License. You may obtain a copy of
-# the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations under
-# the License.
-
-SRC_DIR=%abs_top_srcdir%
-SCRIPT_DIR=$SRC_DIR/share/www/script
-JS_TEST_DIR=$SRC_DIR/test/javascript
-JS_BENCH_DIR=$SRC_DIR/test/bench
-
-COUCHJS=%abs_top_builddir%/src/couchdb/priv/couchjs
-
-cat $SCRIPT_DIR/json2.js \
-    $SCRIPT_DIR/couch.js \
-    $JS_TEST_DIR/couch_http.js \
-    $JS_BENCH_DIR/bench_marks.js \
-    $JS_TEST_DIR/cli_runner.js \
-    | $COUCHJS -
-

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/test/etap/001-load.t
----------------------------------------------------------------------
diff --git a/test/etap/001-load.t b/test/etap/001-load.t
deleted file mode 100755
index 5ce0d93..0000000
--- a/test/etap/001-load.t
+++ /dev/null
@@ -1,68 +0,0 @@
-#!/usr/bin/env escript
-%% -*- erlang -*-
-
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
-% Test that we can load each module.
-
-main(_) ->
-    test_util:init_code_path(),
-    Modules = [
-        couch_auth_cache,
-        couch_btree,
-        couch_changes,
-        couch_compress,
-        couch_config,
-        couch_config_writer,
-        couch_db,
-        couch_db_update_notifier,
-        couch_db_update_notifier_sup,
-        couch_db_updater,
-        couch_doc,
-        % Fails unless couch_config gen_server is started.
-        % couch_ejson_compare,
-        couch_event_sup,
-        couch_external_manager,
-        couch_external_server,
-        couch_file,
-        couch_httpd,
-        couch_httpd_db,
-        couch_httpd_external,
-        couch_httpd_misc_handlers,
-        couch_httpd_rewrite,
-        couch_httpd_stats_handlers,
-        couch_key_tree,
-        couch_log,
-        couch_os_process,
-        couch_query_servers,
-        couch_ref_counter,
-        couch_server,
-        couch_server_sup,
-        couch_stats_aggregator,
-        couch_stats_collector,
-        couch_stream,
-        couch_task_status,
-        couch_util,
-        couch_work_queue,
-        json_stream_parse
-    ],
-
-    etap:plan(length(Modules)),
-    lists:foreach(
-        fun(Module) ->
-            etap:loaded_ok(
-                Module,
-                lists:concat(["Loaded: ", Module])
-            )
-        end, Modules),
-    etap:end_tests().

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/test/etap/002-icu-driver.t
----------------------------------------------------------------------
diff --git a/test/etap/002-icu-driver.t b/test/etap/002-icu-driver.t
deleted file mode 100755
index e233533..0000000
--- a/test/etap/002-icu-driver.t
+++ /dev/null
@@ -1,33 +0,0 @@
-#!/usr/bin/env escript
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
-main(_) ->
-    test_util:init_code_path(),
-    couch_config:start_link(test_util:config_files()),
-    etap:plan(3),
-    etap:is(
-        element(1, couch_drv:start_link()),
-        ok,
-        "Started couch_icu_driver."
-    ),
-    etap:is(
-        couch_util:collate(<<"foo">>, <<"bar">>),
-        1,
-        "Can collate stuff"
-    ),
-    etap:is(
-        couch_util:collate(<<"A">>, <<"aa">>),
-        -1,
-        "Collate's non-ascii style."
-    ),
-    etap:end_tests().

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/test/etap/010-file-basics.t
----------------------------------------------------------------------
diff --git a/test/etap/010-file-basics.t b/test/etap/010-file-basics.t
deleted file mode 100755
index fb1b29e..0000000
--- a/test/etap/010-file-basics.t
+++ /dev/null
@@ -1,113 +0,0 @@
-#!/usr/bin/env escript
-%% -*- erlang -*-
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
--define(etap_match(Got, Expected, Desc),
-        etap:fun_is(fun(XXXXXX) ->
-            case XXXXXX of Expected -> true; _ -> false end
-        end, Got, Desc)).
-
-filename() -> test_util:build_file("test/etap/temp.010").
-
-main(_) ->
-    test_util:init_code_path(),
-    etap:plan(19),
-    case (catch test()) of
-        ok ->
-            etap:end_tests();
-        Other ->
-            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
-            etap:bail()
-    end,
-    ok.
-
-test() ->
-    etap:is({error, enoent}, couch_file:open("not a real file"),
-        "Opening a non-existant file should return an enoent error."),
-
-    etap:fun_is(
-        fun({ok, _}) -> true; (_) -> false end,
-        couch_file:open(filename() ++ ".1", [create, invalid_option]),
-        "Invalid flags to open are ignored."
-    ),
-
-    {ok, Fd} = couch_file:open(filename() ++ ".0", [create, overwrite]),
-    etap:ok(is_pid(Fd),
-        "Returned file descriptor is a Pid"),
-
-    etap:is({ok, 0}, couch_file:bytes(Fd),
-        "Newly created files have 0 bytes."),
-
-    ?etap_match(couch_file:append_term(Fd, foo), {ok, 0, _},
-        "Appending a term returns the previous end of file position."),
-
-    {ok, Size} = couch_file:bytes(Fd),
-    etap:is_greater(Size, 0,
-        "Writing a term increased the file size."),
-
-    ?etap_match(couch_file:append_binary(Fd, <<"fancy!">>), {ok, Size, _},
-        "Appending a binary returns the current file size."),
-
-    etap:is({ok, foo}, couch_file:pread_term(Fd, 0),
-        "Reading the first term returns what we wrote: foo"),
-
-    etap:is({ok, <<"fancy!">>}, couch_file:pread_binary(Fd, Size),
-        "Reading back the binary returns what we wrote: <<\"fancy\">>."),
-
-    etap:is({ok, couch_compress:compress(foo, snappy)},
-        couch_file:pread_binary(Fd, 0),
-        "Reading a binary at a term position returns the term as binary."
-    ),
-
-    {ok, BinPos, _} = couch_file:append_binary(Fd, <<131,100,0,3,102,111,111>>),
-    etap:is({ok, foo}, couch_file:pread_term(Fd, BinPos),
-        "Reading a term from a written binary term representation succeeds."),
-        
-    BigBin = list_to_binary(lists:duplicate(100000, 0)),
-    {ok, BigBinPos, _} = couch_file:append_binary(Fd, BigBin),
-    etap:is({ok, BigBin}, couch_file:pread_binary(Fd, BigBinPos),
-        "Reading a large term from a written representation succeeds."),
-    
-    ok = couch_file:write_header(Fd, hello),
-    etap:is({ok, hello}, couch_file:read_header(Fd),
-        "Reading a header succeeds."),
-        
-    {ok, BigBinPos2, _} = couch_file:append_binary(Fd, BigBin),
-    etap:is({ok, BigBin}, couch_file:pread_binary(Fd, BigBinPos2),
-        "Reading a large term from a written representation succeeds 2."),
-
-    % append_binary == append_iolist?
-    % Possible bug in pread_iolist or iolist() -> append_binary
-    {ok, IOLPos, _} = couch_file:append_binary(Fd, ["foo", $m, <<"bam">>]),
-    {ok, IoList} = couch_file:pread_iolist(Fd, IOLPos),
-    etap:is(<<"foombam">>, iolist_to_binary(IoList),
-        "Reading an results in a binary form of the written iolist()"),
-
-    % XXX: How does on test fsync?
-    etap:is(ok, couch_file:sync(Fd),
-        "Syncing does not cause an error."),
-
-    etap:is(ok, couch_file:truncate(Fd, Size),
-        "Truncating a file succeeds."),
-
-    %etap:is(eof, (catch couch_file:pread_binary(Fd, Size)),
-    %    "Reading data that was truncated fails.")
-    etap:skip(fun() -> ok end,
-        "No idea how to test reading beyond EOF"),
-
-    etap:is({ok, foo}, couch_file:pread_term(Fd, 0),
-        "Truncating does not affect data located before the truncation mark."),
-
-    etap:is(ok, couch_file:close(Fd),
-        "Files close properly."),
-    ok.

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/test/etap/011-file-headers.t
----------------------------------------------------------------------
diff --git a/test/etap/011-file-headers.t b/test/etap/011-file-headers.t
deleted file mode 100755
index a26b032..0000000
--- a/test/etap/011-file-headers.t
+++ /dev/null
@@ -1,152 +0,0 @@
-#!/usr/bin/env escript
-%% -*- erlang -*-
-%%! -pa ./src/couchdb -sasl errlog_type error -boot start_sasl -noshell
-
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
-filename() -> test_util:build_file("test/etap/temp.011").
-sizeblock() -> 4096. % Need to keep this in sync with couch_file.erl
-
-main(_) ->
-    test_util:init_code_path(),
-    {S1, S2, S3} = now(),
-    random:seed(S1, S2, S3),
-
-    etap:plan(18),
-    case (catch test()) of
-        ok ->
-            etap:end_tests();
-        Other ->
-            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
-            etap:bail()
-    end,
-    ok.
-
-test() ->
-    {ok, Fd} = couch_file:open(filename(), [create,overwrite]),
-
-    etap:is({ok, 0}, couch_file:bytes(Fd),
-        "File should be initialized to contain zero bytes."),
-
-    etap:is(ok, couch_file:write_header(Fd, {<<"some_data">>, 32}),
-        "Writing a header succeeds."),
-
-    {ok, Size1} = couch_file:bytes(Fd),
-    etap:is_greater(Size1, 0,
-        "Writing a header allocates space in the file."),
-
-    etap:is({ok, {<<"some_data">>, 32}}, couch_file:read_header(Fd),
-        "Reading the header returns what we wrote."),
-
-    etap:is(ok, couch_file:write_header(Fd, [foo, <<"more">>]),
-        "Writing a second header succeeds."),
-
-    {ok, Size2} = couch_file:bytes(Fd),
-    etap:is_greater(Size2, Size1,
-        "Writing a second header allocates more space."),
-
-    etap:is({ok, [foo, <<"more">>]}, couch_file:read_header(Fd),
-        "Reading the second header does not return the first header."),
-
-    % Delete the second header.
-    ok = couch_file:truncate(Fd, Size1),
-
-    etap:is({ok, {<<"some_data">>, 32}}, couch_file:read_header(Fd),
-        "Reading the header after a truncation returns a previous header."),
-
-    couch_file:write_header(Fd, [foo, <<"more">>]),
-    etap:is({ok, Size2}, couch_file:bytes(Fd),
-        "Rewriting the same second header returns the same second size."),
-
-    couch_file:write_header(Fd, erlang:make_tuple(5000, <<"CouchDB">>)),
-    etap:is(
-        couch_file:read_header(Fd),
-        {ok, erlang:make_tuple(5000, <<"CouchDB">>)},
-        "Headers larger than the block size can be saved (COUCHDB-1319)"
-    ),
-
-    ok = couch_file:close(Fd),
-
-    % Now for the fun stuff. Try corrupting the second header and see
-    % if we recover properly.
-
-    % Destroy the 0x1 byte that marks a header
-    check_header_recovery(fun(CouchFd, RawFd, Expect, HeaderPos) ->
-        etap:isnt(Expect, couch_file:read_header(CouchFd),
-            "Should return a different header before corruption."),
-        file:pwrite(RawFd, HeaderPos, <<0>>),
-        etap:is(Expect, couch_file:read_header(CouchFd),
-            "Corrupting the byte marker should read the previous header.")
-    end),
-
-    % Corrupt the size.
-    check_header_recovery(fun(CouchFd, RawFd, Expect, HeaderPos) ->
-        etap:isnt(Expect, couch_file:read_header(CouchFd),
-            "Should return a different header before corruption."),
-        % +1 for 0x1 byte marker
-        file:pwrite(RawFd, HeaderPos+1, <<10/integer>>),
-        etap:is(Expect, couch_file:read_header(CouchFd),
-            "Corrupting the size should read the previous header.")
-    end),
-
-    % Corrupt the MD5 signature
-    check_header_recovery(fun(CouchFd, RawFd, Expect, HeaderPos) ->
-        etap:isnt(Expect, couch_file:read_header(CouchFd),
-            "Should return a different header before corruption."),
-        % +5 = +1 for 0x1 byte and +4 for term size.
-        file:pwrite(RawFd, HeaderPos+5, <<"F01034F88D320B22">>),
-        etap:is(Expect, couch_file:read_header(CouchFd),
-            "Corrupting the MD5 signature should read the previous header.")
-    end),
-
-    % Corrupt the data
-    check_header_recovery(fun(CouchFd, RawFd, Expect, HeaderPos) ->
-        etap:isnt(Expect, couch_file:read_header(CouchFd),
-            "Should return a different header before corruption."),
-        % +21 = +1 for 0x1 byte, +4 for term size and +16 for MD5 sig
-        file:pwrite(RawFd, HeaderPos+21, <<"some data goes here!">>),
-        etap:is(Expect, couch_file:read_header(CouchFd),
-            "Corrupting the header data should read the previous header.")
-    end),
-
-    ok.
-
-check_header_recovery(CheckFun) ->
-    {ok, Fd} = couch_file:open(filename(), [create,overwrite]),
-    {ok, RawFd} = file:open(filename(), [read, write, raw, binary]),
-
-    {ok, _} = write_random_data(Fd),
-    ExpectHeader = {some_atom, <<"a binary">>, 756},
-    ok = couch_file:write_header(Fd, ExpectHeader),
-
-    {ok, HeaderPos} = write_random_data(Fd),
-    ok = couch_file:write_header(Fd, {2342, <<"corruption! greed!">>}),
-
-    CheckFun(Fd, RawFd, {ok, ExpectHeader}, HeaderPos),
-
-    ok = file:close(RawFd),
-    ok = couch_file:close(Fd),
-    ok.
-
-write_random_data(Fd) ->
-    write_random_data(Fd, 100 + random:uniform(1000)).
-
-write_random_data(Fd, 0) ->
-    {ok, Bytes} = couch_file:bytes(Fd),
-    {ok, (1 + Bytes div sizeblock()) * sizeblock()};
-write_random_data(Fd, N) ->
-    Choices = [foo, bar, <<"bizzingle">>, "bank", ["rough", stuff]],
-    Term = lists:nth(random:uniform(4) + 1, Choices),
-    {ok, _, _} = couch_file:append_term(Fd, Term),
-    write_random_data(Fd, N-1).
-

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/test/etap/020-btree-basics.t
----------------------------------------------------------------------
diff --git a/test/etap/020-btree-basics.t b/test/etap/020-btree-basics.t
deleted file mode 100755
index b0fb2d2..0000000
--- a/test/etap/020-btree-basics.t
+++ /dev/null
@@ -1,265 +0,0 @@
-#!/usr/bin/env escript
-%% -*- erlang -*-
-%%! -pa ./src/couchdb -sasl errlog_type error -boot start_sasl -noshell
-
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
-filename() -> test_util:build_file("test/etap/temp.020").
-rows() -> 250.
-
--record(btree, {
-    fd,
-    root,
-    extract_kv,
-    assemble_kv,
-    less,
-    reduce,
-    compression
-}).
-
-main(_) ->
-    test_util:init_code_path(),
-    etap:plan(75),
-    case (catch test()) of
-        ok ->
-            etap:end_tests();
-        Other ->
-            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
-            etap:bail()
-    end,
-    ok.
-
-%% @todo Determine if this number should be greater to see if the btree was
-%% broken into multiple nodes. AKA "How do we appropiately detect if multiple
-%% nodes were created."
-test()->
-    Sorted = [{Seq, random:uniform()} || Seq <- lists:seq(1, rows())],
-    etap:ok(test_kvs(Sorted), "Testing sorted keys"),
-    etap:ok(test_kvs(lists:reverse(Sorted)), "Testing reversed sorted keys"),
-    etap:ok(test_kvs(shuffle(Sorted)), "Testing shuffled keys."),
-    ok.
-
-test_kvs(KeyValues) ->
-    ReduceFun = fun
-        (reduce, KVs) ->
-            length(KVs);
-        (rereduce, Reds) ->
-            lists:sum(Reds)
-    end,
-
-    Keys = [K || {K, _} <- KeyValues],
-
-    {ok, Fd} = couch_file:open(filename(), [create,overwrite]),
-    {ok, Btree} = couch_btree:open(nil, Fd, [{compression, none}]),
-    etap:ok(is_record(Btree, btree), "Created btree is really a btree record"),
-    etap:is(Btree#btree.fd, Fd, "Btree#btree.fd is set correctly."),
-    etap:is(Btree#btree.root, nil, "Btree#btree.root is set correctly."),
-    etap:is(0, couch_btree:size(Btree), "Empty btrees have a 0 size."),
-
-    Btree1 = couch_btree:set_options(Btree, [{reduce, ReduceFun}]),
-    etap:is(Btree1#btree.reduce, ReduceFun, "Reduce function was set"),
-    {ok, _, EmptyRes} = couch_btree:foldl(Btree1, fun(_, X) -> {ok, X+1} end, 0),
-    etap:is(EmptyRes, 0, "Folding over an empty btree"),
-
-    {ok, Btree2} = couch_btree:add_remove(Btree1, KeyValues, []),
-    etap:ok(test_btree(Btree2, KeyValues),
-        "Adding all keys at once returns a complete btree."),
-
-    etap:is((couch_btree:size(Btree2) > 0), true,
-            "Non empty btrees have a size > 0."),
-    etap:is((couch_btree:size(Btree2) =< couch_file:bytes(Fd)), true,
-            "Btree size is <= file size."),
-
-    etap:fun_is(
-        fun
-            ({ok, {kp_node, _}}) -> true;
-            (_) -> false
-        end,
-        couch_file:pread_term(Fd, element(1, Btree2#btree.root)),
-        "Btree root pointer is a kp_node."
-    ),
-
-    {ok, Btree3} = couch_btree:add_remove(Btree2, [], Keys),
-    etap:ok(test_btree(Btree3, []),
-        "Removing all keys at once returns an empty btree."),
-
-    etap:is(0, couch_btree:size(Btree3),
-            "After removing all keys btree size is 0."),
-
-    {Btree4, _} = lists:foldl(fun(KV, {BtAcc, PrevSize}) ->
-        {ok, BtAcc2} = couch_btree:add_remove(BtAcc, [KV], []),
-        case couch_btree:size(BtAcc2) > PrevSize of
-        true ->
-            ok;
-        false ->
-            etap:bail("After inserting a value, btree size did not increase.")
-        end,
-        {BtAcc2, couch_btree:size(BtAcc2)}
-    end, {Btree3, couch_btree:size(Btree3)}, KeyValues),
-
-    etap:ok(test_btree(Btree4, KeyValues),
-        "Adding all keys one at a time returns a complete btree."),
-    etap:is((couch_btree:size(Btree4) > 0), true,
-            "Non empty btrees have a size > 0."),
-
-    {Btree5, _} = lists:foldl(fun({K, _}, {BtAcc, PrevSize}) ->
-        {ok, BtAcc2} = couch_btree:add_remove(BtAcc, [], [K]),
-        case couch_btree:size(BtAcc2) < PrevSize of
-        true ->
-            ok;
-        false ->
-            etap:bail("After removing a key, btree size did not decrease.")
-        end,
-        {BtAcc2, couch_btree:size(BtAcc2)}
-    end, {Btree4, couch_btree:size(Btree4)}, KeyValues),
-    etap:ok(test_btree(Btree5, []),
-        "Removing all keys one at a time returns an empty btree."),
-    etap:is(0, couch_btree:size(Btree5),
-            "After removing all keys, one by one, btree size is 0."),
-
-    KeyValuesRev = lists:reverse(KeyValues),
-    {Btree6, _} = lists:foldl(fun(KV, {BtAcc, PrevSize}) ->
-        {ok, BtAcc2} = couch_btree:add_remove(BtAcc, [KV], []),
-        case couch_btree:size(BtAcc2) > PrevSize of
-        true ->
-            ok;
-        false ->
-            etap:is(false, true,
-                   "After inserting a value, btree size did not increase.")
-        end,
-        {BtAcc2, couch_btree:size(BtAcc2)}
-    end, {Btree5, couch_btree:size(Btree5)}, KeyValuesRev),
-    etap:ok(test_btree(Btree6, KeyValues),
-        "Adding all keys in reverse order returns a complete btree."),
-
-    {_, Rem2Keys0, Rem2Keys1} = lists:foldl(fun(X, {Count, Left, Right}) ->
-        case Count rem 2 == 0 of
-            true-> {Count+1, [X | Left], Right};
-            false -> {Count+1, Left, [X | Right]}
-        end
-    end, {0, [], []}, KeyValues),
-
-    etap:ok(test_add_remove(Btree6, Rem2Keys0, Rem2Keys1),
-        "Add/Remove every other key."),
-
-    etap:ok(test_add_remove(Btree6, Rem2Keys1, Rem2Keys0),
-        "Add/Remove opposite every other key."),
-
-    Size1 = couch_btree:size(Btree6),
-    {ok, Btree7} = couch_btree:add_remove(Btree6, [], [K||{K,_}<-Rem2Keys1]),
-    Size2 = couch_btree:size(Btree7),
-    etap:is((Size2 < Size1), true, "Btree size decreased"),
-    {ok, Btree8} = couch_btree:add_remove(Btree7, [], [K||{K,_}<-Rem2Keys0]),
-    Size3 = couch_btree:size(Btree8),
-    etap:is((Size3 < Size2), true, "Btree size decreased"),
-    etap:is(Size3, 0, "Empty btree has size 0."),
-    etap:ok(test_btree(Btree8, []),
-        "Removing both halves of every other key returns an empty btree."),
-
-    %% Third chunk (close out)
-    etap:is(couch_file:close(Fd), ok, "closing out"),
-    true.
-
-test_btree(Btree, KeyValues) ->
-    ok = test_key_access(Btree, KeyValues),
-    ok = test_lookup_access(Btree, KeyValues),
-    ok = test_final_reductions(Btree, KeyValues),
-    ok = test_traversal_callbacks(Btree, KeyValues),
-    true.
-
-test_add_remove(Btree, OutKeyValues, RemainingKeyValues) ->
-    Btree2 = lists:foldl(fun({K, _}, BtAcc) ->
-        {ok, BtAcc2} = couch_btree:add_remove(BtAcc, [], [K]),
-        BtAcc2
-    end, Btree, OutKeyValues),
-    true = test_btree(Btree2, RemainingKeyValues),
-
-    Btree3 = lists:foldl(fun(KV, BtAcc) ->
-        {ok, BtAcc2} = couch_btree:add_remove(BtAcc, [KV], []),
-        BtAcc2
-    end, Btree2, OutKeyValues),
-    true = test_btree(Btree3, OutKeyValues ++ RemainingKeyValues).
-
-test_key_access(Btree, List) ->
-    FoldFun = fun(Element, {[HAcc|TAcc], Count}) ->
-        case Element == HAcc of
-            true -> {ok, {TAcc, Count + 1}};
-            _ -> {ok, {TAcc, Count + 1}}
-        end
-    end,
-    Length = length(List),
-    Sorted = lists:sort(List),
-    {ok, _, {[], Length}} = couch_btree:foldl(Btree, FoldFun, {Sorted, 0}),
-    {ok, _, {[], Length}} = couch_btree:fold(Btree, FoldFun, {Sorted, 0}, [{dir, rev}]),
-    ok.
-
-test_lookup_access(Btree, KeyValues) ->
-    FoldFun = fun({Key, Value}, {Key, Value}) -> {stop, true} end,
-    lists:foreach(fun({Key, Value}) ->
-        [{ok, {Key, Value}}] = couch_btree:lookup(Btree, [Key]),
-        {ok, _, true} = couch_btree:foldl(Btree, FoldFun, {Key, Value}, [{start_key, Key}])
-    end, KeyValues).
-
-test_final_reductions(Btree, KeyValues) ->
-    KVLen = length(KeyValues),
-    FoldLFun = fun(_X, LeadingReds, Acc) ->
-        CountToStart = KVLen div 3 + Acc,
-        CountToStart = couch_btree:final_reduce(Btree, LeadingReds),
-        {ok, Acc+1}
-    end,
-    FoldRFun = fun(_X, LeadingReds, Acc) ->
-        CountToEnd = KVLen - KVLen div 3 + Acc,
-        CountToEnd = couch_btree:final_reduce(Btree, LeadingReds),
-        {ok, Acc+1}
-    end,
-    {LStartKey, _} = case KVLen of
-        0 -> {nil, nil};
-        _ -> lists:nth(KVLen div 3 + 1, lists:sort(KeyValues))
-    end,
-    {RStartKey, _} = case KVLen of
-        0 -> {nil, nil};
-        _ -> lists:nth(KVLen div 3, lists:sort(KeyValues))
-    end,
-    {ok, _, FoldLRed} = couch_btree:foldl(Btree, FoldLFun, 0, [{start_key, LStartKey}]),
-    {ok, _, FoldRRed} = couch_btree:fold(Btree, FoldRFun, 0, [{dir, rev}, {start_key, RStartKey}]),
-    KVLen = FoldLRed + FoldRRed,
-    ok.
-
-test_traversal_callbacks(Btree, _KeyValues) ->
-    FoldFun =
-    fun
-        (visit, _GroupedKey, _Unreduced, Acc) ->
-            {ok, Acc andalso false};
-        (traverse, _LK, _Red, Acc) ->
-            {skip, Acc andalso true}
-    end,
-    % With 250 items the root is a kp. Always skipping should reduce to true.
-    {ok, _, true} = couch_btree:fold(Btree, FoldFun, true, [{dir, fwd}]),
-    ok.
-
-shuffle(List) ->
-   randomize(round(math:log(length(List)) + 0.5), List).
-
-randomize(1, List) ->
-   randomize(List);
-randomize(T, List) ->
-    lists:foldl(fun(_E, Acc) ->
-        randomize(Acc)
-    end, randomize(List), lists:seq(1, (T - 1))).
-
-randomize(List) ->
-    D = lists:map(fun(A) ->
-        {random:uniform(), A}
-    end, List),
-    {_, D1} = lists:unzip(lists:keysort(1, D)),
-    D1.
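
The btree operations driven by the 020 test above come down to a handful of calls. A minimal sketch, assuming the couch_btree API shown there (open/3, set_options/2, add_remove/3, lookup/2, foldl/3) and a couch_file Fd opened as in the test:

    demo(Fd) ->
        ReduceFun = fun(reduce, KVs) -> length(KVs);
                       (rereduce, Reds) -> lists:sum(Reds)
                    end,
        {ok, Bt0} = couch_btree:open(nil, Fd, [{compression, none}]),
        Bt1 = couch_btree:set_options(Bt0, [{reduce, ReduceFun}]),
        %% insert three pairs, then delete one key in a second pass
        {ok, Bt2} = couch_btree:add_remove(Bt1, [{a, 1}, {b, 2}, {c, 3}], []),
        {ok, Bt3} = couch_btree:add_remove(Bt2, [], [b]),
        %% lookup returns one result per requested key, in request order
        [{ok, {a, 1}}, not_found] = couch_btree:lookup(Bt3, [a, b]),
        %% foldl visits the two remaining key/values
        {ok, _Reds, 2} = couch_btree:foldl(Bt3, fun(_KV, N) -> {ok, N + 1} end, 0),
        Bt3.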

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/test/etap/021-btree-reductions.t
----------------------------------------------------------------------
diff --git a/test/etap/021-btree-reductions.t b/test/etap/021-btree-reductions.t
deleted file mode 100755
index e80ac2d..0000000
--- a/test/etap/021-btree-reductions.t
+++ /dev/null
@@ -1,237 +0,0 @@
-#!/usr/bin/env escript
-%% -*- erlang -*-
-%%! -pa ./src/couchdb -sasl errlog_type error -boot start_sasl -noshell
-
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
-filename() -> "./test/etap/temp.021".
-rows() -> 1000.
-
-main(_) ->
-    test_util:init_code_path(),
-    etap:plan(20),
-    case (catch test()) of
-        ok ->
-            etap:end_tests();
-        Other ->
-            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
-            etap:bail()
-    end,
-    ok.
-
-test()->
-    ReduceFun = fun
-        (reduce, KVs) -> length(KVs);
-        (rereduce, Reds) -> lists:sum(Reds)
-    end,
-
-    {ok, Fd} = couch_file:open(filename(), [create,overwrite]),
-    {ok, Btree} = couch_btree:open(nil, Fd, [{reduce, ReduceFun}]),
-
-    % Create a list, of {"even", Value} or {"odd", Value} pairs.
-    {_, EvenOddKVs} = lists:foldl(fun(Idx, {Key, Acc}) ->
-        case Key of
-            "even" -> {"odd", [{{Key, Idx}, 1} | Acc]};
-            _ -> {"even", [{{Key, Idx}, 1} | Acc]}
-        end
-    end, {"odd", []}, lists:seq(1, rows())),
-
-    {ok, Btree2} = couch_btree:add_remove(Btree, EvenOddKVs, []),
-
-    GroupFun = fun({K1, _}, {K2, _}) -> K1 == K2 end,
-    FoldFun = fun(GroupedKey, Unreduced, Acc) ->
-        {ok, [{GroupedKey, couch_btree:final_reduce(Btree2, Unreduced)} | Acc]}
-    end,
-
-    {SK1, EK1} = {{"even", -1}, {"even", foo}},
-    {SK2, EK2} = {{"odd", -1}, {"odd", foo}},
-
-    etap:fun_is(
-        fun
-            ({ok, [{{"odd", _}, 500}, {{"even", _}, 500}]}) ->
-                true;
-            (_) ->
-                false
-        end,
-        couch_btree:fold_reduce(Btree2, FoldFun, [], [{key_group_fun, GroupFun}]),
-        "Reduction works with no specified direction, startkey, or endkey."
-    ),
-
-    etap:fun_is(
-        fun
-            ({ok, [{{"odd", _}, 500}, {{"even", _}, 500}]}) ->
-                true;
-            (_) ->
-                false
-        end,
-        couch_btree:fold_reduce(Btree2, FoldFun, [], [{key_group_fun, GroupFun}, {dir, fwd}]),
-        "Reducing forward works with no startkey or endkey."
-    ),
-
-    etap:fun_is(
-        fun
-            ({ok, [{{"even", _}, 500}, {{"odd", _}, 500}]}) ->
-                true;
-            (_) ->
-                false
-        end,
-        couch_btree:fold_reduce(Btree2, FoldFun, [], [{key_group_fun, GroupFun}, {dir, rev}]),
-        "Reducing backwards works with no startkey or endkey."
-    ),
-
-    etap:fun_is(
-        fun
-            ({ok, [{{"odd", _}, 500}, {{"even", _}, 500}]}) ->
-                true;
-            (_) ->
-                false
-        end,
-        couch_btree:fold_reduce(Btree2, FoldFun, [], [{dir, fwd}, {key_group_fun, GroupFun}, {start_key, SK1}, {end_key, EK2}]),
-        "Reducing works over the entire range with startkey and endkey set."
-    ),
-
-    etap:fun_is(
-        fun
-            ({ok, [{{"even", _}, 500}]}) -> true;
-            (_) -> false
-        end,
-        couch_btree:fold_reduce(Btree2, FoldFun, [], [{dir, fwd}, {key_group_fun, GroupFun}, {start_key, SK1}, {end_key, EK1}]),
-        "Reducing forward over first half works with a startkey and endkey."
-    ),
-
-    etap:fun_is(
-        fun
-            ({ok, [{{"odd", _}, 500}]}) -> true;
-            (_) -> false
-        end,
-        couch_btree:fold_reduce(Btree2, FoldFun, [], [{dir, fwd}, {key_group_fun, GroupFun}, {start_key, SK2}, {end_key, EK2}]),
-        "Reducing forward over second half works with second startkey and endkey"
-    ),
-
-    etap:fun_is(
-        fun
-            ({ok, [{{"odd", _}, 500}]}) -> true;
-            (_) -> false
-        end,
-        couch_btree:fold_reduce(Btree2, FoldFun, [], [{dir, rev}, {key_group_fun, GroupFun}, {start_key, EK2}, {end_key, SK2}]),
-        "Reducing in reverse works after swapping the startkey and endkey."
-    ),
-
-    etap:fun_is(
-        fun
-            ({ok, [{{"even", _}, 500}, {{"odd", _}, 500}]}) ->
-                true;
-            (_) ->
-                false
-        end,
-        couch_btree:fold_reduce(Btree2, FoldFun, [], [{dir, rev}, {key_group_fun, GroupFun}, {start_key, EK2}, {end_key, SK1}]),
-        "Reducing in reverse results in reversed accumulator."
-    ),
-
-    etap:is(
-        couch_btree:fold_reduce(Btree2, FoldFun, [], [
-            {dir, fwd}, {key_group_fun, GroupFun},
-            {start_key, {"even", 0}}, {end_key, {"odd", rows() + 1}}
-        ]),
-        {ok, [{{"odd", 1}, 500}, {{"even", 2}, 500}]},
-        "Right fold reduce value for whole range with inclusive end key"),
-
-    etap:is(
-        couch_btree:fold_reduce(Btree2, FoldFun, [], [
-            {dir, fwd}, {key_group_fun, GroupFun},
-            {start_key, {"even", 0}}, {end_key_gt, {"odd", 999}}
-        ]),
-        {ok, [{{"odd", 1}, 499}, {{"even", 2}, 500}]},
-        "Right fold reduce value for whole range without inclusive end key"),
-
-    etap:is(
-        couch_btree:fold_reduce(Btree2, FoldFun, [], [
-            {dir, rev}, {key_group_fun, GroupFun},
-            {start_key, {"odd", 999}}, {end_key, {"even", 2}}
-        ]),
-        {ok, [{{"even", 1000}, 500}, {{"odd", 999}, 500}]},
-        "Right fold reduce value for whole reversed range with inclusive end key"),
-
-    etap:is(
-        couch_btree:fold_reduce(Btree2, FoldFun, [], [
-            {dir, rev}, {key_group_fun, GroupFun},
-            {start_key, {"odd", 999}}, {end_key_gt, {"even", 2}}
-        ]),
-        {ok, [{{"even", 1000}, 499}, {{"odd", 999}, 500}]},
-        "Right fold reduce value for whole reversed range without inclusive end key"),
-
-    etap:is(
-        couch_btree:fold_reduce(Btree2, FoldFun, [], [
-            {dir, fwd}, {key_group_fun, GroupFun},
-            {start_key, {"even", 0}}, {end_key, {"odd", 499}}
-        ]),
-        {ok, [{{"odd", 1}, 250}, {{"even", 2}, 500}]},
-        "Right fold reduce value for first half with inclusive end key"),
-
-    etap:is(
-        couch_btree:fold_reduce(Btree2, FoldFun, [], [
-            {dir, fwd}, {key_group_fun, GroupFun},
-            {start_key, {"even", 0}}, {end_key_gt, {"odd", 499}}
-        ]),
-        {ok, [{{"odd", 1}, 249}, {{"even", 2}, 500}]},
-        "Right fold reduce value for first half without inclusive end key"),
-
-    etap:is(
-        couch_btree:fold_reduce(Btree2, FoldFun, [], [
-            {dir, rev}, {key_group_fun, GroupFun},
-            {start_key, {"odd", 999}}, {end_key, {"even", 500}}
-        ]),
-        {ok, [{{"even", 1000}, 251}, {{"odd", 999}, 500}]},
-        "Right fold reduce value for first half reversed with inclusive end key"),
-
-    etap:is(
-        couch_btree:fold_reduce(Btree2, FoldFun, [], [
-            {dir, rev}, {key_group_fun, GroupFun},
-            {start_key, {"odd", 999}}, {end_key_gt, {"even", 500}}
-        ]),
-        {ok, [{{"even", 1000}, 250}, {{"odd", 999}, 500}]},
-        "Right fold reduce value for first half reversed without inclusive end key"),
-
-    etap:is(
-        couch_btree:fold_reduce(Btree2, FoldFun, [], [
-            {dir, fwd}, {key_group_fun, GroupFun},
-            {start_key, {"even", 500}}, {end_key, {"odd", 999}}
-        ]),
-        {ok, [{{"odd", 1}, 500}, {{"even", 500}, 251}]},
-        "Right fold reduce value for second half with inclusive end key"),
-
-    etap:is(
-        couch_btree:fold_reduce(Btree2, FoldFun, [], [
-            {dir, fwd}, {key_group_fun, GroupFun},
-            {start_key, {"even", 500}}, {end_key_gt, {"odd", 999}}
-        ]),
-        {ok, [{{"odd", 1}, 499}, {{"even", 500}, 251}]},
-        "Right fold reduce value for second half without inclusive end key"),
-
-    etap:is(
-        couch_btree:fold_reduce(Btree2, FoldFun, [], [
-            {dir, rev}, {key_group_fun, GroupFun},
-            {start_key, {"odd", 501}}, {end_key, {"even", 2}}
-        ]),
-        {ok, [{{"even", 1000}, 500}, {{"odd", 501}, 251}]},
-        "Right fold reduce value for second half reversed with inclusive end key"),
-
-    etap:is(
-        couch_btree:fold_reduce(Btree2, FoldFun, [], [
-            {dir, rev}, {key_group_fun, GroupFun},
-            {start_key, {"odd", 501}}, {end_key_gt, {"even", 2}}
-        ]),
-        {ok, [{{"even", 1000}, 499}, {{"odd", 501}, 251}]},
-        "Right fold reduce value for second half reversed without inclusive end key"),
-
-    couch_file:close(Fd).
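
The reduction assertions above all go through couch_btree:fold_reduce/4 with a key_group_fun and optional range options. A minimal sketch, assuming the same {"even"/"odd", N} key shape and the Btree2 built in the test:

    %% One {GroupedKey, Count} row per parity group, forward direction.
    reduce_by_parity(Bt) ->
        GroupFun = fun({K1, _}, {K2, _}) -> K1 == K2 end,
        FoldFun = fun(GroupedKey, Unreduced, Acc) ->
            {ok, [{GroupedKey, couch_btree:final_reduce(Bt, Unreduced)} | Acc]}
        end,
        couch_btree:fold_reduce(Bt, FoldFun, [],
            [{dir, fwd}, {key_group_fun, GroupFun}]).

Called against the Btree2 above this should return {ok, [{{"odd", _}, 500}, {{"even", _}, 500}]}, per the first assertion; adding {start_key, ...} with {end_key, ...} or {end_key_gt, ...} narrows the range exactly as the later assertions show.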

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/test/etap/030-doc-from-json.t
----------------------------------------------------------------------
diff --git a/test/etap/030-doc-from-json.t b/test/etap/030-doc-from-json.t
deleted file mode 100755
index b0c393e..0000000
--- a/test/etap/030-doc-from-json.t
+++ /dev/null
@@ -1,236 +0,0 @@
-#!/usr/bin/env escript
-%% -*- erlang -*-
-%%! -pa ./src/couchdb -pa ./src/mochiweb -sasl errlog_type false -noshell
-
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
-%% XXX: Figure out how to -include("couch_db.hrl")
--record(doc, {id= <<"">>, revs={0, []}, body={[]},
-            atts=[], deleted=false, meta=[]}).
--record(att, {name, type, att_len, disk_len, md5= <<>>, revpos=0, data,
-            encoding=identity}).
-
-main(_) ->
-    test_util:init_code_path(),
-    etap:plan(26),
-    case (catch test()) of
-        ok ->
-            etap:end_tests();
-        Other ->
-            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
-            etap:bail()
-    end,
-    ok.
-
-test() ->
-    couch_config:start_link(test_util:config_files()),
-    couch_config:set("attachments", "compression_level", "0", false),
-    ok = test_from_json_success(),
-    ok = test_from_json_errors(),
-    ok.
-
-test_from_json_success() ->
-    Cases = [
-        {
-            {[]},
-            #doc{},
-            "Return an empty document for an empty JSON object."
-        },
-        {
-            {[{<<"_id">>, <<"zing!">>}]},
-            #doc{id= <<"zing!">>},
-            "Parses document ids."
-        },
-        {
-            {[{<<"_id">>, <<"_design/foo">>}]},
-            #doc{id= <<"_design/foo">>},
-            "_design/document ids."
-        },
-        {
-            {[{<<"_id">>, <<"_local/bam">>}]},
-            #doc{id= <<"_local/bam">>},
-            "_local/document ids."
-        },
-        {
-            {[{<<"_rev">>, <<"4-230234">>}]},
-            #doc{revs={4, [<<"230234">>]}},
-            "_rev stored in revs."
-        },
-        {
-            {[{<<"soap">>, 35}]},
-            #doc{body={[{<<"soap">>, 35}]}},
-            "Non underscore prefixed fields stored in body."
-        },
-        {
-            {[{<<"_attachments">>, {[
-                {<<"my_attachment.fu">>, {[
-                    {<<"stub">>, true},
-                    {<<"content_type">>, <<"application/awesome">>},
-                    {<<"length">>, 45}
-                ]}},
-                {<<"noahs_private_key.gpg">>, {[
-                    {<<"data">>, <<"SSBoYXZlIGEgcGV0IGZpc2gh">>},
-                    {<<"content_type">>, <<"application/pgp-signature">>}
-                ]}}
-            ]}}]},
-            #doc{atts=[
-                #att{
-                    name = <<"my_attachment.fu">>,
-                    data = stub,
-                    type = <<"application/awesome">>,
-                    att_len = 45,
-                    disk_len = 45,
-                    revpos = nil
-                },
-                #att{
-                    name = <<"noahs_private_key.gpg">>,
-                    data = <<"I have a pet fish!">>,
-                    type = <<"application/pgp-signature">>,
-                    att_len = 18,
-                    disk_len = 18,
-                    revpos = 0
-                }
-            ]},
-            "Attachments are parsed correctly."
-        },
-        {
-            {[{<<"_deleted">>, true}]},
-            #doc{deleted=true},
-            "_deleted controls the deleted field."
-        },
-        {
-            {[{<<"_deleted">>, false}]},
-            #doc{},
-            "{\"_deleted\": false} is ok."
-        },
-        {
-            {[
-                {<<"_revisions">>, {[
-                    {<<"start">>, 4},
-                    {<<"ids">>, [<<"foo1">>, <<"phi3">>, <<"omega">>]}
-                ]}},
-                {<<"_rev">>, <<"6-something">>}
-            ]},
-            #doc{revs={4, [<<"foo1">>, <<"phi3">>, <<"omega">>]}},
-            "_revisions attribute are preferred to _rev."
-        },
-        {
-            {[{<<"_revs_info">>, dropping}]},
-            #doc{},
-            "Drops _revs_info."
-        },
-        {
-            {[{<<"_local_seq">>, dropping}]},
-            #doc{},
-            "Drops _local_seq."
-        },
-        {
-            {[{<<"_conflicts">>, dropping}]},
-            #doc{},
-            "Drops _conflicts."
-        },
-        {
-            {[{<<"_deleted_conflicts">>, dropping}]},
-            #doc{},
-            "Drops _deleted_conflicts."
-        }
-    ],
-
-    lists:foreach(fun({EJson, Expect, Mesg}) ->
-        etap:is(couch_doc:from_json_obj(EJson), Expect, Mesg)
-    end, Cases),
-    ok.
-
-test_from_json_errors() ->
-    Cases = [
-        {
-            [],
-            {bad_request, "Document must be a JSON object"},
-            "arrays are invalid"
-        },
-        {
-            4,
-            {bad_request, "Document must be a JSON object"},
-            "integers are invalid"
-        },
-        {
-            true,
-            {bad_request, "Document must be a JSON object"},
-            "literals are invalid"
-        },
-        {
-            {[{<<"_id">>, {[{<<"foo">>, 5}]}}]},
-            {bad_request, <<"Document id must be a string">>},
-            "Document id must be a string."
-        },
-        {
-            {[{<<"_id">>, <<"_random">>}]},
-            {bad_request,
-                <<"Only reserved document ids may start with underscore.">>},
-            "Disallow arbitrary underscore prefixed docids."
-        },
-        {
-            {[{<<"_rev">>, 5}]},
-            {bad_request, <<"Invalid rev format">>},
-            "_rev must be a string"
-        },
-        {
-            {[{<<"_rev">>, "foobar"}]},
-            {bad_request, <<"Invalid rev format">>},
-            "_rev must be %d-%s"
-        },
-        {
-            {[{<<"_rev">>, "foo-bar"}]},
-            "Error if _rev's integer expection is broken."
-        },
-        {
-            {[{<<"_revisions">>, {[{<<"start">>, true}]}}]},
-            {doc_validation, "_revisions.start isn't an integer."},
-            "_revisions.start must be an integer."
-        },
-        {
-            {[{<<"_revisions">>, {[
-                {<<"start">>, 0},
-                {<<"ids">>, 5}
-            ]}}]},
-            {doc_validation, "_revisions.ids isn't a array."},
-            "_revions.ids must be a list."
-        },
-        {
-            {[{<<"_revisions">>, {[
-                {<<"start">>, 0},
-                {<<"ids">>, [5]}
-            ]}}]},
-            {doc_validation, "RevId isn't a string"},
-            "Revision ids must be strings."
-        },
-        {
-            {[{<<"_something">>, 5}]},
-            {doc_validation, <<"Bad special document member: _something">>},
-            "Underscore prefix fields are reserved."
-        }
-    ],
-
-    lists:foreach(fun
-        ({EJson, Expect, Mesg}) ->
-            Error = (catch couch_doc:from_json_obj(EJson)),
-            etap:is(Error, Expect, Mesg);
-        ({EJson, Mesg}) ->
-            try
-                couch_doc:from_json_obj(EJson),
-                etap:ok(false, "Conversion failed to raise an exception.")
-            catch
-                _:_ -> etap:ok(true, Mesg)
-            end
-    end, Cases),
-    ok.
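
As the cases above show, couch_doc:from_json_obj/1 takes EJSON (the {[{Key, Value}]} proplist form produced by couch_util:json_decode/1) and returns a #doc{} record, throwing a bad_request or doc_validation tuple otherwise. A minimal sketch, assuming the #doc record definition from couch_db.hrl (redeclared inline in the test) and the same config setup:

    from_json_sketch() ->
        Doc = couch_doc:from_json_obj({[
            {<<"_id">>, <<"zing!">>},
            {<<"_rev">>, <<"4-230234">>},
            {<<"soap">>, 35}
        ]}),
        <<"zing!">> = Doc#doc.id,
        {4, [<<"230234">>]} = Doc#doc.revs,
        {[{<<"soap">>, 35}]} = Doc#doc.body,
        %% invalid input throws, e.g. a bare JSON array
        {bad_request, _} = (catch couch_doc:from_json_obj([])),
        ok.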

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/test/etap/031-doc-to-json.t
----------------------------------------------------------------------
diff --git a/test/etap/031-doc-to-json.t b/test/etap/031-doc-to-json.t
deleted file mode 100755
index ce950f9..0000000
--- a/test/etap/031-doc-to-json.t
+++ /dev/null
@@ -1,197 +0,0 @@
-#!/usr/bin/env escript
-%% -*- erlang -*-
-%%! -pa ./src/couchdb -pa ./src/mochiweb -sasl errlog_type false -noshell
-
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
-%% XXX: Figure out how to -include("couch_db.hrl")
--record(doc, {id= <<"">>, revs={0, []}, body={[]},
-            atts=[], deleted=false, meta=[]}).
--record(att, {name, type, att_len, disk_len, md5= <<>>, revpos=0, data,
-            encoding=identity}).
-
-main(_) ->
-    test_util:init_code_path(),
-    etap:plan(12),
-    case (catch test()) of
-        ok ->
-            etap:end_tests();
-        Other ->
-            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
-            etap:bail()
-    end,
-    ok.
-
-test() ->
-    couch_config:start_link(test_util:config_files()),
-    couch_config:set("attachments", "compression_level", "0", false),
-    ok = test_to_json_success(),
-    ok.
-
-test_to_json_success() ->
-    Cases = [
-        {
-            #doc{},
-            {[{<<"_id">>, <<"">>}]},
-            "Empty docs are {\"_id\": \"\"}"
-        },
-        {
-            #doc{id= <<"foo">>},
-            {[{<<"_id">>, <<"foo">>}]},
-            "_id is added."
-        },
-        {
-            #doc{revs={5, ["foo"]}},
-            {[{<<"_id">>, <<>>}, {<<"_rev">>, <<"5-foo">>}]},
-            "_rev is added."
-        },
-        {
-            [revs],
-            #doc{revs={5, [<<"first">>, <<"second">>]}},
-            {[
-                {<<"_id">>, <<>>},
-                {<<"_rev">>, <<"5-first">>},
-                {<<"_revisions">>, {[
-                    {<<"start">>, 5},
-                    {<<"ids">>, [<<"first">>, <<"second">>]}
-                ]}}
-            ]},
-            "_revisions include with revs option"
-        },
-        {
-            #doc{body={[{<<"foo">>, <<"bar">>}]}},
-            {[{<<"_id">>, <<>>}, {<<"foo">>, <<"bar">>}]},
-            "Arbitrary fields are added."
-        },
-        {
-            #doc{deleted=true, body={[{<<"foo">>, <<"bar">>}]}},
-            {[{<<"_id">>, <<>>}, {<<"foo">>, <<"bar">>}, {<<"_deleted">>, true}]},
-            "Deleted docs no longer drop body members."
-        },
-        {
-            #doc{meta=[
-                {revs_info, 4, [{<<"fin">>, deleted}, {<<"zim">>, missing}]}
-            ]},
-            {[
-                {<<"_id">>, <<>>},
-                {<<"_revs_info">>, [
-                    {[{<<"rev">>, <<"4-fin">>}, {<<"status">>, <<"deleted">>}]},
-                    {[{<<"rev">>, <<"3-zim">>}, {<<"status">>, <<"missing">>}]}
-                ]}
-            ]},
-            "_revs_info field is added correctly."
-        },
-        {
-            #doc{meta=[{local_seq, 5}]},
-            {[{<<"_id">>, <<>>}, {<<"_local_seq">>, 5}]},
-            "_local_seq is added as an integer."
-        },
-        {
-            #doc{meta=[{conflicts, [{3, <<"yep">>}, {1, <<"snow">>}]}]},
-            {[
-                {<<"_id">>, <<>>},
-                {<<"_conflicts">>, [<<"3-yep">>, <<"1-snow">>]}
-            ]},
-            "_conflicts is added as an array of strings."
-        },
-        {
-            #doc{meta=[{deleted_conflicts, [{10923, <<"big_cowboy_hat">>}]}]},
-            {[
-                {<<"_id">>, <<>>},
-                {<<"_deleted_conflicts">>, [<<"10923-big_cowboy_hat">>]}
-            ]},
-            "_deleted_conflicsts is added as an array of strings."
-        },
-        {
-            #doc{atts=[
-                #att{
-                    name = <<"big.xml">>, 
-                    type = <<"xml/sucks">>, 
-                    data = fun() -> ok end,
-                    revpos = 1,
-                    att_len = 400,
-                    disk_len = 400
-                },
-                #att{
-                    name = <<"fast.json">>, 
-                    type = <<"json/ftw">>, 
-                    data = <<"{\"so\": \"there!\"}">>,
-                    revpos = 1,
-                    att_len = 16,
-                    disk_len = 16
-                }
-            ]},
-            {[
-                {<<"_id">>, <<>>},
-                {<<"_attachments">>, {[
-                    {<<"big.xml">>, {[
-                        {<<"content_type">>, <<"xml/sucks">>},
-                        {<<"revpos">>, 1},
-                        {<<"length">>, 400},
-                        {<<"stub">>, true}
-                    ]}},
-                    {<<"fast.json">>, {[
-                        {<<"content_type">>, <<"json/ftw">>},
-                        {<<"revpos">>, 1},
-                        {<<"length">>, 16},
-                        {<<"stub">>, true}
-                    ]}}
-                ]}}
-            ]},
-            "Attachments attached as stubs only include a length."
-        },
-        {
-            [attachments],
-            #doc{atts=[
-                #att{
-                    name = <<"stuff.txt">>,
-                    type = <<"text/plain">>,
-                    data = fun() -> <<"diet pepsi">> end,
-                    revpos = 1,
-                    att_len = 10,
-                    disk_len = 10
-                },
-                #att{
-                    name = <<"food.now">>,
-                    type = <<"application/food">>,
-                    revpos = 1,
-                    data = <<"sammich">>
-                }
-            ]},
-            {[
-                {<<"_id">>, <<>>},
-                {<<"_attachments">>, {[
-                    {<<"stuff.txt">>, {[
-                        {<<"content_type">>, <<"text/plain">>},
-                        {<<"revpos">>, 1},
-                        {<<"data">>, <<"ZGlldCBwZXBzaQ==">>}
-                    ]}},
-                    {<<"food.now">>, {[
-                        {<<"content_type">>, <<"application/food">>},
-                        {<<"revpos">>, 1},
-                        {<<"data">>, <<"c2FtbWljaA==">>}
-                    ]}}
-                ]}}
-            ]},
-            "Attachments included inline with attachments option."
-        }
-    ],
-
-    lists:foreach(fun
-        ({Doc, EJson, Mesg}) ->
-            etap:is(couch_doc:to_json_obj(Doc, []), EJson, Mesg);
-        ({Options, Doc, EJson, Mesg}) ->
-            etap:is(couch_doc:to_json_obj(Doc, Options), EJson, Mesg)
-    end, Cases),
-    ok.
-
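
couch_doc:to_json_obj/2 goes the other way, from #doc{} back to EJSON; the second argument is the option list ([], [revs], [attachments], ...) that the cases above exercise. A minimal sketch under the same record assumptions:

    to_json_sketch() ->
        Doc = #doc{id = <<"foo">>, revs = {5, [<<"first">>, <<"second">>]}},
        %% default options: only _id and _rev are emitted
        {[{<<"_id">>, <<"foo">>}, {<<"_rev">>, <<"5-first">>}]} =
            couch_doc:to_json_obj(Doc, []),
        %% [revs] adds the full _revisions structure
        {[_, _, {<<"_revisions">>, {[{<<"start">>, 5}, {<<"ids">>, Ids}]}}]} =
            couch_doc:to_json_obj(Doc, [revs]),
        [<<"first">>, <<"second">>] = Ids.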

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/test/etap/040-util.t
----------------------------------------------------------------------
diff --git a/test/etap/040-util.t b/test/etap/040-util.t
deleted file mode 100755
index d57a32e..0000000
--- a/test/etap/040-util.t
+++ /dev/null
@@ -1,80 +0,0 @@
-#!/usr/bin/env escript
-%% -*- erlang -*-
-
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
-main(_) ->
-    test_util:init_code_path(),
-    application:start(crypto),
-
-    etap:plan(14),
-    case (catch test()) of
-        ok ->
-            etap:end_tests();
-        Other ->
-            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
-            etap:bail(Other)
-    end,
-    ok.
-
-test() ->
-    % to_existing_atom
-    etap:is(true, couch_util:to_existing_atom(true), "An atom is an atom."),
-    etap:is(foo, couch_util:to_existing_atom(<<"foo">>),
-        "A binary foo is the atom foo."),
-    etap:is(foobarbaz, couch_util:to_existing_atom("foobarbaz"),
-        "A list of atoms is one munged atom."),
-
-    % implode
-    etap:is([1, 38, 2, 38, 3], couch_util:implode([1,2,3],"&"),
-        "use & as separator in list."),
-
-    % trim
-    Strings = [" foo", "foo ", "\tfoo", " foo ", "foo\t", "foo\n", "\nfoo"],
-    etap:ok(lists:all(fun(S) -> couch_util:trim(S) == "foo" end, Strings),
-        "everything here trimmed should be foo."),
-
-    % abs_pathname
-    {ok, Cwd} = file:get_cwd(),
-    etap:is(Cwd ++ "/foo", couch_util:abs_pathname("./foo"),
-        "foo is in this directory."),
-
-    % should_flush
-    etap:ok(not couch_util:should_flush(),
-        "Not using enough memory to flush."),
-    AcquireMem = fun() ->
-        _IntsToAGazillion = lists:seq(1, 200000),
-        _LotsOfData = lists:map(
-            fun(Int) -> {Int, <<"foobar">>} end,
-        lists:seq(1, 500000)),
-        etap:ok(couch_util:should_flush(),
-            "Allocation 200K tuples puts us above the memory threshold.")
-    end,
-    AcquireMem(),
-
-    etap:ok(not couch_util:should_flush(),
-        "Checking to flush invokes GC."),
-
-    % verify
-    etap:is(true, couch_util:verify("It4Vooya", "It4Vooya"),
-         "String comparison."),
-    etap:is(false, couch_util:verify("It4VooyaX", "It4Vooya"),
-         "String comparison (unequal lengths)."),
-    etap:is(true, couch_util:verify(<<"ahBase3r">>, <<"ahBase3r">>),
-        "Binary comparison."),
-    etap:is(false, couch_util:verify(<<"ahBase3rX">>, <<"ahBase3r">>),
-        "Binary comparison (unequal lengths)."),
-    etap:is(false, couch_util:verify(nil, <<"ahBase3r">>),
-        "Binary comparison with atom."),
-
-    ok.
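
Most of the couch_util helpers covered above are one-liners; verify/2 is the one worth noting, since it compares two strings or binaries without short-circuiting and so is suited to credential checks. A small sketch of the same calls, with values taken from the assertions above:

    util_sketch() ->
        foo = couch_util:to_existing_atom(<<"foo">>),
        "foo" = couch_util:trim("  foo\n"),
        [1, 38, 2, 38, 3] = couch_util:implode([1, 2, 3], "&"),
        %% verify/2 returns false on any mismatch, including unequal lengths
        true  = couch_util:verify(<<"ahBase3r">>, <<"ahBase3r">>),
        false = couch_util:verify(<<"ahBase3rX">>, <<"ahBase3r">>),
        ok.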

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/test/etap/041-uuid-gen-id.ini
----------------------------------------------------------------------
diff --git a/test/etap/041-uuid-gen-id.ini b/test/etap/041-uuid-gen-id.ini
deleted file mode 100644
index 6886efd..0000000
--- a/test/etap/041-uuid-gen-id.ini
+++ /dev/null
@@ -1,20 +0,0 @@
-; Licensed to the Apache Software Foundation (ASF) under one
-; or more contributor license agreements.  See the NOTICE file
-; distributed with this work for additional information
-; regarding copyright ownership.  The ASF licenses this file
-; to you under the Apache License, Version 2.0 (the
-; "License"); you may not use this file except in compliance
-; with the License.  You may obtain a copy of the License at
-;
-;   http://www.apache.org/licenses/LICENSE-2.0
-;
-; Unless required by applicable law or agreed to in writing,
-; software distributed under the License is distributed on an
-; "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-; KIND, either express or implied.  See the License for the
-; specific language governing permissions and limitations
-; under the License.
-
-[uuids]
-algorithm = utc_id
-utc_id_suffix = bozo

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/test/etap/041-uuid-gen-seq.ini
----------------------------------------------------------------------
diff --git a/test/etap/041-uuid-gen-seq.ini b/test/etap/041-uuid-gen-seq.ini
deleted file mode 100644
index 94cebc6..0000000
--- a/test/etap/041-uuid-gen-seq.ini
+++ /dev/null
@@ -1,19 +0,0 @@
-; Licensed to the Apache Software Foundation (ASF) under one
-; or more contributor license agreements.  See the NOTICE file
-; distributed with this work for additional information
-; regarding copyright ownership.  The ASF licenses this file
-; to you under the Apache License, Version 2.0 (the
-; "License"); you may not use this file except in compliance
-; with the License.  You may obtain a copy of the License at
-; 
-;   http://www.apache.org/licenses/LICENSE-2.0
-;
-; Unless required by applicable law or agreed to in writing,
-; software distributed under the License is distributed on an
-; "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-; KIND, either express or implied.  See the License for the
-; specific language governing permissions and limitations
-; under the License.
-
-[uuids]
-algorithm = sequential

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/test/etap/041-uuid-gen-utc.ini
----------------------------------------------------------------------
diff --git a/test/etap/041-uuid-gen-utc.ini b/test/etap/041-uuid-gen-utc.ini
deleted file mode 100644
index c2b8383..0000000
--- a/test/etap/041-uuid-gen-utc.ini
+++ /dev/null
@@ -1,19 +0,0 @@
-; Licensed to the Apache Software Foundation (ASF) under one
-; or more contributor license agreements.  See the NOTICE file
-; distributed with this work for additional information
-; regarding copyright ownership.  The ASF licenses this file
-; to you under the Apache License, Version 2.0 (the
-; "License"); you may not use this file except in compliance
-; with the License.  You may obtain a copy of the License at
-; 
-;   http://www.apache.org/licenses/LICENSE-2.0
-;
-; Unless required by applicable law or agreed to in writing,
-; software distributed under the License is distributed on an
-; "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-; KIND, either express or implied.  See the License for the
-; specific language governing permissions and limitations
-; under the License.
-
-[uuids]
-algorithm = utc_random

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ec7ee43f/test/etap/041-uuid-gen.t
----------------------------------------------------------------------
diff --git a/test/etap/041-uuid-gen.t b/test/etap/041-uuid-gen.t
deleted file mode 100755
index 7234969..0000000
--- a/test/etap/041-uuid-gen.t
+++ /dev/null
@@ -1,147 +0,0 @@
-#!/usr/bin/env escript
-%% -*- erlang -*-
-
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
-default_config() ->
-    test_util:build_file("etc/couchdb/default_dev.ini").
-
-seq_alg_config() ->
-    test_util:source_file("test/etap/041-uuid-gen-seq.ini").
-
-utc_alg_config() ->
-    test_util:source_file("test/etap/041-uuid-gen-utc.ini").
-
-utc_id_alg_config() ->
-    test_util:source_file("test/etap/041-uuid-gen-id.ini").
-
-% Run tests and wait for the gen_servers to shutdown
-run_test(IniFiles, Test) ->
-    {ok, Pid} = couch_config:start_link(IniFiles),
-    erlang:monitor(process, Pid),
-    couch_uuids:start(),
-    Test(),
-    couch_uuids:stop(),
-    couch_config:stop(),
-    receive
-        {'DOWN', _, _, Pid, _} -> ok;
-        _Other -> etap:diag("OTHER: ~p~n", [_Other])
-    after
-        1000 -> throw({timeout_error, config_stop})
-    end.
-
-main(_) ->
-    test_util:init_code_path(),
-    application:start(crypto),
-    etap:plan(9),
-
-    case (catch test()) of
-        ok ->
-            etap:end_tests();
-        Other ->
-            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
-            etap:bail(Other)
-    end,
-    ok.
-
-test() ->
-
-    TestUnique = fun() ->
-        etap:is(
-            test_unique(10000, couch_uuids:new()),
-            true,
-            "Can generate 10K unique IDs"
-        )
-    end,
-    run_test([default_config()], TestUnique),
-    run_test([default_config(), seq_alg_config()], TestUnique),
-    run_test([default_config(), utc_alg_config()], TestUnique),
-    run_test([default_config(), utc_id_alg_config()], TestUnique),
-
-    TestMonotonic = fun () ->
-        etap:is(
-            couch_uuids:new() < couch_uuids:new(),
-            true,
-            "should produce monotonically increasing ids"
-        )
-    end,
-    run_test([default_config(), seq_alg_config()], TestMonotonic),
-    run_test([default_config(), utc_alg_config()], TestMonotonic),
-    run_test([default_config(), utc_id_alg_config()], TestMonotonic),
-
-    % Pretty sure that the average of a uniform distribution is the
-    % midpoint of the range. Thus, to exceed a threshold, we need
-    % approximately Total / (Range/2 + RangeMin) samples.
-    %
-    % In our case this works out to be 8194. (0xFFF000 / 0x7FF)
-    % These tests just fudge the limits for a good generator at 25%
-    % in either direction. Technically it should be possible to generate
-    % bounds that will show if your random number generator is not
-    % sufficiently random but I hated statistics in school.
-    TestRollOver = fun() ->
-        UUID = binary_to_list(couch_uuids:new()),
-        Prefix = element(1, lists:split(26, UUID)),
-        N = gen_until_pref_change(Prefix,0),
-        etap:diag("N is: ~p~n",[N]),                           
-        etap:is(
-            N >= 5000 andalso N =< 11000,
-            true,
-            "should roll over every so often."
-        )
-    end,
-    run_test([default_config(), seq_alg_config()], TestRollOver),
-
-    TestSuffix = fun() ->
-        UUID = binary_to_list(couch_uuids:new()),
-        Suffix = get_suffix(UUID),
-        etap:is(
-            test_same_suffix(100, Suffix),
-            true,
-            "utc_id ids should have the same suffix."
-        )
-    end,
-    run_test([default_config(), utc_id_alg_config()], TestSuffix).
-
-test_unique(0, _) ->
-    true;
-test_unique(N, UUID) ->
-    case couch_uuids:new() of
-        UUID ->
-            etap:diag("N: ~p~n", [N]),
-            false;
-        Else -> test_unique(N-1, Else)
-    end.
-
-get_prefix(UUID) ->
-    element(1, lists:split(26, binary_to_list(UUID))).
-
-gen_until_pref_change(_, Count) when Count > 8251 ->
-    Count;
-gen_until_pref_change(Prefix, N) ->
-    case get_prefix(couch_uuids:new()) of
-        Prefix -> gen_until_pref_change(Prefix, N+1);
-        _ -> N
-    end.
-
-get_suffix(UUID) when is_binary(UUID)->
-    get_suffix(binary_to_list(UUID));
-get_suffix(UUID) ->
-    element(2, lists:split(14, UUID)).
-
-test_same_suffix(0, _) ->
-    true;
-test_same_suffix(N, Suffix) ->
-    case get_suffix(couch_uuids:new()) of
-        Suffix -> test_same_suffix(N-1, Suffix);
-        _ -> false
-    end.
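
Outside of etap, the same algorithm switch the three .ini files above provide can be made through couch_config at runtime. A minimal sketch, assuming the couch_config/couch_uuids API used in the test and the default_config() helper defined above; the trailing false means the setting is not persisted:

    uuid_sketch() ->
        {ok, _} = couch_config:start_link([default_config()]),
        couch_config:set("uuids", "algorithm", "sequential", false),
        couch_uuids:start(),
        UUID1 = couch_uuids:new(),
        UUID2 = couch_uuids:new(),
        %% sequential ids are monotonically increasing
        true = UUID1 < UUID2,
        couch_uuids:stop(),
        couch_config:stop().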


[12/12] git commit: updated refs/heads/1994-merge-rcouch to 5b9e825

Posted by be...@apache.org.
move src/apps/couch_rel -> src/support/couch_rel

couch_rel is not a CouchDB application; its only purpose is to provide
etop to the release.


Project: http://git-wip-us.apache.org/repos/asf/couchdb/repo
Commit: http://git-wip-us.apache.org/repos/asf/couchdb/commit/5b9e825d
Tree: http://git-wip-us.apache.org/repos/asf/couchdb/tree/5b9e825d
Diff: http://git-wip-us.apache.org/repos/asf/couchdb/diff/5b9e825d

Branch: refs/heads/1994-merge-rcouch
Commit: 5b9e825d84cb6a109c74bb7870fe1fe72269bc09
Parents: ec7ee43
Author: Benoit Chesneau <be...@apache.org>
Authored: Thu Jan 9 01:37:03 2014 +0100
Committer: Benoit Chesneau <be...@apache.org>
Committed: Thu Jan 9 01:37:03 2014 +0100

----------------------------------------------------------------------
 Makefile                                 |   3 +
 rebar.config                             |   2 +-
 rel/reltool.config.script                |   2 +-
 src/apps/couch_rel/ebin/couch_rel.app    |  18 ----
 src/apps/couch_rel/src/etop_txt.erl      | 134 --------------------------
 src/support/couch_rel/ebin/couch_rel.app |  18 ++++
 src/support/couch_rel/src/etop_txt.erl   | 134 ++++++++++++++++++++++++++
 7 files changed, 157 insertions(+), 154 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/couchdb/blob/5b9e825d/Makefile
----------------------------------------------------------------------
diff --git a/Makefile b/Makefile
index 2b09f43..01858cd 100644
--- a/Makefile
+++ b/Makefile
@@ -43,3 +43,6 @@ rel: deps compile generate
 
 relclean:
 	@rm -rf rel/apache-couchdb
+
+doc:
+

http://git-wip-us.apache.org/repos/asf/couchdb/blob/5b9e825d/rebar.config
----------------------------------------------------------------------
diff --git a/rebar.config b/rebar.config
index 8950111..4901462 100644
--- a/rebar.config
+++ b/rebar.config
@@ -44,7 +44,7 @@
     "src/apps/couch_replicator",
     "src/apps/couch_plugins",
     "src/apps/couch_dbupdates",
-    "src/apps/couch_rel",
+    "src/support/couch_rel",
     "rel"
 ]}.
 

http://git-wip-us.apache.org/repos/asf/couchdb/blob/5b9e825d/rel/reltool.config.script
----------------------------------------------------------------------
diff --git a/rel/reltool.config.script b/rel/reltool.config.script
index 7e2c6bc..a63c908 100644
--- a/rel/reltool.config.script
+++ b/rel/reltool.config.script
@@ -148,7 +148,7 @@ CouchJSName = proplists:get_value(couchjs_name, Cfg, "couchjs"),
 
         %% misc
         {mkdir, "lib/couch-patches"},
-        {copy, "../src/apps/couch_rel/ebin/etop_txt.beam", "lib/couch-patches"}
+        {copy, "../src/support/couch_rel/ebin/etop_txt.beam", "lib/couch-patches"}
 
     ]}
 ].

http://git-wip-us.apache.org/repos/asf/couchdb/blob/5b9e825d/src/apps/couch_rel/ebin/couch_rel.app
----------------------------------------------------------------------
diff --git a/src/apps/couch_rel/ebin/couch_rel.app b/src/apps/couch_rel/ebin/couch_rel.app
deleted file mode 100644
index 2d207c9..0000000
--- a/src/apps/couch_rel/ebin/couch_rel.app
+++ /dev/null
@@ -1,18 +0,0 @@
-%% -*- tab-width: 4;erlang-indent-level: 4;indent-tabs-mode: nil -*-
-%% ex: ft=erlang ts=4 sw=4 et
-%%
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
-{application, couch_rel, [
-    {description,"couch extremely-early boot items"},
-        {modules, [etop_txt]}]}.

http://git-wip-us.apache.org/repos/asf/couchdb/blob/5b9e825d/src/apps/couch_rel/src/etop_txt.erl
----------------------------------------------------------------------
diff --git a/src/apps/couch_rel/src/etop_txt.erl b/src/apps/couch_rel/src/etop_txt.erl
deleted file mode 100644
index 031f5d8..0000000
--- a/src/apps/couch_rel/src/etop_txt.erl
+++ /dev/null
@@ -1,134 +0,0 @@
-%%
-%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2002-2009. All Rights Reserved.
-%%
-%% The contents of this file are subject to the Erlang Public License,
-%% Version 1.1, (the "License"); you may not use this file except in
-%% compliance with the License. You should have received a copy of the
-%% Erlang Public License along with this software. If not, it can be
-%% retrieved online at http://www.erlang.org/.
-%%
-%% Software distributed under the License is distributed on an "AS IS"
-%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
-%% the License for the specific language governing rights and limitations
-%% under the License.
-%%
-%% %CopyrightEnd%
-%%
--module(etop_txt).
--author('siri@erix.ericsson.se').
--author('scott@basho.com').
-
-%%-compile(export_all).
--export([init/1,stop/1]).
--export([do_update/3]).
-
-%%-include("etop.hrl").
--record(etop_proc_info,
-        {pid,
-         mem=0,
-         reds=0,
-         name,
-         runtime=0,
-         cf,
-         mq=0}).
--record(etop_info,
-        {now = {0, 0, 0},
-         n_procs = 0,
-         wall_clock = {0, 0},
-         runtime = {0, 0},
-         run_queue = 0,
-         alloc_areas = [],
-         memi = [{total, 0},
-                 {processes, 0},
-                 {ets, 0},
-                 {atom, 0},
-                 {code, 0},
-                 {binary, 0}],
-         procinfo = []
-        }).
-%%-include("etop_defs.hrl").
--define(SYSFORM,
-        " ~-72w~10s~n"
-        " Load:  cpu  ~8w               Memory:  total    ~8w    binary   ~8w~n"
-        "        procs~8w                        processes~8w    code     ~8w~n"
-        "        runq ~8w                        atom     ~8w    ets      ~8w~n").
--record(opts, {node=node(), port = 8415, accum = false, intv = 5000, lines = 10,
-               width = 700, height = 340, sort = runtime, tracing = on,
-               %% Other state information
-               out_mod=etop_gui, out_proc, server, host, tracer, store,
-               accum_tab, remote}).
-
--import(etop,[loadinfo/1,meminfo/2]).
--import(etop_gui,[formatmfa/1,to_list/1]).
-
--define(PROCFORM,"~-20w~-25s~8w~11w~11w~11w ~-40s~n").
-
-stop(Pid) -> Pid ! stop.
-
-init(Config) ->
-    loop(Config).
-
-loop(Config) ->
-    Info = do_update(Config),
-    receive
-	stop -> stopped;
-	{dump,Fd} -> do_update(Fd,Info,Config), loop(Config);
-	{config,_,Config1} -> loop(Config1)
-    after Config#opts.intv-500 -> loop(Config)
-    end.
-
-do_update(Config) ->
-    Info = etop:update(Config),
-    do_update(standard_io,Info,Config).
-
-do_update(Fd,Info,Config) ->
-    {Cpu,NProcs,RQ,Clock} = loadinfo(Info),
-    io:nl(Fd),
-    writedoubleline(Fd),
-    case Info#etop_info.memi of
-	undefined ->
-	    io:fwrite(Fd, " ~-72w~10s~n"
-		      " Load:  cpu  ~8w~n"
-		      "        procs~8w~n"
-		      "        runq ~8w~n",
-		      [Config#opts.node,Clock,
-		       Cpu,NProcs,RQ]);
-	Memi ->
-	    [Tot,Procs,Atom,Bin,Code,Ets] =
-		meminfo(Memi, [total,processes,atom,binary,code,ets]),
-	    io:fwrite(Fd, ?SYSFORM,
-		      [Config#opts.node,Clock,
-		       Cpu,Tot,Bin,
-		       NProcs,Procs,Code,
-		       RQ,Atom,Ets])
-    end,
-    io:nl(Fd),
-    writepinfo_header(Fd),
-    writesingleline(Fd),
-    writepinfo(Fd,Info#etop_info.procinfo),
-    %%writedoubleline(Fd),
-    %%io:nl(Fd),
-    Info.
-
-writepinfo_header(Fd) ->
-    io:fwrite(Fd,"Pid                 Name or Initial Func         Time       Reds     Memory       MsgQ Current Function~n",[]).
-
-writesingleline(Fd) ->
-    io:fwrite(Fd,"-------------------------------------------------------------------------------------------------------------------------------~n",[]).
-writedoubleline(Fd) ->
-    io:fwrite(Fd,"===============================================================================================================================~n",[]).
-
-writepinfo(Fd,[#etop_proc_info{pid=Pid,
-			       mem=Mem,
-			       reds=Reds,
-			       name=Name,
-			       runtime=Time,
-			       cf=MFA,
-			       mq=MQ}
-	       |T]) ->
-    io:fwrite(Fd,?PROCFORM,[Pid,to_list(Name),Time,Reds,Mem,MQ,formatmfa(MFA)]),
-    writepinfo(Fd,T);
-writepinfo(_Fd,[]) ->
-    ok.

http://git-wip-us.apache.org/repos/asf/couchdb/blob/5b9e825d/src/support/couch_rel/ebin/couch_rel.app
----------------------------------------------------------------------
diff --git a/src/support/couch_rel/ebin/couch_rel.app b/src/support/couch_rel/ebin/couch_rel.app
new file mode 100644
index 0000000..2d207c9
--- /dev/null
+++ b/src/support/couch_rel/ebin/couch_rel.app
@@ -0,0 +1,18 @@
+%% -*- tab-width: 4;erlang-indent-level: 4;indent-tabs-mode: nil -*-
+%% ex: ft=erlang ts=4 sw=4 et
+%%
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+%   http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+{application, couch_rel, [
+    {description,"couch extremely-early boot items"},
+        {modules, [etop_txt]}]}.

http://git-wip-us.apache.org/repos/asf/couchdb/blob/5b9e825d/src/support/couch_rel/src/etop_txt.erl
----------------------------------------------------------------------
diff --git a/src/support/couch_rel/src/etop_txt.erl b/src/support/couch_rel/src/etop_txt.erl
new file mode 100644
index 0000000..031f5d8
--- /dev/null
+++ b/src/support/couch_rel/src/etop_txt.erl
@@ -0,0 +1,134 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2002-2009. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+-module(etop_txt).
+-author('siri@erix.ericsson.se').
+-author('scott@basho.com').
+
+%%-compile(export_all).
+-export([init/1,stop/1]).
+-export([do_update/3]).
+
+%%-include("etop.hrl").
+-record(etop_proc_info,
+        {pid,
+         mem=0,
+         reds=0,
+         name,
+         runtime=0,
+         cf,
+         mq=0}).
+-record(etop_info,
+        {now = {0, 0, 0},
+         n_procs = 0,
+         wall_clock = {0, 0},
+         runtime = {0, 0},
+         run_queue = 0,
+         alloc_areas = [],
+         memi = [{total, 0},
+                 {processes, 0},
+                 {ets, 0},
+                 {atom, 0},
+                 {code, 0},
+                 {binary, 0}],
+         procinfo = []
+        }).
+%%-include("etop_defs.hrl").
+-define(SYSFORM,
+        " ~-72w~10s~n"
+        " Load:  cpu  ~8w               Memory:  total    ~8w    binary   ~8w~n"
+        "        procs~8w                        processes~8w    code     ~8w~n"
+        "        runq ~8w                        atom     ~8w    ets      ~8w~n").
+-record(opts, {node=node(), port = 8415, accum = false, intv = 5000, lines = 10,
+               width = 700, height = 340, sort = runtime, tracing = on,
+               %% Other state information
+               out_mod=etop_gui, out_proc, server, host, tracer, store,
+               accum_tab, remote}).
+
+-import(etop,[loadinfo/1,meminfo/2]).
+-import(etop_gui,[formatmfa/1,to_list/1]).
+
+-define(PROCFORM,"~-20w~-25s~8w~11w~11w~11w ~-40s~n").
+
+stop(Pid) -> Pid ! stop.
+
+init(Config) ->
+    loop(Config).
+
+loop(Config) ->
+    Info = do_update(Config),
+    receive
+	stop -> stopped;
+	{dump,Fd} -> do_update(Fd,Info,Config), loop(Config);
+	{config,_,Config1} -> loop(Config1)
+    after Config#opts.intv-500 -> loop(Config)
+    end.
+
+do_update(Config) ->
+    Info = etop:update(Config),
+    do_update(standard_io,Info,Config).
+
+do_update(Fd,Info,Config) ->
+    {Cpu,NProcs,RQ,Clock} = loadinfo(Info),
+    io:nl(Fd),
+    writedoubleline(Fd),
+    case Info#etop_info.memi of
+	undefined ->
+	    io:fwrite(Fd, " ~-72w~10s~n"
+		      " Load:  cpu  ~8w~n"
+		      "        procs~8w~n"
+		      "        runq ~8w~n",
+		      [Config#opts.node,Clock,
+		       Cpu,NProcs,RQ]);
+	Memi ->
+	    [Tot,Procs,Atom,Bin,Code,Ets] =
+		meminfo(Memi, [total,processes,atom,binary,code,ets]),
+	    io:fwrite(Fd, ?SYSFORM,
+		      [Config#opts.node,Clock,
+		       Cpu,Tot,Bin,
+		       NProcs,Procs,Code,
+		       RQ,Atom,Ets])
+    end,
+    io:nl(Fd),
+    writepinfo_header(Fd),
+    writesingleline(Fd),
+    writepinfo(Fd,Info#etop_info.procinfo),
+    %%writedoubleline(Fd),
+    %%io:nl(Fd),
+    Info.
+
+writepinfo_header(Fd) ->
+    io:fwrite(Fd,"Pid                 Name or Initial Func         Time       Reds     Memory       MsgQ Current Function~n",[]).
+
+writesingleline(Fd) ->
+    io:fwrite(Fd,"-------------------------------------------------------------------------------------------------------------------------------~n",[]).
+writedoubleline(Fd) ->
+    io:fwrite(Fd,"===============================================================================================================================~n",[]).
+
+writepinfo(Fd,[#etop_proc_info{pid=Pid,
+			       mem=Mem,
+			       reds=Reds,
+			       name=Name,
+			       runtime=Time,
+			       cf=MFA,
+			       mq=MQ}
+	       |T]) ->
+    io:fwrite(Fd,?PROCFORM,[Pid,to_list(Name),Time,Reds,Mem,MQ,formatmfa(MFA)]),
+    writepinfo(Fd,T);
+writepinfo(_Fd,[]) ->
+    ok.
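
With the patched etop_txt.beam copied into lib/couch-patches, the text monitor can be driven from a shell attached to the running release via stock OTP etop; a call along the following lines should work, where the interval, lines and sort values are only examples:

    %% from a remote shell on the running node
    etop:start([{output, text}, {interval, 10}, {lines, 20}, {sort, memory}]).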