You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@couchdb.apache.org by kx...@apache.org on 2014/06/03 17:54:39 UTC

[01/36] couchdb commit: updated refs/heads/1963-eunit to 85f2750

Repository: couchdb
Updated Branches:
  refs/heads/1963-eunit 16528f8d5 -> 85f27505f (forced update)


Add run script to execute eunit tests

Usage is the same as for test/etap/run:

    ./test/couchdb/run -v ${PATH}

-v runs in verbose mode, as etap does. Also, you can use make for that:

    make check-eunit

which will run tests everywhere in project where Makefile contains
check-eunit subcommand definition.

The ${PATH} thing could be single file or directory. The latter should
contains *_tests.erl files which would be compiled and executed by
eunit.

The *_tests.erl

The reason of compiling on run instead of using autoconf for that is
to simplify tests developing and avoid situations, when you'd fixed
the test or add new one, but forgot to remove/compile beam file.

All test_*.beam files are been stored in test/couchdb/ebin directory,
the temporary test files will be places to test/couchdb/temp one.
Both directories will be removed by make clean/distclean command.


Project: http://git-wip-us.apache.org/repos/asf/couchdb/repo
Commit: http://git-wip-us.apache.org/repos/asf/couchdb/commit/2652d1ef
Tree: http://git-wip-us.apache.org/repos/asf/couchdb/tree/2652d1ef
Diff: http://git-wip-us.apache.org/repos/asf/couchdb/diff/2652d1ef

Branch: refs/heads/1963-eunit
Commit: 2652d1ef5cc3135c7da2123f8f515723446af1f8
Parents: ad8e28c
Author: Alexander Shorin <kx...@apache.org>
Authored: Fri May 16 00:08:36 2014 +0400
Committer: Alexander Shorin <kx...@apache.org>
Committed: Tue Jun 3 02:16:34 2014 +0400

----------------------------------------------------------------------
 .gitignore               |   2 +-
 Makefile.am              |   5 +++
 configure.ac             |   2 +
 license.skip             |   2 +
 test/Makefile.am         |   2 +-
 test/couchdb/Makefile.am |  24 ++++++++++
 test/couchdb/run.in      | 102 ++++++++++++++++++++++++++++++++++++++++++
 7 files changed, 137 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/couchdb/blob/2652d1ef/.gitignore
----------------------------------------------------------------------
diff --git a/.gitignore b/.gitignore
index 4b12bf7..85e2365 100644
--- a/.gitignore
+++ b/.gitignore
@@ -128,9 +128,9 @@ src/snappy/snappy.app
 stamp-h1
 test/.deps/
 test/bench/run
+test/couchdb/run
 test/etap/.deps/
 test/etap/run
-test/etap/run
 test/etap/temp.*
 test/etap/test_cfg_register
 test/etap/test_util.erl

http://git-wip-us.apache.org/repos/asf/couchdb/blob/2652d1ef/Makefile.am
----------------------------------------------------------------------
diff --git a/Makefile.am b/Makefile.am
index 22809f8..413c1a8 100644
--- a/Makefile.am
+++ b/Makefile.am
@@ -92,6 +92,11 @@ if TESTS
 	$(top_builddir)/test/etap/run $(top_srcdir)/test/etap
 endif
 
+check-eunit: dev
+if TESTS
+	$(top_builddir)/test/couchdb/run -v $(top_srcdir)/test/couchdb
+endif
+
 cover: dev
 if TESTS
 	rm -f cover/*.coverdata

http://git-wip-us.apache.org/repos/asf/couchdb/blob/2652d1ef/configure.ac
----------------------------------------------------------------------
diff --git a/configure.ac b/configure.ac
index 01d0a7b..4b34811 100644
--- a/configure.ac
+++ b/configure.ac
@@ -753,6 +753,8 @@ AC_CONFIG_FILES([src/snappy/google-snappy/snappy-stubs-public.h])
 AC_CONFIG_FILES([src/ejson/Makefile])
 AC_CONFIG_FILES([test/Makefile])
 AC_CONFIG_FILES([test/bench/Makefile])
+AC_CONFIG_FILES([test/couchdb/run])
+AC_CONFIG_FILES([test/couchdb/Makefile])
 AC_CONFIG_FILES([test/etap/Makefile])
 AC_CONFIG_FILES([test/etap/test_util.erl])
 AC_CONFIG_FILES([test/javascript/Makefile])

http://git-wip-us.apache.org/repos/asf/couchdb/blob/2652d1ef/license.skip
----------------------------------------------------------------------
diff --git a/license.skip b/license.skip
index f5e49da..5d10e6c 100644
--- a/license.skip
+++ b/license.skip
@@ -168,6 +168,8 @@
 ^test/Makefile.in
 ^test/bench/Makefile
 ^test/bench/Makefile.in
+^test/couchdb/Makefile
+^test/couchdb/Makefile.in
 ^test/etap/.*.beam
 ^test/etap/.*.o
 ^test/etap/.deps/.*

http://git-wip-us.apache.org/repos/asf/couchdb/blob/2652d1ef/test/Makefile.am
----------------------------------------------------------------------
diff --git a/test/Makefile.am b/test/Makefile.am
index 7c70a5a..1237c6f 100644
--- a/test/Makefile.am
+++ b/test/Makefile.am
@@ -10,6 +10,6 @@
 ## License for the specific language governing permissions and limitations under
 ## the License.
 
-SUBDIRS = bench etap javascript view_server
+SUBDIRS = bench couchdb etap javascript view_server
 EXTRA_DIST = random_port.ini
 

http://git-wip-us.apache.org/repos/asf/couchdb/blob/2652d1ef/test/couchdb/Makefile.am
----------------------------------------------------------------------
diff --git a/test/couchdb/Makefile.am b/test/couchdb/Makefile.am
new file mode 100644
index 0000000..5bd65dd
--- /dev/null
+++ b/test/couchdb/Makefile.am
@@ -0,0 +1,24 @@
+## Licensed under the Apache License, Version 2.0 (the "License"); you may not
+## use this file except in compliance with the License. You may obtain a copy of
+## the License at
+##
+##   http://www.apache.org/licenses/LICENSE-2.0
+##
+## Unless required by applicable law or agreed to in writing, software
+## distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+## WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+## License for the specific language governing permissions and limitations under
+## the License.
+
+noinst_SCRIPTS = run
+
+all:
+	mkdir -p {ebin,temp}
+	chmod +x run
+
+EXTRA_DIST = \
+    run.in
+
+clean-local:
+	rm -rf ebin
+	rm -rf temp

http://git-wip-us.apache.org/repos/asf/couchdb/blob/2652d1ef/test/couchdb/run.in
----------------------------------------------------------------------
diff --git a/test/couchdb/run.in b/test/couchdb/run.in
new file mode 100644
index 0000000..80f4041
--- /dev/null
+++ b/test/couchdb/run.in
@@ -0,0 +1,102 @@
+#!/usr/bin/env escript
+%% -*- erlang -*-
+%%! -DTEST -pa @abs_top_builddir@/test/couchdb/ebin
+%%
+%% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+%% use this file except in compliance with the License. You may obtain a copy of
+%% the License at
+%%
+%%   http://www.apache.org/licenses/LICENSE-2.0
+%%
+%% Unless required by applicable law or agreed to in writing, software
+%% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+%% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+%% License for the specific language governing permissions and limitations under
+%% the License.
+
+-define(BUILDDIR, "@abs_top_builddir@").
+-define(TESTS_EBIN, filename:join([?BUILDDIR, "test", "couchdb", "ebin"])).
+-define(TESTS_TEMP, filename:join([?BUILDDIR, "test", "couchdb", "temp"])).
+
+main([]) ->
+    io:fwrite("Path to test file or directory wasn't specified.~n"),
+    erlang:halt(1);
+main(["-v"]) ->
+    io:fwrite("Path to test file or directory wasn't specified.~n"),
+    erlang:halt(1);
+main(["-v", Path]) ->
+    run(Path, [verbose]);
+main(["-v", _ | _]) ->
+    io:fwrite("Only single tests source path is supported.~n"),
+    erlang:halt(1);
+main([Path]) ->
+    run(Path, []),
+    ok;
+main([_|_]) ->
+    io:fwrite("Only single tests source path is supported.~n"),
+    erlang:halt(1).
+
+
+run(Path, Options) ->
+    Mask = "*_tests.erl",
+    Files = list_files(Path, Mask),
+    Mods = compile(Files),
+    run_tests(Mods, Options).
+
+
+list_files(Path, Mask)->
+    AbsPath = filename:absname(Path),
+    case filelib:is_file(AbsPath) of
+        true ->
+            ok;
+        false ->
+            io:fwrite("File or directory not found: ~p~n", [AbsPath]),
+            erlang:halt(1)
+    end,
+    case filelib:is_dir(AbsPath) of
+        true ->
+            case filelib:wildcard(filename:join([AbsPath, Mask])) of
+                [] ->
+                    io:fwrite("No test files was found at ~p by mask ~p ~n",
+                              [AbsPath, Mask]),
+                    erlang:halt(1);
+                Files ->
+                    Files
+            end;
+        false -> [AbsPath]
+    end.
+
+
+compile(Files) ->
+    ok = filelib:ensure_dir(?TESTS_EBIN),
+    lists:map(
+        fun(File)->
+            io:fwrite("compile ~p~n", [File]),
+            {ok, Mod} = compile:file(File,
+                                     [report, verbose, {outdir, ?TESTS_EBIN}]),
+            Mod
+        end,
+    Files).
+
+
+run_tests(Mods, Options) ->
+    init_code_path(),
+    ok = filelib:ensure_dir(?TESTS_TEMP),
+    case eunit:test(Mods, Options) of
+        error -> erlang:halt(1);
+        _ -> ok
+    end.
+
+
+init_code_path() ->
+    Paths = [
+        "couchdb",
+        "ejson",
+        "erlang-oauth",
+        "ibrowse",
+        "mochiweb",
+        "snappy"
+    ],
+    lists:foreach(fun(Name) ->
+        code:add_patha(filename:join([?BUILDDIR, "src", Name]))
+    end, Paths).


[36/36] couchdb commit: updated refs/heads/1963-eunit to 85f2750

Posted by kx...@apache.org.
Port 173-os-daemons-cfg-register.t etap test suite to eunit

Merged into couchdb_os_daemons_tests suite.


Project: http://git-wip-us.apache.org/repos/asf/couchdb/repo
Commit: http://git-wip-us.apache.org/repos/asf/couchdb/commit/85f27505
Tree: http://git-wip-us.apache.org/repos/asf/couchdb/tree/85f27505
Diff: http://git-wip-us.apache.org/repos/asf/couchdb/diff/85f27505

Branch: refs/heads/1963-eunit
Commit: 85f27505ffb132ce61e916a5b36f82fc2fa11510
Parents: c0ed8cf
Author: Alexander Shorin <kx...@apache.org>
Authored: Tue Jun 3 19:31:44 2014 +0400
Committer: Alexander Shorin <kx...@apache.org>
Committed: Tue Jun 3 19:53:23 2014 +0400

----------------------------------------------------------------------
 configure.ac                              |   1 +
 license.skip                              |   4 +
 test/couchdb/Makefile.am                  |   2 +
 test/couchdb/couchdb_os_daemons_tests.erl | 102 ++++++++++++++++++++--
 test/couchdb/fixtures/Makefile.am         |  15 ++++
 test/couchdb/fixtures/test_cfg_register.c |  31 +++++++
 test/etap/173-os-daemon-cfg-register.t    | 116 -------------------------
 test/etap/Makefile.am                     |   5 --
 test/etap/test_cfg_register.c             |  31 -------
 9 files changed, 150 insertions(+), 157 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/couchdb/blob/85f27505/configure.ac
----------------------------------------------------------------------
diff --git a/configure.ac b/configure.ac
index 4d45a97..aa293e3 100644
--- a/configure.ac
+++ b/configure.ac
@@ -755,6 +755,7 @@ AC_CONFIG_FILES([test/Makefile])
 AC_CONFIG_FILES([test/bench/Makefile])
 AC_CONFIG_FILES([test/couchdb/run])
 AC_CONFIG_FILES([test/couchdb/Makefile])
+AC_CONFIG_FILES([test/couchdb/fixtures/Makefile])
 AC_CONFIG_FILES([test/couchdb/couchdb_tests.hrl])
 AC_CONFIG_FILES([test/etap/Makefile])
 AC_CONFIG_FILES([test/etap/test_util.erl])

http://git-wip-us.apache.org/repos/asf/couchdb/blob/85f27505/license.skip
----------------------------------------------------------------------
diff --git a/license.skip b/license.skip
index b069451..ec5df1e 100644
--- a/license.skip
+++ b/license.skip
@@ -171,6 +171,10 @@
 ^test/couchdb/Makefile
 ^test/couchdb/Makefile.in
 ^test/couchdb/fixtures/logo.png
+^test/couchdb/fixtures/Makefile
+^test/couchdb/fixtures/Makefile.in
+^test/couchdb/fixtures/test_cfg_register
+^test/couchdb/fixtures/test_cfg_register.o
 ^test/etap/.*.beam
 ^test/etap/.*.o
 ^test/etap/.deps/.*

http://git-wip-us.apache.org/repos/asf/couchdb/blob/85f27505/test/couchdb/Makefile.am
----------------------------------------------------------------------
diff --git a/test/couchdb/Makefile.am b/test/couchdb/Makefile.am
index bf306c8..937ddf6 100644
--- a/test/couchdb/Makefile.am
+++ b/test/couchdb/Makefile.am
@@ -10,6 +10,8 @@
 ## License for the specific language governing permissions and limitations under
 ## the License.
 
+SUBDIRS = fixtures
+
 noinst_SCRIPTS = run
 
 all:

http://git-wip-us.apache.org/repos/asf/couchdb/blob/85f27505/test/couchdb/couchdb_os_daemons_tests.erl
----------------------------------------------------------------------
diff --git a/test/couchdb/couchdb_os_daemons_tests.erl b/test/couchdb/couchdb_os_daemons_tests.erl
index 0591176..ece59c8 100644
--- a/test/couchdb/couchdb_os_daemons_tests.erl
+++ b/test/couchdb/couchdb_os_daemons_tests.erl
@@ -32,6 +32,7 @@
 -define(DAEMON_CAN_REBOOT, "os_daemon_can_reboot.sh").
 -define(DAEMON_DIE_ON_BOOT, "os_daemon_die_on_boot.sh").
 -define(DAEMON_DIE_QUICKLY, "os_daemon_die_quickly.sh").
+-define(DAEMON_CFGREG, "test_cfg_register").
 -define(DELAY, 100).
 
 
@@ -104,6 +105,21 @@ error_test_() ->
         }
     }.
 
+configuration_register_test_() ->
+    {
+        "OS daemon subscribed to config changes",
+        {
+            foreachx,
+            fun setup/1, fun teardown/2,
+            [{?DAEMON_CFGREG, Fun} || Fun <- [
+                fun should_start_daemon/2,
+                fun should_restart_daemon_on_section_change/2,
+                fun should_not_restart_daemon_on_changing_ignored_section_key/2,
+                fun should_restart_daemon_on_section_key_change/2
+            ]]
+        }
+    }.
+
 
 should_check_daemon(DName, _) ->
     ?_test(begin
@@ -184,7 +200,7 @@ should_not_being_halted(DName, _) ->
     ?_test(begin
         timer:sleep(1000),
         {ok, [D1]} = couch_os_daemons:info([table]),
-        check_daemon(D1, DName, 0),
+        check_daemon(D1, DName, running, 0),
 
         % Should reboot every two seconds. We're at 1s, so wait
         % until 3s to be in the middle of the next invocation's
@@ -192,7 +208,7 @@ should_not_being_halted(DName, _) ->
 
         timer:sleep(2000),
         {ok, [D2]} = couch_os_daemons:info([table]),
-        check_daemon(D2, DName, 1),
+        check_daemon(D2, DName, running, 1),
 
         % If the kill command changed, that means we rebooted the process.
         ?assertNotEqual(D1#daemon.kill, D2#daemon.kill)
@@ -204,17 +220,93 @@ should_halts(DName, Time) ->
     check_dead(D, DName),
     couch_config:delete("os_daemons", DName, false).
 
+should_start_daemon(DName, _) ->
+    ?_test(begin
+        wait_for_start(10),
+        {ok, [D]} = couch_os_daemons:info([table]),
+        check_daemon(D, DName, running, 0, [{"s1"}, {"s2", "k"}])
+    end).
+
+should_restart_daemon_on_section_change(DName, _) ->
+    ?_test(begin
+        wait_for_start(10),
+        {ok, [D1]} = couch_os_daemons:info([table]),
+        couch_config:set("s1", "k", "foo", false),
+        wait_for_restart(10),
+        {ok, [D2]} = couch_os_daemons:info([table]),
+        check_daemon(D2, DName, running, 0, [{"s1"}, {"s2", "k"}]),
+        ?assertNotEqual(D1, D2)
+    end).
+
+should_not_restart_daemon_on_changing_ignored_section_key(_, _) ->
+    ?_test(begin
+        wait_for_start(10),
+        {ok, [D1]} = couch_os_daemons:info([table]),
+        couch_config:set("s2", "k2", "baz", false),
+        timer:sleep(?DELAY),
+        {ok, [D2]} = couch_os_daemons:info([table]),
+        ?assertEqual(D1, D2)
+    end).
+
+should_restart_daemon_on_section_key_change(DName, _) ->
+    ?_test(begin
+        wait_for_start(10),
+        {ok, [D1]} = couch_os_daemons:info([table]),
+        couch_config:set("s2", "k", "bingo", false),
+        wait_for_restart(10),
+        {ok, [D2]} = couch_os_daemons:info([table]),
+        check_daemon(D2, DName, running, 0, [{"s1"}, {"s2", "k"}]),
+        ?assertNotEqual(D1, D2)
+    end).
+
+
+wait_for_start(0) ->
+    erlang:error({assertion_failed,
+                  [{module, ?MODULE},
+                   {line, ?LINE},
+                   {reason, "Timeout on waiting daemon for start"}]});
+wait_for_start(N) ->
+    case couch_os_daemons:info([table]) of
+        {ok, []} ->
+            timer:sleep(?DELAY),
+            wait_for_start(N - 1);
+        _ ->
+            timer:sleep(1000)
+    end.
+
+wait_for_restart(0) ->
+    erlang:error({assertion_failed,
+                  [{module, ?MODULE},
+                   {line, ?LINE},
+                   {reason, "Timeout on waiting daemon for restart"}]});
+wait_for_restart(N) ->
+    {ok, [D]} = couch_os_daemons:info([table]),
+    case D#daemon.status of
+        restarting ->
+            timer:sleep(?DELAY),
+            wait_for_restart(N - 1);
+        _ ->
+            timer:sleep(1000)
+    end.
+
 check_daemon(D) ->
     check_daemon(D, D#daemon.name).
 
 check_daemon(D, Name) ->
-    check_daemon(D, Name, 0).
+    check_daemon(D, Name, running).
+
+check_daemon(D, Name, Status) ->
+    check_daemon(D, Name, Status, 0).
+
+check_daemon(D, Name, Status, Errs) ->
+    check_daemon(D, Name, Status, Errs, []).
 
-check_daemon(D, Name, Errs) ->
+check_daemon(D, Name, Status, Errs, CfgPatterns) ->
     ?assert(is_port(D#daemon.port)),
     ?assertEqual(Name, D#daemon.name),
     ?assertNotEqual(undefined, D#daemon.kill),
-    ?assertEqual(running, D#daemon.status),
+    ?assertEqual(Status, D#daemon.status),
+    ?assertEqual(CfgPatterns, D#daemon.cfg_patterns),
     ?assertEqual(Errs, length(D#daemon.errors)),
     ?assertEqual([], D#daemon.buf).
 

http://git-wip-us.apache.org/repos/asf/couchdb/blob/85f27505/test/couchdb/fixtures/Makefile.am
----------------------------------------------------------------------
diff --git a/test/couchdb/fixtures/Makefile.am b/test/couchdb/fixtures/Makefile.am
new file mode 100644
index 0000000..1273234
--- /dev/null
+++ b/test/couchdb/fixtures/Makefile.am
@@ -0,0 +1,15 @@
+## Licensed under the Apache License, Version 2.0 (the "License"); you may not
+## use this file except in compliance with the License. You may obtain a copy of
+## the License at
+##
+##   http://www.apache.org/licenses/LICENSE-2.0
+##
+## Unless required by applicable law or agreed to in writing, software
+## distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+## WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+## License for the specific language governing permissions and limitations under
+## the License.
+
+noinst_PROGRAMS = test_cfg_register
+test_cfg_register_SOURCES = test_cfg_register.c
+test_cfg_register_CFLAGS = -D_BSD_SOURCE

http://git-wip-us.apache.org/repos/asf/couchdb/blob/85f27505/test/couchdb/fixtures/test_cfg_register.c
----------------------------------------------------------------------
diff --git a/test/couchdb/fixtures/test_cfg_register.c b/test/couchdb/fixtures/test_cfg_register.c
new file mode 100644
index 0000000..c910bac
--- /dev/null
+++ b/test/couchdb/fixtures/test_cfg_register.c
@@ -0,0 +1,31 @@
+// Licensed under the Apache License, Version 2.0 (the "License"); you may not
+// use this file except in compliance with the License. You may obtain a copy of
+// the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations under
+// the License.
+
+#include <stdio.h>
+#include <stdlib.h>
+
+int
+main(int argc, const char * argv[])
+{
+    char c = '\0';
+    size_t num = 1;
+    
+    fprintf(stdout, "[\"register\", \"s1\"]\n");
+    fprintf(stdout, "[\"register\", \"s2\", \"k\"]\n");
+    fflush(stdout);
+    
+    while(c != '\n' && num > 0) {
+        num = fread(&c, 1, 1, stdin);
+    }
+    
+    exit(0);
+}

http://git-wip-us.apache.org/repos/asf/couchdb/blob/85f27505/test/etap/173-os-daemon-cfg-register.t
----------------------------------------------------------------------
diff --git a/test/etap/173-os-daemon-cfg-register.t b/test/etap/173-os-daemon-cfg-register.t
deleted file mode 100755
index 256ee7d..0000000
--- a/test/etap/173-os-daemon-cfg-register.t
+++ /dev/null
@@ -1,116 +0,0 @@
-#!/usr/bin/env escript
-%% -*- erlang -*-
-
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License.  You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
--record(daemon, {
-    port,
-    name,
-    cmd,
-    kill,
-    status=running,
-    cfg_patterns=[],
-    errors=[],
-    buf=[]
-}).
-
-daemon_name() ->
-    "wheee".
-
-daemon_cmd() ->
-    test_util:build_file("test/etap/test_cfg_register").
-
-main(_) ->
-    test_util:init_code_path(),
-
-    etap:plan(27),
-    case (catch test()) of
-        ok ->
-            etap:end_tests();
-        Other ->
-            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
-            etap:bail(Other)
-    end,
-    ok.
-
-test() ->
-    couch_config:start_link(test_util:config_files()),
-    couch_os_daemons:start_link(),
-    
-    DaemonCmd = daemon_cmd() ++ " 2> /dev/null",
-    
-    etap:diag("Booting the daemon"),
-    couch_config:set("os_daemons", daemon_name(), DaemonCmd, false),
-    wait_for_start(10),
-    {ok, [D1]} = couch_os_daemons:info([table]),
-    check_daemon(D1, running),
-    
-    etap:diag("Daemon restarts when section changes."),
-    couch_config:set("s1", "k", "foo", false),
-    wait_for_restart(10),
-    {ok, [D2]} = couch_os_daemons:info([table]),
-    check_daemon(D2, running),
-    etap:isnt(D2#daemon.kill, D1#daemon.kill, "Kill command shows restart."),
-
-    etap:diag("Daemon doesn't restart for ignored section key."),
-    couch_config:set("s2", "k2", "baz", false),
-    timer:sleep(1000), % Message travel time.
-    {ok, [D3]} = couch_os_daemons:info([table]),
-    etap:is(D3, D2, "Same daemon info after ignored config change."),
-    
-    etap:diag("Daemon restarts for specific section/key pairs."),
-    couch_config:set("s2", "k", "bingo", false),
-    wait_for_restart(10),
-    {ok, [D4]} = couch_os_daemons:info([table]),
-    check_daemon(D4, running),
-    etap:isnt(D4#daemon.kill, D3#daemon.kill, "Kill command changed again."),
-    
-    ok.
-
-wait_for_start(0) ->
-    throw({error, wait_for_start});
-wait_for_start(N) ->
-    case couch_os_daemons:info([table]) of
-        {ok, []} ->
-            timer:sleep(200),
-            wait_for_start(N-1);
-        _ ->
-            timer:sleep(1000)
-    end.
-
-wait_for_restart(0) ->
-    throw({error, wait_for_restart});
-wait_for_restart(N) ->
-    {ok, [D]} = couch_os_daemons:info([table]),
-    case D#daemon.status of
-        restarting ->
-            timer:sleep(200),
-            wait_for_restart(N-1);
-        _ ->
-            timer:sleep(1000)
-    end.
-
-check_daemon(D, Status) ->
-    BaseName = filename:basename(daemon_cmd()) ++ " 2> /dev/null",
-    BaseLen = length(BaseName),
-    CmdLen = length(D#daemon.cmd),
-    CmdName = lists:sublist(D#daemon.cmd, CmdLen-BaseLen+1, BaseLen),
-
-    etap:is(is_port(D#daemon.port), true, "Daemon port is a port."),
-    etap:is(D#daemon.name, daemon_name(), "Daemon name was set correctly."),
-    etap:is(CmdName, BaseName, "Command name was set correctly."),
-    etap:isnt(D#daemon.kill, undefined, "Kill command was set."),
-    etap:is(D#daemon.status, Status, "Daemon status is correct."),
-    etap:is(D#daemon.cfg_patterns, [{"s1"}, {"s2", "k"}], "Cfg patterns set"),
-    etap:is(D#daemon.errors, [], "No errors have occurred."),
-    etap:isnt(D#daemon.buf, nil, "Buffer is active.").

http://git-wip-us.apache.org/repos/asf/couchdb/blob/85f27505/test/etap/Makefile.am
----------------------------------------------------------------------
diff --git a/test/etap/Makefile.am b/test/etap/Makefile.am
index e42d398..05a7870 100644
--- a/test/etap/Makefile.am
+++ b/test/etap/Makefile.am
@@ -13,10 +13,6 @@
 noinst_SCRIPTS = run
 noinst_DATA = test_util.beam test_web.beam
 
-noinst_PROGRAMS = test_cfg_register
-test_cfg_register_SOURCES = test_cfg_register.c
-test_cfg_register_CFLAGS = -D_BSD_SOURCE
-
 %.beam: %.erl
 	$(ERLC) $<
 
@@ -36,7 +32,6 @@ fixture_files = \
     fixtures/test.couch
 
 tap_files = \
-    173-os-daemon-cfg-register.t \
     180-http-proxy.ini \
     180-http-proxy.t \
     190-json-stream-parse.t \

http://git-wip-us.apache.org/repos/asf/couchdb/blob/85f27505/test/etap/test_cfg_register.c
----------------------------------------------------------------------
diff --git a/test/etap/test_cfg_register.c b/test/etap/test_cfg_register.c
deleted file mode 100644
index c910bac..0000000
--- a/test/etap/test_cfg_register.c
+++ /dev/null
@@ -1,31 +0,0 @@
-// Licensed under the Apache License, Version 2.0 (the "License"); you may not
-// use this file except in compliance with the License. You may obtain a copy of
-// the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-// License for the specific language governing permissions and limitations under
-// the License.
-
-#include <stdio.h>
-#include <stdlib.h>
-
-int
-main(int argc, const char * argv[])
-{
-    char c = '\0';
-    size_t num = 1;
-    
-    fprintf(stdout, "[\"register\", \"s1\"]\n");
-    fprintf(stdout, "[\"register\", \"s2\", \"k\"]\n");
-    fflush(stdout);
-    
-    while(c != '\n' && num > 0) {
-        num = fread(&c, 1, 1, stdin);
-    }
-    
-    exit(0);
-}


[07/36] couchdb commit: updated refs/heads/1963-eunit to 85f2750

Posted by kx...@apache.org.
Port 010-file-basics.t and 011-file-headers.t etap test suites to eunit

Both merged into single suite since they tests single target and shares
common bits.


Project: http://git-wip-us.apache.org/repos/asf/couchdb/repo
Commit: http://git-wip-us.apache.org/repos/asf/couchdb/commit/436ad3ad
Tree: http://git-wip-us.apache.org/repos/asf/couchdb/tree/436ad3ad
Diff: http://git-wip-us.apache.org/repos/asf/couchdb/diff/436ad3ad

Branch: refs/heads/1963-eunit
Commit: 436ad3ad4f3853ee915996994d447192f8410693
Parents: d4b721f
Author: Alexander Shorin <kx...@apache.org>
Authored: Fri May 16 18:38:56 2014 +0400
Committer: Alexander Shorin <kx...@apache.org>
Committed: Tue Jun 3 02:55:24 2014 +0400

----------------------------------------------------------------------
 test/couchdb/Makefile.am          |   1 +
 test/couchdb/couch_file_tests.erl | 266 +++++++++++++++++++++++++++++++++
 test/couchdb/couchdb_tests.hrl.in |   7 +
 test/etap/010-file-basics.t       | 113 --------------
 test/etap/011-file-headers.t      | 152 -------------------
 test/etap/Makefile.am             |   2 -
 6 files changed, 274 insertions(+), 267 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/couchdb/blob/436ad3ad/test/couchdb/Makefile.am
----------------------------------------------------------------------
diff --git a/test/couchdb/Makefile.am b/test/couchdb/Makefile.am
index 8607049..c3d170f 100644
--- a/test/couchdb/Makefile.am
+++ b/test/couchdb/Makefile.am
@@ -19,6 +19,7 @@ all:
 eunit_files = \
     couchdb_modules_load_tests.erl \
     couch_util_tests.erl \
+    couch_file_tests.erl \
     couchdb_tests.hrl
 
 EXTRA_DIST = \

http://git-wip-us.apache.org/repos/asf/couchdb/blob/436ad3ad/test/couchdb/couch_file_tests.erl
----------------------------------------------------------------------
diff --git a/test/couchdb/couch_file_tests.erl b/test/couchdb/couch_file_tests.erl
new file mode 100644
index 0000000..79c6e3e
--- /dev/null
+++ b/test/couchdb/couch_file_tests.erl
@@ -0,0 +1,266 @@
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+%   http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+
+-module(couch_file_tests).
+
+-include_lib("couchdb_tests.hrl").
+
+-define(SIZE_BLOCK, 4096).
+-define(setup(F), {setup, fun setup/0, fun teardown/1, F}).
+-define(foreach(Fs), {foreach, fun setup/0, fun teardown/1, Fs}).
+
+
+setup() ->
+    {ok, Fd} = couch_file:open(?tempfile(), [create, overwrite]),
+    Fd.
+
+teardown(Fd) ->
+    ok = couch_file:close(Fd).
+
+
+open_close_test_() ->
+    {
+        "Test for proper file open and close",
+        [
+            should_return_enoent_if_missed(),
+            should_ignore_invalid_flags_with_open(),
+            ?setup(fun should_return_pid_on_file_open/1),
+            should_close_file_properly(),
+            ?setup(fun should_create_empty_new_files/1)
+        ]
+    }.
+
+should_return_enoent_if_missed() ->
+    ?_assertEqual({error, enoent}, couch_file:open("not a real file")).
+
+should_ignore_invalid_flags_with_open() ->
+    ?_assertMatch({ok, _},
+                  couch_file:open(?tempfile(), [create, invalid_option])).
+
+should_return_pid_on_file_open(Fd) ->
+    ?_assert(is_pid(Fd)).
+
+should_close_file_properly() ->
+    {ok, Fd} = couch_file:open(?tempfile(), [create, overwrite]),
+    ok = couch_file:close(Fd),
+    ?_assert(true).
+
+should_create_empty_new_files(Fd) ->
+    ?_assertMatch({ok, 0}, couch_file:bytes(Fd)).
+
+
+read_write_test_() ->
+    {
+        "Common file read/write tests",
+        ?foreach([
+            fun should_increase_file_size_on_write/1,
+            fun should_return_current_file_size_on_write/1,
+            fun should_write_and_read_term/1,
+            fun should_write_and_read_binary/1,
+            fun should_write_and_read_large_binary/1,
+            fun should_return_term_as_binary_for_reading_binary/1,
+            fun should_read_term_written_as_binary/1,
+            fun should_read_iolist/1,
+            fun should_fsync/1,
+            fun should_not_read_beyond_eof/1,
+            fun should_truncate/1
+        ])
+    }.
+
+
+should_increase_file_size_on_write(Fd) ->
+    {ok, 0, _} = couch_file:append_term(Fd, foo),
+    {ok, Size} = couch_file:bytes(Fd),
+    ?_assert(Size > 0).
+
+should_return_current_file_size_on_write(Fd) ->
+    {ok, 0, _} = couch_file:append_term(Fd, foo),
+    {ok, Size} = couch_file:bytes(Fd),
+    ?_assertMatch({ok, Size, _}, couch_file:append_term(Fd, bar)).
+
+should_write_and_read_term(Fd) ->
+    {ok, Pos, _} = couch_file:append_term(Fd, foo),
+    ?_assertMatch({ok, foo}, couch_file:pread_term(Fd, Pos)).
+
+should_write_and_read_binary(Fd) ->
+    {ok, Pos, _} = couch_file:append_binary(Fd, <<"fancy!">>),
+    ?_assertMatch({ok, <<"fancy!">>}, couch_file:pread_binary(Fd, Pos)).
+
+should_return_term_as_binary_for_reading_binary(Fd) ->
+    {ok, Pos, _} = couch_file:append_term(Fd, foo),
+    Foo = couch_compress:compress(foo, snappy),
+    ?_assertMatch({ok, Foo}, couch_file:pread_binary(Fd, Pos)).
+
+should_read_term_written_as_binary(Fd) ->
+    {ok, Pos, _} = couch_file:append_binary(Fd, <<131,100,0,3,102,111,111>>),
+    ?_assertMatch({ok, foo}, couch_file:pread_term(Fd, Pos)).
+
+should_write_and_read_large_binary(Fd) ->
+    BigBin = list_to_binary(lists:duplicate(100000, 0)),
+    {ok, Pos, _} = couch_file:append_binary(Fd, BigBin),
+    ?_assertMatch({ok, BigBin}, couch_file:pread_binary(Fd, Pos)).
+
+should_read_iolist(Fd) ->
+    %% append_binary == append_iolist?
+    %% Possible bug in pread_iolist or iolist() -> append_binary
+    {ok, Pos, _} = couch_file:append_binary(Fd, ["foo", $m, <<"bam">>]),
+    {ok, IoList} = couch_file:pread_iolist(Fd, Pos),
+    ?_assertMatch(<<"foombam">>, iolist_to_binary(IoList)).
+
+should_fsync(Fd) ->
+    {"How does on test fsync?", ?_assertMatch(ok, couch_file:sync(Fd))}.
+
+should_not_read_beyond_eof(_) ->
+    {"No idea how to test reading beyond EOF", ?_assert(true)}.
+
+should_truncate(Fd) ->
+    {ok, 0, _} = couch_file:append_term(Fd, foo),
+    {ok, Size} = couch_file:bytes(Fd),
+    BigBin = list_to_binary(lists:duplicate(100000, 0)),
+    {ok, _, _} = couch_file:append_binary(Fd, BigBin),
+    ok = couch_file:truncate(Fd, Size),
+    ?_assertMatch({ok, foo}, couch_file:pread_term(Fd, 0)).
+
+
+header_test_() ->
+    {
+        "File header read/write tests",
+        [
+            ?foreach([
+                fun should_write_and_read_atom_header/1,
+                fun should_write_and_read_tuple_header/1,
+                fun should_write_and_read_second_header/1,
+                fun should_truncate_second_header/1,
+                fun should_produce_same_file_size_on_rewrite/1,
+                fun should_save_headers_larger_than_block_size/1
+            ]),
+            should_recover_header_marker_corruption(),
+            should_recover_header_size_corruption(),
+            should_recover_header_md5sig_corruption(),
+            should_recover_header_data_corruption()
+        ]
+    }.
+
+
+should_write_and_read_atom_header(Fd) ->
+    ok = couch_file:write_header(Fd, hello),
+    ?_assertMatch({ok, hello}, couch_file:read_header(Fd)).
+
+should_write_and_read_tuple_header(Fd) ->
+    ok = couch_file:write_header(Fd, {<<"some_data">>, 32}),
+    ?_assertMatch({ok, {<<"some_data">>, 32}}, couch_file:read_header(Fd)).
+
+should_write_and_read_second_header(Fd) ->
+    ok = couch_file:write_header(Fd, {<<"some_data">>, 32}),
+    ok = couch_file:write_header(Fd, [foo, <<"more">>]),
+    ?_assertMatch({ok, [foo, <<"more">>]}, couch_file:read_header(Fd)).
+
+should_truncate_second_header(Fd) ->
+    ok = couch_file:write_header(Fd, {<<"some_data">>, 32}),
+    {ok, Size} = couch_file:bytes(Fd),
+    ok = couch_file:write_header(Fd, [foo, <<"more">>]),
+    ok = couch_file:truncate(Fd, Size),
+    ?_assertMatch({ok, {<<"some_data">>, 32}}, couch_file:read_header(Fd)).
+
+should_produce_same_file_size_on_rewrite(Fd) ->
+    ok = couch_file:write_header(Fd, {<<"some_data">>, 32}),
+    {ok, Size1} = couch_file:bytes(Fd),
+    ok = couch_file:write_header(Fd, [foo, <<"more">>]),
+    {ok, Size2} = couch_file:bytes(Fd),
+    ok = couch_file:truncate(Fd, Size1),
+    ok = couch_file:write_header(Fd, [foo, <<"more">>]),
+    ?_assertMatch({ok, Size2}, couch_file:bytes(Fd)).
+
+should_save_headers_larger_than_block_size(Fd) ->
+    Header = erlang:make_tuple(5000, <<"CouchDB">>),
+    couch_file:write_header(Fd, Header),
+    {"COUCHDB-1319", ?_assertMatch({ok, Header}, couch_file:read_header(Fd))}.
+
+
+should_recover_header_marker_corruption() ->
+    ?_assertMatch(
+        ok,
+        check_header_recovery(
+            fun(CouchFd, RawFd, Expect, HeaderPos) ->
+                ?assertNotMatch(Expect, couch_file:read_header(CouchFd)),
+                file:pwrite(RawFd, HeaderPos, <<0>>),
+                ?assertMatch(Expect, couch_file:read_header(CouchFd))
+            end)
+    ).
+
+should_recover_header_size_corruption() ->
+    ?_assertMatch(
+        ok,
+        check_header_recovery(
+            fun(CouchFd, RawFd, Expect, HeaderPos) ->
+                ?assertNotMatch(Expect, couch_file:read_header(CouchFd)),
+                % +1 for 0x1 byte marker
+                file:pwrite(RawFd, HeaderPos + 1, <<10/integer>>),
+                ?assertMatch(Expect, couch_file:read_header(CouchFd))
+            end)
+    ).
+
+should_recover_header_md5sig_corruption() ->
+    ?_assertMatch(
+        ok,
+        check_header_recovery(
+            fun(CouchFd, RawFd, Expect, HeaderPos) ->
+                ?assertNotMatch(Expect, couch_file:read_header(CouchFd)),
+                % +5 = +1 for 0x1 byte and +4 for term size.
+                file:pwrite(RawFd, HeaderPos + 5, <<"F01034F88D320B22">>),
+                ?assertMatch(Expect, couch_file:read_header(CouchFd))
+            end)
+    ).
+
+should_recover_header_data_corruption() ->
+    ?_assertMatch(
+        ok,
+        check_header_recovery(
+            fun(CouchFd, RawFd, Expect, HeaderPos) ->
+                ?assertNotMatch(Expect, couch_file:read_header(CouchFd)),
+                % +21 = +1 for 0x1 byte, +4 for term size and +16 for MD5 sig
+                file:pwrite(RawFd, HeaderPos + 21, <<"some data goes here!">>),
+                ?assertMatch(Expect, couch_file:read_header(CouchFd))
+            end)
+    ).
+
+
+check_header_recovery(CheckFun) ->
+    Path = ?tempfile(),
+    {ok, Fd} = couch_file:open(Path, [create, overwrite]),
+    {ok, RawFd} = file:open(Path, [read, write, raw, binary]),
+
+    {ok, _} = write_random_data(Fd),
+    ExpectHeader = {some_atom, <<"a binary">>, 756},
+    ok = couch_file:write_header(Fd, ExpectHeader),
+
+    {ok, HeaderPos} = write_random_data(Fd),
+    ok = couch_file:write_header(Fd, {2342, <<"corruption! greed!">>}),
+
+    CheckFun(Fd, RawFd, {ok, ExpectHeader}, HeaderPos),
+
+    ok = file:close(RawFd),
+    ok = couch_file:close(Fd),
+    ok.
+
+write_random_data(Fd) ->
+    write_random_data(Fd, 100 + random:uniform(1000)).
+
+write_random_data(Fd, 0) ->
+    {ok, Bytes} = couch_file:bytes(Fd),
+    {ok, (1 + Bytes div ?SIZE_BLOCK) * ?SIZE_BLOCK};
+write_random_data(Fd, N) ->
+    Choices = [foo, bar, <<"bizzingle">>, "bank", ["rough", stuff]],
+    Term = lists:nth(random:uniform(4) + 1, Choices),
+    {ok, _, _} = couch_file:append_term(Fd, Term),
+    write_random_data(Fd, N - 1).

http://git-wip-us.apache.org/repos/asf/couchdb/blob/436ad3ad/test/couchdb/couchdb_tests.hrl.in
----------------------------------------------------------------------
diff --git a/test/couchdb/couchdb_tests.hrl.in b/test/couchdb/couchdb_tests.hrl.in
index c749f3a..e5a8b4e 100644
--- a/test/couchdb/couchdb_tests.hrl.in
+++ b/test/couchdb/couchdb_tests.hrl.in
@@ -21,3 +21,10 @@
 -define(TEMPDIR,
     filename:join([?SOURCEDIR, "test", "couchdb", "temp"])).
 
+-define(tempfile,
+    fun() ->
+        {A, B, C} = erlang:now(),
+        N = node(),
+        FileName = lists:flatten(io_lib:format("~p-~p.~p.~p", [N, A, B, C])),
+        filename:join([?TEMPDIR, FileName])
+    end).

http://git-wip-us.apache.org/repos/asf/couchdb/blob/436ad3ad/test/etap/010-file-basics.t
----------------------------------------------------------------------
diff --git a/test/etap/010-file-basics.t b/test/etap/010-file-basics.t
deleted file mode 100755
index fb1b29e..0000000
--- a/test/etap/010-file-basics.t
+++ /dev/null
@@ -1,113 +0,0 @@
-#!/usr/bin/env escript
-%% -*- erlang -*-
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
--define(etap_match(Got, Expected, Desc),
-        etap:fun_is(fun(XXXXXX) ->
-            case XXXXXX of Expected -> true; _ -> false end
-        end, Got, Desc)).
-
-filename() -> test_util:build_file("test/etap/temp.010").
-
-main(_) ->
-    test_util:init_code_path(),
-    etap:plan(19),
-    case (catch test()) of
-        ok ->
-            etap:end_tests();
-        Other ->
-            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
-            etap:bail()
-    end,
-    ok.
-
-test() ->
-    etap:is({error, enoent}, couch_file:open("not a real file"),
-        "Opening a non-existant file should return an enoent error."),
-
-    etap:fun_is(
-        fun({ok, _}) -> true; (_) -> false end,
-        couch_file:open(filename() ++ ".1", [create, invalid_option]),
-        "Invalid flags to open are ignored."
-    ),
-
-    {ok, Fd} = couch_file:open(filename() ++ ".0", [create, overwrite]),
-    etap:ok(is_pid(Fd),
-        "Returned file descriptor is a Pid"),
-
-    etap:is({ok, 0}, couch_file:bytes(Fd),
-        "Newly created files have 0 bytes."),
-
-    ?etap_match(couch_file:append_term(Fd, foo), {ok, 0, _},
-        "Appending a term returns the previous end of file position."),
-
-    {ok, Size} = couch_file:bytes(Fd),
-    etap:is_greater(Size, 0,
-        "Writing a term increased the file size."),
-
-    ?etap_match(couch_file:append_binary(Fd, <<"fancy!">>), {ok, Size, _},
-        "Appending a binary returns the current file size."),
-
-    etap:is({ok, foo}, couch_file:pread_term(Fd, 0),
-        "Reading the first term returns what we wrote: foo"),
-
-    etap:is({ok, <<"fancy!">>}, couch_file:pread_binary(Fd, Size),
-        "Reading back the binary returns what we wrote: <<\"fancy\">>."),
-
-    etap:is({ok, couch_compress:compress(foo, snappy)},
-        couch_file:pread_binary(Fd, 0),
-        "Reading a binary at a term position returns the term as binary."
-    ),
-
-    {ok, BinPos, _} = couch_file:append_binary(Fd, <<131,100,0,3,102,111,111>>),
-    etap:is({ok, foo}, couch_file:pread_term(Fd, BinPos),
-        "Reading a term from a written binary term representation succeeds."),
-        
-    BigBin = list_to_binary(lists:duplicate(100000, 0)),
-    {ok, BigBinPos, _} = couch_file:append_binary(Fd, BigBin),
-    etap:is({ok, BigBin}, couch_file:pread_binary(Fd, BigBinPos),
-        "Reading a large term from a written representation succeeds."),
-    
-    ok = couch_file:write_header(Fd, hello),
-    etap:is({ok, hello}, couch_file:read_header(Fd),
-        "Reading a header succeeds."),
-        
-    {ok, BigBinPos2, _} = couch_file:append_binary(Fd, BigBin),
-    etap:is({ok, BigBin}, couch_file:pread_binary(Fd, BigBinPos2),
-        "Reading a large term from a written representation succeeds 2."),
-
-    % append_binary == append_iolist?
-    % Possible bug in pread_iolist or iolist() -> append_binary
-    {ok, IOLPos, _} = couch_file:append_binary(Fd, ["foo", $m, <<"bam">>]),
-    {ok, IoList} = couch_file:pread_iolist(Fd, IOLPos),
-    etap:is(<<"foombam">>, iolist_to_binary(IoList),
-        "Reading an results in a binary form of the written iolist()"),
-
-    % XXX: How does on test fsync?
-    etap:is(ok, couch_file:sync(Fd),
-        "Syncing does not cause an error."),
-
-    etap:is(ok, couch_file:truncate(Fd, Size),
-        "Truncating a file succeeds."),
-
-    %etap:is(eof, (catch couch_file:pread_binary(Fd, Size)),
-    %    "Reading data that was truncated fails.")
-    etap:skip(fun() -> ok end,
-        "No idea how to test reading beyond EOF"),
-
-    etap:is({ok, foo}, couch_file:pread_term(Fd, 0),
-        "Truncating does not affect data located before the truncation mark."),
-
-    etap:is(ok, couch_file:close(Fd),
-        "Files close properly."),
-    ok.

http://git-wip-us.apache.org/repos/asf/couchdb/blob/436ad3ad/test/etap/011-file-headers.t
----------------------------------------------------------------------
diff --git a/test/etap/011-file-headers.t b/test/etap/011-file-headers.t
deleted file mode 100755
index a26b032..0000000
--- a/test/etap/011-file-headers.t
+++ /dev/null
@@ -1,152 +0,0 @@
-#!/usr/bin/env escript
-%% -*- erlang -*-
-%%! -pa ./src/couchdb -sasl errlog_type error -boot start_sasl -noshell
-
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
-filename() -> test_util:build_file("test/etap/temp.011").
-sizeblock() -> 4096. % Need to keep this in sync with couch_file.erl
-
-main(_) ->
-    test_util:init_code_path(),
-    {S1, S2, S3} = now(),
-    random:seed(S1, S2, S3),
-
-    etap:plan(18),
-    case (catch test()) of
-        ok ->
-            etap:end_tests();
-        Other ->
-            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
-            etap:bail()
-    end,
-    ok.
-
-test() ->
-    {ok, Fd} = couch_file:open(filename(), [create,overwrite]),
-
-    etap:is({ok, 0}, couch_file:bytes(Fd),
-        "File should be initialized to contain zero bytes."),
-
-    etap:is(ok, couch_file:write_header(Fd, {<<"some_data">>, 32}),
-        "Writing a header succeeds."),
-
-    {ok, Size1} = couch_file:bytes(Fd),
-    etap:is_greater(Size1, 0,
-        "Writing a header allocates space in the file."),
-
-    etap:is({ok, {<<"some_data">>, 32}}, couch_file:read_header(Fd),
-        "Reading the header returns what we wrote."),
-
-    etap:is(ok, couch_file:write_header(Fd, [foo, <<"more">>]),
-        "Writing a second header succeeds."),
-
-    {ok, Size2} = couch_file:bytes(Fd),
-    etap:is_greater(Size2, Size1,
-        "Writing a second header allocates more space."),
-
-    etap:is({ok, [foo, <<"more">>]}, couch_file:read_header(Fd),
-        "Reading the second header does not return the first header."),
-
-    % Delete the second header.
-    ok = couch_file:truncate(Fd, Size1),
-
-    etap:is({ok, {<<"some_data">>, 32}}, couch_file:read_header(Fd),
-        "Reading the header after a truncation returns a previous header."),
-
-    couch_file:write_header(Fd, [foo, <<"more">>]),
-    etap:is({ok, Size2}, couch_file:bytes(Fd),
-        "Rewriting the same second header returns the same second size."),
-
-    couch_file:write_header(Fd, erlang:make_tuple(5000, <<"CouchDB">>)),
-    etap:is(
-        couch_file:read_header(Fd),
-        {ok, erlang:make_tuple(5000, <<"CouchDB">>)},
-        "Headers larger than the block size can be saved (COUCHDB-1319)"
-    ),
-
-    ok = couch_file:close(Fd),
-
-    % Now for the fun stuff. Try corrupting the second header and see
-    % if we recover properly.
-
-    % Destroy the 0x1 byte that marks a header
-    check_header_recovery(fun(CouchFd, RawFd, Expect, HeaderPos) ->
-        etap:isnt(Expect, couch_file:read_header(CouchFd),
-            "Should return a different header before corruption."),
-        file:pwrite(RawFd, HeaderPos, <<0>>),
-        etap:is(Expect, couch_file:read_header(CouchFd),
-            "Corrupting the byte marker should read the previous header.")
-    end),
-
-    % Corrupt the size.
-    check_header_recovery(fun(CouchFd, RawFd, Expect, HeaderPos) ->
-        etap:isnt(Expect, couch_file:read_header(CouchFd),
-            "Should return a different header before corruption."),
-        % +1 for 0x1 byte marker
-        file:pwrite(RawFd, HeaderPos+1, <<10/integer>>),
-        etap:is(Expect, couch_file:read_header(CouchFd),
-            "Corrupting the size should read the previous header.")
-    end),
-
-    % Corrupt the MD5 signature
-    check_header_recovery(fun(CouchFd, RawFd, Expect, HeaderPos) ->
-        etap:isnt(Expect, couch_file:read_header(CouchFd),
-            "Should return a different header before corruption."),
-        % +5 = +1 for 0x1 byte and +4 for term size.
-        file:pwrite(RawFd, HeaderPos+5, <<"F01034F88D320B22">>),
-        etap:is(Expect, couch_file:read_header(CouchFd),
-            "Corrupting the MD5 signature should read the previous header.")
-    end),
-
-    % Corrupt the data
-    check_header_recovery(fun(CouchFd, RawFd, Expect, HeaderPos) ->
-        etap:isnt(Expect, couch_file:read_header(CouchFd),
-            "Should return a different header before corruption."),
-        % +21 = +1 for 0x1 byte, +4 for term size and +16 for MD5 sig
-        file:pwrite(RawFd, HeaderPos+21, <<"some data goes here!">>),
-        etap:is(Expect, couch_file:read_header(CouchFd),
-            "Corrupting the header data should read the previous header.")
-    end),
-
-    ok.
-
-check_header_recovery(CheckFun) ->
-    {ok, Fd} = couch_file:open(filename(), [create,overwrite]),
-    {ok, RawFd} = file:open(filename(), [read, write, raw, binary]),
-
-    {ok, _} = write_random_data(Fd),
-    ExpectHeader = {some_atom, <<"a binary">>, 756},
-    ok = couch_file:write_header(Fd, ExpectHeader),
-
-    {ok, HeaderPos} = write_random_data(Fd),
-    ok = couch_file:write_header(Fd, {2342, <<"corruption! greed!">>}),
-
-    CheckFun(Fd, RawFd, {ok, ExpectHeader}, HeaderPos),
-
-    ok = file:close(RawFd),
-    ok = couch_file:close(Fd),
-    ok.
-
-write_random_data(Fd) ->
-    write_random_data(Fd, 100 + random:uniform(1000)).
-
-write_random_data(Fd, 0) ->
-    {ok, Bytes} = couch_file:bytes(Fd),
-    {ok, (1 + Bytes div sizeblock()) * sizeblock()};
-write_random_data(Fd, N) ->
-    Choices = [foo, bar, <<"bizzingle">>, "bank", ["rough", stuff]],
-    Term = lists:nth(random:uniform(4) + 1, Choices),
-    {ok, _, _} = couch_file:append_term(Fd, Term),
-    write_random_data(Fd, N-1).
-

http://git-wip-us.apache.org/repos/asf/couchdb/blob/436ad3ad/test/etap/Makefile.am
----------------------------------------------------------------------
diff --git a/test/etap/Makefile.am b/test/etap/Makefile.am
index 9a67ed7..7ac28b3 100644
--- a/test/etap/Makefile.am
+++ b/test/etap/Makefile.am
@@ -36,8 +36,6 @@ fixture_files = \
     fixtures/test.couch
 
 tap_files = \
-    010-file-basics.t \
-    011-file-headers.t \
     020-btree-basics.t \
     021-btree-reductions.t \
     030-doc-from-json.t \


[03/36] couchdb commit: updated refs/heads/1963-eunit to 85f2750

Posted by kx...@apache.org.
Add common header for eunit test files


Project: http://git-wip-us.apache.org/repos/asf/couchdb/repo
Commit: http://git-wip-us.apache.org/repos/asf/couchdb/commit/9427d690
Tree: http://git-wip-us.apache.org/repos/asf/couchdb/tree/9427d690
Diff: http://git-wip-us.apache.org/repos/asf/couchdb/diff/9427d690

Branch: refs/heads/1963-eunit
Commit: 9427d69070f7ce5af1e01c7f914a9a05aefcc002
Parents: 2652d1e
Author: Alexander Shorin <kx...@apache.org>
Authored: Fri May 16 03:46:16 2014 +0400
Committer: Alexander Shorin <kx...@apache.org>
Committed: Tue Jun 3 02:16:40 2014 +0400

----------------------------------------------------------------------
 .gitignore                        |  1 +
 configure.ac                      |  1 +
 test/couchdb/Makefile.am          |  6 +++++-
 test/couchdb/couchdb_tests.hrl.in | 23 +++++++++++++++++++++++
 4 files changed, 30 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/couchdb/blob/9427d690/.gitignore
----------------------------------------------------------------------
diff --git a/.gitignore b/.gitignore
index 85e2365..80b1ae0 100644
--- a/.gitignore
+++ b/.gitignore
@@ -129,6 +129,7 @@ stamp-h1
 test/.deps/
 test/bench/run
 test/couchdb/run
+test/couchdb/couchdb_tests.hrl
 test/etap/.deps/
 test/etap/run
 test/etap/temp.*

http://git-wip-us.apache.org/repos/asf/couchdb/blob/9427d690/configure.ac
----------------------------------------------------------------------
diff --git a/configure.ac b/configure.ac
index 4b34811..4d45a97 100644
--- a/configure.ac
+++ b/configure.ac
@@ -755,6 +755,7 @@ AC_CONFIG_FILES([test/Makefile])
 AC_CONFIG_FILES([test/bench/Makefile])
 AC_CONFIG_FILES([test/couchdb/run])
 AC_CONFIG_FILES([test/couchdb/Makefile])
+AC_CONFIG_FILES([test/couchdb/couchdb_tests.hrl])
 AC_CONFIG_FILES([test/etap/Makefile])
 AC_CONFIG_FILES([test/etap/test_util.erl])
 AC_CONFIG_FILES([test/javascript/Makefile])

http://git-wip-us.apache.org/repos/asf/couchdb/blob/9427d690/test/couchdb/Makefile.am
----------------------------------------------------------------------
diff --git a/test/couchdb/Makefile.am b/test/couchdb/Makefile.am
index 5bd65dd..fac5648 100644
--- a/test/couchdb/Makefile.am
+++ b/test/couchdb/Makefile.am
@@ -16,8 +16,12 @@ all:
 	mkdir -p {ebin,temp}
 	chmod +x run
 
+eunit_files = \
+    couchdb_tests.hrl
+
 EXTRA_DIST = \
-    run.in
+    run.in \
+    $(eunit_files)
 
 clean-local:
 	rm -rf ebin

http://git-wip-us.apache.org/repos/asf/couchdb/blob/9427d690/test/couchdb/couchdb_tests.hrl.in
----------------------------------------------------------------------
diff --git a/test/couchdb/couchdb_tests.hrl.in b/test/couchdb/couchdb_tests.hrl.in
new file mode 100644
index 0000000..c749f3a
--- /dev/null
+++ b/test/couchdb/couchdb_tests.hrl.in
@@ -0,0 +1,23 @@
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+%   http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+-include_lib("eunit/include/eunit.hrl").
+
+-define(BUILDDIR, "@abs_top_builddir@").
+-define(SOURCEDIR, "@abs_top_srcdir@").
+-define(CONFIG_CHAIN, [
+    filename:join([?BUILDDIR, "etc", "couchdb", "default_dev.ini"]),
+    filename:join([?SOURCEDIR, "test", "random_port.ini"]),
+    filename:join([?BUILDDIR, "etc", "couchdb", "local_dev.ini"])]).
+-define(TEMPDIR,
+    filename:join([?SOURCEDIR, "test", "couchdb", "temp"])).
+


[24/36] couchdb commit: updated refs/heads/1963-eunit to 85f2750

Posted by kx...@apache.org.
Port 083-config-no-files.t etap test suite to eunit

Merged into couch_config_tests suite.


Project: http://git-wip-us.apache.org/repos/asf/couchdb/repo
Commit: http://git-wip-us.apache.org/repos/asf/couchdb/commit/c8cdb7ff
Tree: http://git-wip-us.apache.org/repos/asf/couchdb/tree/c8cdb7ff
Diff: http://git-wip-us.apache.org/repos/asf/couchdb/diff/c8cdb7ff

Branch: refs/heads/1963-eunit
Commit: c8cdb7ff2514cd8c3fc847f27b3723c9f05f4459
Parents: 63ba46d
Author: Alexander Shorin <kx...@apache.org>
Authored: Mon May 26 09:46:06 2014 +0400
Committer: Alexander Shorin <kx...@apache.org>
Committed: Tue Jun 3 19:51:33 2014 +0400

----------------------------------------------------------------------
 test/couchdb/couch_config_tests.erl | 37 +++++++++++++++++++++-
 test/etap/083-config-no-files.t     | 53 --------------------------------
 test/etap/Makefile.am               |  1 -
 3 files changed, 36 insertions(+), 55 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/couchdb/blob/c8cdb7ff/test/couchdb/couch_config_tests.erl
----------------------------------------------------------------------
diff --git a/test/couchdb/couch_config_tests.erl b/test/couchdb/couch_config_tests.erl
index 1e5d99e..ec2ee21 100644
--- a/test/couchdb/couch_config_tests.erl
+++ b/test/couchdb/couch_config_tests.erl
@@ -41,6 +41,9 @@ setup(Chain) ->
     {ok, Pid} = couch_config:start_link(Chain),
     Pid.
 
+setup_empty() ->
+    setup([]).
+
 setup_register() ->
     ConfigPid = setup(),
     SentinelFunc = fun() ->
@@ -89,7 +92,8 @@ couch_config_test_() ->
             couch_config_del_tests(),
             config_override_tests(),
             config_persistent_changes_tests(),
-            config_register_tests()
+            config_register_tests(),
+            config_no_files_tests()
         ]
     }.
 
@@ -192,6 +196,20 @@ config_register_tests() ->
         }
     }.
 
+config_no_files_tests() ->
+    {
+        "Test couch_config with no files",
+        {
+            foreach,
+            fun setup_empty/0, fun teardown/1,
+            [
+                should_ensure_that_no_ini_files_loaded(),
+                should_create_non_persistent_option(),
+                should_create_persistent_option()
+            ]
+        }
+    }.
+
 
 should_load_all_configs() ->
     ?_assert(length(couch_config:all()) > 0).
@@ -447,3 +465,20 @@ should_not_trigger_handler_after_related_process_death({_, SentinelPid}) ->
             end
         end
     ).
+
+should_ensure_that_no_ini_files_loaded() ->
+    ?_assertEqual(0, length(couch_config:all())).
+
+should_create_non_persistent_option() ->
+    ?_assertEqual("80",
+        begin
+            ok = couch_config:set("httpd", "port", "80", false),
+            couch_config:get("httpd", "port")
+        end).
+
+should_create_persistent_option() ->
+    ?_assertEqual("127.0.0.1",
+        begin
+            ok = couch_config:set("httpd", "bind_address", "127.0.0.1"),
+            couch_config:get("httpd", "bind_address")
+        end).

http://git-wip-us.apache.org/repos/asf/couchdb/blob/c8cdb7ff/test/etap/083-config-no-files.t
----------------------------------------------------------------------
diff --git a/test/etap/083-config-no-files.t b/test/etap/083-config-no-files.t
deleted file mode 100755
index 0ce38e6..0000000
--- a/test/etap/083-config-no-files.t
+++ /dev/null
@@ -1,53 +0,0 @@
-#!/usr/bin/env escript
-%% -*- erlang -*-
-
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
-
-main(_) ->
-    test_util:init_code_path(),
-    etap:plan(3),
-    case (catch test()) of
-        ok ->
-            etap:end_tests();
-        Other ->
-            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
-            etap:bail(Other)
-    end,
-    ok.
-
-test() ->
-    couch_config:start_link([]),
-
-    etap:fun_is(
-        fun(KVPairs) -> length(KVPairs) == 0 end,
-        couch_config:all(),
-        "No INI files specified returns 0 key/value pairs."
-    ),
-
-    ok = couch_config:set("httpd", "port", "80", false),
-
-    etap:is(
-        couch_config:get("httpd", "port"),
-        "80",
-        "Created a new non-persisted k/v pair."
-    ),
-
-    ok = couch_config:set("httpd", "bind_address", "127.0.0.1"),
-    etap:is(
-        couch_config:get("httpd", "bind_address"),
-        "127.0.0.1",
-        "Asking for a persistent key/value pair doesn't choke."
-    ),
-
-    ok.

http://git-wip-us.apache.org/repos/asf/couchdb/blob/c8cdb7ff/test/etap/Makefile.am
----------------------------------------------------------------------
diff --git a/test/etap/Makefile.am b/test/etap/Makefile.am
index 25889f4..091dae9 100644
--- a/test/etap/Makefile.am
+++ b/test/etap/Makefile.am
@@ -36,7 +36,6 @@ fixture_files = \
     fixtures/test.couch
 
 tap_files = \
-    083-config-no-files.t \
     090-task-status.t \
     100-ref-counter.t \
     120-stats-collect.t \


[25/36] couchdb commit: updated refs/heads/1963-eunit to 85f2750

Posted by kx...@apache.org.
Port 090-task-status.t etap test suite to eunit

Split huge test case into multiple ones. Fix issue with get_task_prop
when Acc may be reset if searched task isn't last in the list.


Project: http://git-wip-us.apache.org/repos/asf/couchdb/repo
Commit: http://git-wip-us.apache.org/repos/asf/couchdb/commit/0e0cde44
Tree: http://git-wip-us.apache.org/repos/asf/couchdb/tree/0e0cde44
Diff: http://git-wip-us.apache.org/repos/asf/couchdb/diff/0e0cde44

Branch: refs/heads/1963-eunit
Commit: 0e0cde44d790c10164852eee512cbbb6f96c1fca
Parents: c8cdb7f
Author: Alexander Shorin <kx...@apache.org>
Authored: Mon May 26 20:23:41 2014 +0400
Committer: Alexander Shorin <kx...@apache.org>
Committed: Tue Jun 3 19:52:12 2014 +0400

----------------------------------------------------------------------
 test/couchdb/Makefile.am                 |   1 +
 test/couchdb/couch_task_status_tests.erl | 225 +++++++++++++++++++++
 test/etap/090-task-status.t              | 279 --------------------------
 test/etap/Makefile.am                    |   1 -
 4 files changed, 226 insertions(+), 280 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/couchdb/blob/0e0cde44/test/couchdb/Makefile.am
----------------------------------------------------------------------
diff --git a/test/couchdb/Makefile.am b/test/couchdb/Makefile.am
index 3cd7060..7d10b89 100644
--- a/test/couchdb/Makefile.am
+++ b/test/couchdb/Makefile.am
@@ -34,6 +34,7 @@ eunit_files = \
     couch_auth_cache_tests.erl \
     couchdb_file_compression_tests.erl \
     couch_config_tests.erl \
+    couch_task_status_tests.erl \
     test_request.erl \
     couchdb_tests.hrl
 

http://git-wip-us.apache.org/repos/asf/couchdb/blob/0e0cde44/test/couchdb/couch_task_status_tests.erl
----------------------------------------------------------------------
diff --git a/test/couchdb/couch_task_status_tests.erl b/test/couchdb/couch_task_status_tests.erl
new file mode 100644
index 0000000..8c80a87
--- /dev/null
+++ b/test/couchdb/couch_task_status_tests.erl
@@ -0,0 +1,225 @@
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+%   http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+-module(couch_task_status_tests).
+
+-include("../../src/couchdb/couch_db.hrl").
+-include("couchdb_tests.hrl").
+
+-define(TIMEOUT, 1000).
+
+
+setup() ->
+    {ok, TaskStatusPid} = couch_task_status:start_link(),
+    TaskUpdaterPid = spawn(fun() -> loop() end),
+    {TaskStatusPid, TaskUpdaterPid}.
+
+teardown({TaskStatusPid, _}) ->
+    erlang:monitor(process, TaskStatusPid),
+    couch_task_status:stop(),
+    receive
+        {'DOWN', _, _, TaskStatusPid, _} ->
+            ok
+    after ?TIMEOUT ->
+        throw(timeout_error)
+    end.
+
+
+couch_task_status_test_() ->
+    {
+        "CouchDB task status updates",
+        {
+            foreach,
+            fun setup/0, fun teardown/1,
+            [
+                fun should_register_task/1,
+                fun should_set_task_startup_time/1,
+                fun should_have_update_time_as_startup_before_any_progress/1,
+                fun should_set_task_type/1,
+                fun should_not_register_multiple_tasks_for_same_pid/1,
+                fun should_set_task_progress/1,
+                fun should_update_task_progress/1,
+                fun should_update_time_changes_on_task_progress/1,
+                fun should_control_update_frequency/1,
+                fun should_reset_control_update_frequency/1,
+                fun should_track_multiple_tasks/1,
+                fun should_finish_task/1
+
+            ]
+        }
+    }.
+
+
+should_register_task({_, Pid}) ->
+    ok = call(Pid, add, [{type, replication}, {progress, 0}]),
+    ?_assertEqual(1, length(couch_task_status:all())).
+
+should_set_task_startup_time({_, Pid}) ->
+    ok = call(Pid, add, [{type, replication}, {progress, 0}]),
+    ?_assert(is_integer(get_task_prop(Pid, started_on))).
+
+should_have_update_time_as_startup_before_any_progress({_, Pid}) ->
+    ok = call(Pid, add, [{type, replication}, {progress, 0}]),
+    StartTime = get_task_prop(Pid, started_on),
+    ?_assertEqual(StartTime, get_task_prop(Pid, updated_on)).
+
+should_set_task_type({_, Pid}) ->
+    ok = call(Pid, add, [{type, replication}, {progress, 0}]),
+    ?_assertEqual(replication, get_task_prop(Pid, type)).
+
+should_not_register_multiple_tasks_for_same_pid({_, Pid}) ->
+    ok = call(Pid, add, [{type, replication}, {progress, 0}]),
+    ?_assertEqual({add_task_error, already_registered},
+                  call(Pid, add, [{type, compaction}, {progress, 0}])).
+
+should_set_task_progress({_, Pid}) ->
+    ok = call(Pid, add, [{type, replication}, {progress, 0}]),
+    ?_assertEqual(0, get_task_prop(Pid, progress)).
+
+should_update_task_progress({_, Pid}) ->
+    ok = call(Pid, add, [{type, replication}, {progress, 0}]),
+    call(Pid, update, [{progress, 25}]),
+    ?_assertEqual(25, get_task_prop(Pid, progress)).
+
+should_update_time_changes_on_task_progress({_, Pid}) ->
+    ?_assert(
+        begin
+            ok = call(Pid, add, [{type, replication}, {progress, 0}]),
+            ok = timer:sleep(1000),  % sleep awhile to customize update time
+            call(Pid, update, [{progress, 25}]),
+            get_task_prop(Pid, updated_on) > get_task_prop(Pid, started_on)
+        end).
+
+should_control_update_frequency({_, Pid}) ->
+    ?_assertEqual(66,
+        begin
+            ok = call(Pid, add, [{type, replication}, {progress, 0}]),
+            call(Pid, update, [{progress, 50}]),
+            call(Pid, update_frequency, 500),
+            call(Pid, update, [{progress, 66}]),
+            call(Pid, update, [{progress, 77}]),
+            get_task_prop(Pid, progress)
+        end).
+
+should_reset_control_update_frequency({_, Pid}) ->
+    ?_assertEqual(87,
+        begin
+            ok = call(Pid, add, [{type, replication}, {progress, 0}]),
+            call(Pid, update, [{progress, 50}]),
+            call(Pid, update_frequency, 500),
+            call(Pid, update, [{progress, 66}]),
+            call(Pid, update, [{progress, 77}]),
+            call(Pid, update_frequency, 0),
+            call(Pid, update, [{progress, 87}]),
+            get_task_prop(Pid, progress)
+        end).
+
+should_track_multiple_tasks(_) ->
+    ?_assert(run_multiple_tasks()).
+
+should_finish_task({_, Pid}) ->
+    ok = call(Pid, add, [{type, replication}, {progress, 0}]),
+    ?assertEqual(1, length(couch_task_status:all())),
+    ok = call(Pid, done),
+    ?_assertEqual(0, length(couch_task_status:all())).
+
+
+run_multiple_tasks() ->
+    Pid1 = spawn(fun() -> loop() end),
+    Pid2 = spawn(fun() -> loop() end),
+    Pid3 = spawn(fun() -> loop() end),
+    call(Pid1, add, [{type, replication}, {progress, 0}]),
+    call(Pid2, add, [{type, compaction}, {progress, 0}]),
+    call(Pid3, add, [{type, indexer}, {progress, 0}]),
+
+    ?assertEqual(3, length(couch_task_status:all())),
+    ?assertEqual(replication, get_task_prop(Pid1, type)),
+    ?assertEqual(compaction, get_task_prop(Pid2, type)),
+    ?assertEqual(indexer, get_task_prop(Pid3, type)),
+
+    call(Pid2, update, [{progress, 33}]),
+    call(Pid3, update, [{progress, 42}]),
+    call(Pid1, update, [{progress, 11}]),
+    ?assertEqual(42, get_task_prop(Pid3, progress)),
+    call(Pid1, update, [{progress, 72}]),
+    ?assertEqual(72, get_task_prop(Pid1, progress)),
+    ?assertEqual(33, get_task_prop(Pid2, progress)),
+
+    call(Pid1, done),
+    ?assertEqual(2, length(couch_task_status:all())),
+    call(Pid3, done),
+    ?assertEqual(1, length(couch_task_status:all())),
+    call(Pid2, done),
+    ?assertEqual(0, length(couch_task_status:all())),
+
+    true.
+
+
+loop() ->
+    receive
+        {add, Props, From} ->
+            Resp = couch_task_status:add_task(Props),
+            From ! {ok, self(), Resp},
+            loop();
+        {update, Props, From} ->
+            Resp = couch_task_status:update(Props),
+            From ! {ok, self(), Resp},
+            loop();
+        {update_frequency, Msecs, From} ->
+            Resp = couch_task_status:set_update_frequency(Msecs),
+            From ! {ok, self(), Resp},
+            loop();
+        {done, From} ->
+            From ! {ok, self(), ok}
+    end.
+
+call(Pid, Command) ->
+    Pid ! {Command, self()},
+    wait(Pid).
+
+call(Pid, Command, Arg) ->
+    Pid ! {Command, Arg, self()},
+    wait(Pid).
+
+wait(Pid) ->
+    receive
+        {ok, Pid, Msg} ->
+            Msg
+    after ?TIMEOUT ->
+        throw(timeout_error)
+    end.
+
+get_task_prop(Pid, Prop) ->
+    From = list_to_binary(pid_to_list(Pid)),
+    Element = lists:foldl(
+        fun(PropList, Acc) ->
+            case couch_util:get_value(pid, PropList) of
+                From ->
+                    [PropList | Acc];
+                _ ->
+                    Acc
+            end
+        end,
+        [], couch_task_status:all()
+    ),
+    case couch_util:get_value(Prop, hd(Element), nil) of
+        nil ->
+            erlang:error({assertion_failed,
+                         [{module, ?MODULE},
+                          {line, ?LINE},
+                          {reason, "Could not get property '"
+                                   ++ couch_util:to_list(Prop)
+                                   ++ "' for task "
+                                   ++ pid_to_list(Pid)}]});
+        Value ->
+            Value
+    end.

http://git-wip-us.apache.org/repos/asf/couchdb/blob/0e0cde44/test/etap/090-task-status.t
----------------------------------------------------------------------
diff --git a/test/etap/090-task-status.t b/test/etap/090-task-status.t
deleted file mode 100755
index 23115bd..0000000
--- a/test/etap/090-task-status.t
+++ /dev/null
@@ -1,279 +0,0 @@
-#!/usr/bin/env escript
-%% -*- erlang -*-
-
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
-main(_) ->
-    test_util:init_code_path(),
-    etap:plan(28),
-    case (catch test()) of
-        ok ->
-            etap:end_tests();
-        Other ->
-            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
-            etap:bail(Other)
-    end,
-    ok.
-
-get_task_prop(Pid, Prop) ->
-    From = list_to_binary(pid_to_list(Pid)),
-    Element = lists:foldl(
-        fun(PropList,Acc) ->
-            case couch_util:get_value(pid,PropList) of
-                From ->
-                    [PropList | Acc];
-                _ ->
-                    []
-            end
-        end,
-        [], couch_task_status:all()
-    ),
-    case couch_util:get_value(Prop, hd(Element), nil) of
-    nil ->
-        etap:bail("Could not get property '" ++ couch_util:to_list(Prop) ++
-            "' for task " ++ pid_to_list(Pid));
-    Value ->
-        Value
-    end.
-
-
-loop() ->
-    receive
-    {add, Props, From} ->
-        Resp = couch_task_status:add_task(Props),
-        From ! {ok, self(), Resp},
-        loop();
-    {update, Props, From} ->
-        Resp = couch_task_status:update(Props),
-        From ! {ok, self(), Resp},
-        loop();
-    {update_frequency, Msecs, From} ->
-        Resp = couch_task_status:set_update_frequency(Msecs),
-        From ! {ok, self(), Resp},
-        loop();
-    {done, From} ->
-        From ! {ok, self(), ok}
-    end.
-
-call(Pid, Command) ->
-    Pid ! {Command, self()},
-    wait(Pid).
-
-call(Pid, Command, Arg) ->
-    Pid ! {Command, Arg, self()},
-    wait(Pid).
-
-wait(Pid) ->
-    receive
-        {ok, Pid, Msg} -> Msg
-    after 1000 ->
-        throw(timeout_error)
-    end.
-
-test() ->
-    {ok, TaskStatusPid} = couch_task_status:start_link(),
-
-    TaskUpdater = fun() -> loop() end,
-    % create three updaters
-    Pid1 = spawn(TaskUpdater),
-    Pid2 = spawn(TaskUpdater),
-    Pid3 = spawn(TaskUpdater),
-
-    ok = call(Pid1, add, [{type, replication}, {progress, 0}]),
-    etap:is(
-        length(couch_task_status:all()),
-        1,
-        "Started a task"
-    ),
-    Task1StartTime = get_task_prop(Pid1, started_on),
-    etap:is(
-        is_integer(Task1StartTime),
-        true,
-        "Task start time is defined."
-    ),
-    etap:is(
-        get_task_prop(Pid1, updated_on),
-        Task1StartTime,
-        "Task's start time is the same as the update time before an update."
-    ),
-
-    etap:is(
-        call(Pid1, add, [{type, compaction}, {progress, 0}]),
-        {add_task_error, already_registered},
-        "Unable to register multiple tasks for a single Pid."
-    ),
-
-    etap:is(
-        get_task_prop(Pid1, type),
-        replication,
-        "Task type is 'replication'."
-    ),
-    etap:is(
-        get_task_prop(Pid1, progress),
-        0,
-        "Task progress is 0."
-    ),
-
-    ok = timer:sleep(1000),
-    call(Pid1, update, [{progress, 25}]),
-    etap:is(
-        get_task_prop(Pid1, progress),
-        25,
-        "Task progress is 25."
-    ),
-    etap:is(
-        get_task_prop(Pid1, updated_on) > Task1StartTime,
-        true,
-        "Task's last update time has increased after an update."
-    ),
-
-    call(Pid2, add, [{type, compaction}, {progress, 0}]),
-    etap:is(
-        length(couch_task_status:all()),
-        2,
-        "Started a second task."
-    ),
-    Task2StartTime = get_task_prop(Pid2, started_on),
-    etap:is(
-        is_integer(Task2StartTime),
-        true,
-        "Second task's start time is defined."
-    ),
-    etap:is(
-        get_task_prop(Pid2, updated_on),
-        Task2StartTime,
-        "Second task's start time is the same as the update time before an update."
-    ),
-
-    etap:is(
-        get_task_prop(Pid2, type),
-        compaction,
-        "Second task's type is 'compaction'."
-    ),
-    etap:is(
-        get_task_prop(Pid2, progress),
-        0,
-        "Second task's progress is 0."
-    ),
-
-    ok = timer:sleep(1000),
-    call(Pid2, update, [{progress, 33}]),
-    etap:is(
-        get_task_prop(Pid2, progress),
-        33,
-        "Second task's progress updated to 33."
-    ),
-    etap:is(
-        get_task_prop(Pid2, updated_on) > Task2StartTime,
-        true,
-        "Second task's last update time has increased after an update."
-    ),
-
-    call(Pid3, add, [{type, indexer}, {progress, 0}]),
-    etap:is(
-        length(couch_task_status:all()),
-        3,
-        "Registered a third task."
-    ),
-    Task3StartTime = get_task_prop(Pid3, started_on),
-    etap:is(
-        is_integer(Task3StartTime),
-        true,
-        "Third task's start time is defined."
-    ),
-    etap:is(
-        get_task_prop(Pid3, updated_on),
-        Task3StartTime,
-        "Third task's start time is the same as the update time before an update."
-    ),
-
-    etap:is(
-        get_task_prop(Pid3, type),
-        indexer,
-        "Third task's type is 'indexer'."
-    ),
-    etap:is(
-        get_task_prop(Pid3, progress),
-        0,
-        "Third task's progress is 0."
-    ),
-
-    ok = timer:sleep(1000),
-    call(Pid3, update, [{progress, 50}]),
-    etap:is(
-        get_task_prop(Pid3, progress),
-        50,
-        "Third task's progress updated to 50."
-    ),
-    etap:is(
-        get_task_prop(Pid3, updated_on) > Task3StartTime,
-        true,
-        "Third task's last update time has increased after an update."
-    ),
-
-    call(Pid3, update_frequency, 500),
-    call(Pid3, update, [{progress, 66}]),
-    etap:is(
-        get_task_prop(Pid3, progress),
-        66,
-        "Third task's progress updated to 66."
-    ),
-
-    call(Pid3, update, [{progress, 67}]),
-    etap:is(
-        get_task_prop(Pid3, progress),
-        66,
-        "Task update dropped because of frequency limit."
-    ),
-
-    call(Pid3, update_frequency, 0),
-    call(Pid3, update, [{progress, 77}]),
-    etap:is(
-        get_task_prop(Pid3, progress),
-        77,
-        "Task updated after reseting frequency limit."
-    ),
-
-
-    call(Pid1, done),
-    etap:is(
-        length(couch_task_status:all()),
-        2,
-        "First task finished."
-    ),
-
-    call(Pid2, done),
-    etap:is(
-        length(couch_task_status:all()),
-        1,
-        "Second task finished."
-    ),
-
-    call(Pid3, done),
-    etap:is(
-        length(couch_task_status:all()),
-        0,
-        "Third task finished."
-    ),
-
-    erlang:monitor(process, TaskStatusPid),
-    couch_task_status:stop(),
-    receive
-        {'DOWN', _, _, TaskStatusPid, _} ->
-            ok
-    after
-        1000 ->
-            throw(timeout_error)
-    end,
-
-    ok.

http://git-wip-us.apache.org/repos/asf/couchdb/blob/0e0cde44/test/etap/Makefile.am
----------------------------------------------------------------------
diff --git a/test/etap/Makefile.am b/test/etap/Makefile.am
index 091dae9..4657656 100644
--- a/test/etap/Makefile.am
+++ b/test/etap/Makefile.am
@@ -36,7 +36,6 @@ fixture_files = \
     fixtures/test.couch
 
 tap_files = \
-    090-task-status.t \
     100-ref-counter.t \
     120-stats-collect.t \
     121-stats-aggregates.cfg \


[16/36] couchdb commit: updated refs/heads/1963-eunit to 85f2750

Posted by kx...@apache.org.
Port 070-couch-db.t etap test suite to eunit

Fix ERL_LIB environment variable setting. Add ?tempdb macros for
unique temporary database name generation.


Project: http://git-wip-us.apache.org/repos/asf/couchdb/repo
Commit: http://git-wip-us.apache.org/repos/asf/couchdb/commit/e722195f
Tree: http://git-wip-us.apache.org/repos/asf/couchdb/tree/e722195f
Diff: http://git-wip-us.apache.org/repos/asf/couchdb/diff/e722195f

Branch: refs/heads/1963-eunit
Commit: e722195ff76244f9011dff5ff1b20bb2474dc101
Parents: 5f6a0b6
Author: Alexander Shorin <kx...@apache.org>
Authored: Mon May 19 05:36:06 2014 +0400
Committer: Alexander Shorin <kx...@apache.org>
Committed: Tue Jun 3 15:14:25 2014 +0400

----------------------------------------------------------------------
 test/couchdb/Makefile.am          |  1 +
 test/couchdb/couch_db_tests.erl   | 90 ++++++++++++++++++++++++++++++++++
 test/couchdb/couchdb_tests.hrl.in |  7 +++
 test/couchdb/run.in               |  2 +-
 test/etap/070-couch-db.t          | 73 ---------------------------
 test/etap/Makefile.am             |  1 -
 6 files changed, 99 insertions(+), 75 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/couchdb/blob/e722195f/test/couchdb/Makefile.am
----------------------------------------------------------------------
diff --git a/test/couchdb/Makefile.am b/test/couchdb/Makefile.am
index 396a36b..b8ad5ed 100644
--- a/test/couchdb/Makefile.am
+++ b/test/couchdb/Makefile.am
@@ -26,6 +26,7 @@ eunit_files = \
     couch_work_queue_tests.erl \
     couch_stream_tests.erl \
     couch_key_tree_tests.erl \
+    couch_db_tests.erl \
     couchdb_tests.hrl
 
 EXTRA_DIST = \

http://git-wip-us.apache.org/repos/asf/couchdb/blob/e722195f/test/couchdb/couch_db_tests.erl
----------------------------------------------------------------------
diff --git a/test/couchdb/couch_db_tests.erl b/test/couchdb/couch_db_tests.erl
new file mode 100644
index 0000000..d651126
--- /dev/null
+++ b/test/couchdb/couch_db_tests.erl
@@ -0,0 +1,90 @@
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+%   http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+-module(couch_db_tests).
+
+-include_lib("couchdb_tests.hrl").
+
+
+setup() ->
+    {ok, _} = couch_server_sup:start_link(?CONFIG_CHAIN),
+    ok.
+
+teardown(_) ->
+    couch_server_sup:stop().
+
+
+create_delete_db_test_()->
+    {
+        "Database create/delete tests",
+        {
+            setup,
+            fun setup/0, fun teardown/1,
+            fun(_) ->
+                [should_create_db(),
+                 should_delete_db(),
+                 should_create_multiple_dbs(),
+                 should_delete_multiple_dbs()]
+            end
+        }
+    }.
+
+
+should_create_db() ->
+    DbName = ?tempdb(),
+    {ok, Db} = couch_db:create(DbName, []),
+    ok = couch_db:close(Db),
+    {ok, AllDbs} = couch_server:all_databases(),
+    ?_assert(lists:member(DbName, AllDbs)).
+
+should_delete_db() ->
+    DbName = ?tempdb(),
+    couch_db:create(DbName, []),
+    couch_server:delete(DbName, []),
+    {ok, AllDbs} = couch_server:all_databases(),
+    ?_assertNot(lists:member(DbName, AllDbs)).
+
+should_create_multiple_dbs() ->
+    gen_server:call(couch_server, {set_max_dbs_open, 3}),
+
+    DbNames = [?tempdb() || _ <- lists:seq(1, 6)],
+    lists:foreach(fun(DbName) ->
+        {ok, Db} = couch_db:create(DbName, []),
+        ok = couch_db:close(Db)
+    end, DbNames),
+
+    {ok, AllDbs} = couch_server:all_databases(),
+    NumCreated = lists:foldl(fun(DbName, Acc) ->
+        ?assert(lists:member(DbName, AllDbs)),
+        Acc+1
+    end, 0, DbNames),
+
+    ?_assertEqual(NumCreated, 6).
+
+should_delete_multiple_dbs() ->
+    DbNames = [?tempdb() || _ <- lists:seq(1, 6)],
+    lists:foreach(fun(DbName) ->
+        {ok, Db} = couch_db:create(DbName, []),
+        ok = couch_db:close(Db)
+    end, DbNames),
+
+    lists:foreach(fun(DbName) ->
+        ok = couch_server:delete(DbName, [])
+    end, DbNames),
+
+    {ok, AllDbs} = couch_server:all_databases(),
+    NumDeleted = lists:foldl(fun(DbName, Acc) ->
+        ?assertNot(lists:member(DbName, AllDbs)),
+        Acc + 1
+    end, 0, DbNames),
+
+    ?_assertEqual(NumDeleted, 6).

http://git-wip-us.apache.org/repos/asf/couchdb/blob/e722195f/test/couchdb/couchdb_tests.hrl.in
----------------------------------------------------------------------
diff --git a/test/couchdb/couchdb_tests.hrl.in b/test/couchdb/couchdb_tests.hrl.in
index e5a8b4e..1014c78 100644
--- a/test/couchdb/couchdb_tests.hrl.in
+++ b/test/couchdb/couchdb_tests.hrl.in
@@ -28,3 +28,10 @@
         FileName = lists:flatten(io_lib:format("~p-~p.~p.~p", [N, A, B, C])),
         filename:join([?TEMPDIR, FileName])
     end).
+-define(tempdb,
+    fun() ->
+            Nums = tuple_to_list(erlang:now()),
+            Prefix = "eunit-test-db",
+            Suffix = lists:concat([integer_to_list(Num) || Num <- Nums]),
+            list_to_binary(Prefix ++ "-" ++ Suffix)
+    end).

http://git-wip-us.apache.org/repos/asf/couchdb/blob/e722195f/test/couchdb/run.in
----------------------------------------------------------------------
diff --git a/test/couchdb/run.in b/test/couchdb/run.in
index 80f4041..06109aa 100644
--- a/test/couchdb/run.in
+++ b/test/couchdb/run.in
@@ -1,6 +1,6 @@
 #!/usr/bin/env escript
 %% -*- erlang -*-
-%%! -DTEST -pa @abs_top_builddir@/test/couchdb/ebin
+%%! -DTEST -env ERL_LIBS @abs_top_builddir@/src:$ERL_LIBS -pa @abs_top_builddir@/test/couchdb/ebin
 %%
 %% Licensed under the Apache License, Version 2.0 (the "License"); you may not
 %% use this file except in compliance with the License. You may obtain a copy of

http://git-wip-us.apache.org/repos/asf/couchdb/blob/e722195f/test/etap/070-couch-db.t
----------------------------------------------------------------------
diff --git a/test/etap/070-couch-db.t b/test/etap/070-couch-db.t
deleted file mode 100755
index 787d6c6..0000000
--- a/test/etap/070-couch-db.t
+++ /dev/null
@@ -1,73 +0,0 @@
-#!/usr/bin/env escript
-%% -*- erlang -*-
-
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
-main(_) ->
-    test_util:init_code_path(),
-
-    etap:plan(4),
-    case (catch test()) of
-        ok ->
-            etap:end_tests();
-        Other ->
-            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
-            etap:bail(Other)
-    end,
-    ok.
-
-test() ->
-
-    couch_server_sup:start_link(test_util:config_files()),
-
-    couch_db:create(<<"etap-test-db">>, []),
-    {ok, AllDbs} = couch_server:all_databases(),
-    etap:ok(lists:member(<<"etap-test-db">>, AllDbs), "Database was created."),
-
-    couch_server:delete(<<"etap-test-db">>, []),
-    {ok, AllDbs2} = couch_server:all_databases(),
-    etap:ok(not lists:member(<<"etap-test-db">>, AllDbs2),
-        "Database was deleted."),
-
-    gen_server:call(couch_server, {set_max_dbs_open, 3}),
-    MkDbName = fun(Int) -> list_to_binary("lru-" ++ integer_to_list(Int)) end,
-
-    lists:foreach(fun(Int) ->
-        {ok, TestDbs} = couch_server:all_databases(),
-        ok = case lists:member(MkDbName(Int), TestDbs) of
-            true -> couch_server:delete(MkDbName(Int), []);
-            _ -> ok
-        end,
-        {ok, Db} = couch_db:create(MkDbName(Int), []),
-        ok = couch_db:close(Db)
-    end, lists:seq(1, 6)),
-
-    {ok, AllDbs3} = couch_server:all_databases(),
-    NumCreated = lists:foldl(fun(Int, Acc) ->
-        true = lists:member(MkDbName(Int), AllDbs3),
-        Acc+1
-    end, 0, lists:seq(1, 6)),
-    etap:is(6, NumCreated, "Created all databases."),
-
-    lists:foreach(fun(Int) ->
-        ok = couch_server:delete(MkDbName(Int), [])
-    end, lists:seq(1, 6)),
-
-    {ok, AllDbs4} = couch_server:all_databases(),
-    NumDeleted = lists:foldl(fun(Int, Acc) ->
-        false = lists:member(MkDbName(Int), AllDbs4),
-        Acc+1
-    end, 0, lists:seq(1, 6)),
-    etap:is(6, NumDeleted, "Deleted all databases."),
-
-    ok.

http://git-wip-us.apache.org/repos/asf/couchdb/blob/e722195f/test/etap/Makefile.am
----------------------------------------------------------------------
diff --git a/test/etap/Makefile.am b/test/etap/Makefile.am
index fe50da3..408ca40 100644
--- a/test/etap/Makefile.am
+++ b/test/etap/Makefile.am
@@ -36,7 +36,6 @@ fixture_files = \
     fixtures/test.couch
 
 tap_files = \
-    070-couch-db.t \
     072-cleanup.t \
     073-changes.t \
     074-doc-update-conflicts.t \


[05/36] couchdb commit: updated refs/heads/1963-eunit to 85f2750

Posted by kx...@apache.org.
Port 020-btree-basics.t and 021-btree-reductions.t etap suites to eunit

Both merged into single suite since they tests single target and shares
common bits.


Project: http://git-wip-us.apache.org/repos/asf/couchdb/repo
Commit: http://git-wip-us.apache.org/repos/asf/couchdb/commit/92354100
Tree: http://git-wip-us.apache.org/repos/asf/couchdb/tree/92354100
Diff: http://git-wip-us.apache.org/repos/asf/couchdb/diff/92354100

Branch: refs/heads/1963-eunit
Commit: 92354100717f2d44ef9eb6a5c7b8350f7d36790e
Parents: 436ad3a
Author: Alexander Shorin <kx...@apache.org>
Authored: Sat May 17 03:28:14 2014 +0400
Committer: Alexander Shorin <kx...@apache.org>
Committed: Tue Jun 3 02:55:24 2014 +0400

----------------------------------------------------------------------
 test/couchdb/Makefile.am           |   1 +
 test/couchdb/couch_btree_tests.erl | 551 ++++++++++++++++++++++++++++++++
 test/etap/020-btree-basics.t       | 265 ---------------
 test/etap/021-btree-reductions.t   | 237 --------------
 test/etap/Makefile.am              |   2 -
 5 files changed, 552 insertions(+), 504 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/couchdb/blob/92354100/test/couchdb/Makefile.am
----------------------------------------------------------------------
diff --git a/test/couchdb/Makefile.am b/test/couchdb/Makefile.am
index c3d170f..7ad8ae5 100644
--- a/test/couchdb/Makefile.am
+++ b/test/couchdb/Makefile.am
@@ -20,6 +20,7 @@ eunit_files = \
     couchdb_modules_load_tests.erl \
     couch_util_tests.erl \
     couch_file_tests.erl \
+    couch_btree_tests.erl \
     couchdb_tests.hrl
 
 EXTRA_DIST = \

http://git-wip-us.apache.org/repos/asf/couchdb/blob/92354100/test/couchdb/couch_btree_tests.erl
----------------------------------------------------------------------
diff --git a/test/couchdb/couch_btree_tests.erl b/test/couchdb/couch_btree_tests.erl
new file mode 100644
index 0000000..d909d0a
--- /dev/null
+++ b/test/couchdb/couch_btree_tests.erl
@@ -0,0 +1,551 @@
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+%   http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+-module(couch_btree_tests).
+
+-include_lib("couchdb_tests.hrl").
+-include("../../src/couchdb/couch_db.hrl").
+
+-define(ROWS, 1000).
+
+
+setup() ->
+    {ok, Fd} = couch_file:open(?tempfile(), [create, overwrite]),
+    {ok, Btree} = couch_btree:open(nil, Fd, [{compression, none},
+                                             {reduce, fun reduce_fun/2}]),
+    {Fd, Btree}.
+
+setup_kvs(_) ->
+    setup().
+
+setup_red() ->
+    {_, EvenOddKVs} = lists:foldl(
+        fun(Idx, {Key, Acc}) ->
+            case Key of
+                "even" -> {"odd", [{{Key, Idx}, 1} | Acc]};
+                _ -> {"even", [{{Key, Idx}, 1} | Acc]}
+            end
+        end, {"odd", []}, lists:seq(1, ?ROWS)),
+    {Fd, Btree} = setup(),
+    {ok, Btree1} = couch_btree:add_remove(Btree, EvenOddKVs, []),
+    {Fd, Btree1}.
+setup_red(_) ->
+    setup_red().
+
+teardown(Fd) when is_pid(Fd) ->
+    ok = couch_file:close(Fd);
+teardown({Fd, _}) ->
+    teardown(Fd).
+teardown(_, {Fd, _}) ->
+    teardown(Fd).
+
+
+kvs_test_funs() ->
+    [
+        fun should_set_fd_correctly/2,
+        fun should_set_root_correctly/2,
+        fun should_create_zero_sized_btree/2,
+        fun should_set_reduce_option/2,
+        fun should_fold_over_empty_btree/2,
+        fun should_add_all_keys/2,
+        fun should_continuously_add_new_kv/2,
+        fun should_continuously_remove_keys/2,
+        fun should_insert_keys_in_reversed_order/2,
+        fun should_add_every_odd_key_remove_every_even/2,
+        fun should_add_every_even_key_remove_every_old/2
+    ].
+
+red_test_funs() ->
+    [
+        fun should_reduce_whole_range/2,
+        fun should_reduce_first_half/2,
+        fun should_reduce_second_half/2
+    ].
+
+
+btree_open_test_() ->
+    {ok, Fd} = couch_file:open(?tempfile(), [create, overwrite]),
+    {ok, Btree} = couch_btree:open(nil, Fd, [{compression, none}]),
+    {
+        "Ensure that created btree is really a btree record",
+        ?_assert(is_record(Btree, btree))
+    }.
+
+sorted_kvs_test_() ->
+    Funs = kvs_test_funs(),
+    Sorted = [{Seq, random:uniform()} || Seq <- lists:seq(1, ?ROWS)],
+    {
+        "BTree with sorted keys",
+        {
+            foreachx,
+            fun setup_kvs/1, fun teardown/2,
+            [{Sorted, Fun} || Fun <- Funs]
+        }
+    }.
+
+rsorted_kvs_test_() ->
+    Sorted = [{Seq, random:uniform()} || Seq <- lists:seq(1, ?ROWS)],
+    Funs = kvs_test_funs(),
+    Reversed = Sorted,
+    {
+        "BTree with backward sorted keys",
+        {
+            foreachx,
+            fun setup_kvs/1, fun teardown/2,
+            [{Reversed, Fun} || Fun <- Funs]
+        }
+    }.
+
+shuffled_kvs_test_() ->
+    Funs = kvs_test_funs(),
+    Sorted = [{Seq, random:uniform()} || Seq <- lists:seq(1, ?ROWS)],
+    Shuffled = shuffle(Sorted),
+    {
+        "BTree with shuffled keys",
+        {
+            foreachx,
+            fun setup_kvs/1, fun teardown/2,
+            [{Shuffled, Fun} || Fun <- Funs]
+        }
+    }.
+
+reductions_test_() ->
+    {
+        "BTree reductions",
+        [
+            {
+                "Common tests",
+                {
+                    foreach,
+                    fun setup_red/0, fun teardown/1,
+                    [
+                        fun should_reduce_without_specified_direction/1,
+                        fun should_reduce_forward/1,
+                        fun should_reduce_backward/1
+                    ]
+                }
+            },
+            {
+                "Range requests",
+                [
+                    {
+                        "Forward direction",
+                        {
+                            foreachx,
+                            fun setup_red/1, fun teardown/2,
+                            [{fwd, F} || F <- red_test_funs()]
+                        }
+                    },
+                    {
+                        "Backward direction",
+                        {
+                            foreachx,
+                            fun setup_red/1, fun teardown/2,
+                            [{rev, F} || F <- red_test_funs()]
+                        }
+                    }
+                ]
+            }
+        ]
+    }.
+
+
+should_set_fd_correctly(_, {Fd, Btree}) ->
+    ?_assertMatch(Fd, Btree#btree.fd).
+
+should_set_root_correctly(_, {_, Btree}) ->
+    ?_assertMatch(nil, Btree#btree.root).
+
+should_create_zero_sized_btree(_, {_, Btree}) ->
+    ?_assertMatch(0, couch_btree:size(Btree)).
+
+should_set_reduce_option(_, {_, Btree}) ->
+    ReduceFun = fun reduce_fun/2,
+    Btree1 = couch_btree:set_options(Btree, [{reduce, ReduceFun}]),
+    ?_assertMatch(ReduceFun, Btree1#btree.reduce).
+
+should_fold_over_empty_btree(_, {_, Btree}) ->
+    {ok, _, EmptyRes} = couch_btree:foldl(Btree, fun(_, X) -> {ok, X+1} end, 0),
+    ?_assertEqual(EmptyRes, 0).
+
+should_add_all_keys(KeyValues, {Fd, Btree}) ->
+    {ok, Btree1} = couch_btree:add_remove(Btree, KeyValues, []),
+    [
+        should_return_complete_btree_on_adding_all_keys(KeyValues, Btree1),
+        should_have_non_zero_size(Btree1),
+        should_have_lesser_size_than_file(Fd, Btree1),
+        should_keep_root_pointer_to_kp_node(Fd, Btree1),
+        should_remove_all_keys(KeyValues, Btree1)
+    ].
+
+should_return_complete_btree_on_adding_all_keys(KeyValues, Btree) ->
+    ?_assert(test_btree(Btree, KeyValues)).
+
+should_have_non_zero_size(Btree) ->
+    ?_assert(couch_btree:size(Btree) > 0).
+
+should_have_lesser_size_than_file(Fd, Btree) ->
+    ?_assert((couch_btree:size(Btree) =< couch_file:bytes(Fd))).
+
+should_keep_root_pointer_to_kp_node(Fd, Btree) ->
+    ?_assertMatch({ok, {kp_node, _}},
+                  couch_file:pread_term(Fd, element(1, Btree#btree.root))).
+
+should_remove_all_keys(KeyValues, Btree) ->
+    Keys = keys(KeyValues),
+    {ok, Btree1} = couch_btree:add_remove(Btree, [], Keys),
+    {
+        "Should remove all the keys",
+        [
+            should_produce_valid_btree(Btree1, []),
+            should_be_empty(Btree1)
+        ]
+    }.
+
+should_continuously_add_new_kv(KeyValues, {_, Btree}) ->
+    {Btree1, _} = lists:foldl(
+        fun(KV, {BtAcc, PrevSize}) ->
+            {ok, BtAcc2} = couch_btree:add_remove(BtAcc, [KV], []),
+            ?assert(couch_btree:size(BtAcc2) > PrevSize),
+            {BtAcc2, couch_btree:size(BtAcc2)}
+        end, {Btree, couch_btree:size(Btree)}, KeyValues),
+    {
+        "Should continuously add key-values to btree",
+        [
+            should_produce_valid_btree(Btree1, KeyValues),
+            should_not_be_empty(Btree1)
+        ]
+    }.
+
+should_continuously_remove_keys(KeyValues, {_, Btree}) ->
+    {ok, Btree1} = couch_btree:add_remove(Btree, KeyValues, []),
+    {Btree2, _} = lists:foldl(
+        fun({K, _}, {BtAcc, PrevSize}) ->
+            {ok, BtAcc2} = couch_btree:add_remove(BtAcc, [], [K]),
+            ?assert(couch_btree:size(BtAcc2) < PrevSize),
+            {BtAcc2, couch_btree:size(BtAcc2)}
+        end, {Btree1, couch_btree:size(Btree1)}, KeyValues),
+    {
+        "Should continuously remove keys from btree",
+        [
+            should_produce_valid_btree(Btree2, []),
+            should_be_empty(Btree2)
+        ]
+    }.
+
+should_insert_keys_in_reversed_order(KeyValues, {_, Btree}) ->
+    KeyValuesRev = lists:reverse(KeyValues),
+    {Btree1, _} = lists:foldl(
+        fun(KV, {BtAcc, PrevSize}) ->
+            {ok, BtAcc2} = couch_btree:add_remove(BtAcc, [KV], []),
+            ?assert(couch_btree:size(BtAcc2) > PrevSize),
+            {BtAcc2, couch_btree:size(BtAcc2)}
+        end, {Btree, couch_btree:size(Btree)}, KeyValuesRev),
+    should_produce_valid_btree(Btree1, KeyValues).
+
+should_add_every_odd_key_remove_every_even(KeyValues, {_, Btree}) ->
+    {ok, Btree1} = couch_btree:add_remove(Btree, KeyValues, []),
+    {_, Rem2Keys0, Rem2Keys1} = lists:foldl(fun(X, {Count, Left, Right}) ->
+        case Count rem 2 == 0 of
+            true -> {Count + 1, [X | Left], Right};
+            false -> {Count + 1, Left, [X | Right]}
+        end
+                                            end, {0, [], []}, KeyValues),
+    ?_assert(test_add_remove(Btree1, Rem2Keys0, Rem2Keys1)).
+
+should_add_every_even_key_remove_every_old(KeyValues, {_, Btree}) ->
+    {ok, Btree1} = couch_btree:add_remove(Btree, KeyValues, []),
+    {_, Rem2Keys0, Rem2Keys1} = lists:foldl(fun(X, {Count, Left, Right}) ->
+        case Count rem 2 == 0 of
+            true -> {Count + 1, [X | Left], Right};
+            false -> {Count + 1, Left, [X | Right]}
+        end
+                                            end, {0, [], []}, KeyValues),
+    ?_assert(test_add_remove(Btree1, Rem2Keys1, Rem2Keys0)).
+
+
+should_reduce_without_specified_direction({_, Btree}) ->
+    ?_assertMatch(
+        {ok, [{{"odd", _}, ?ROWS div 2}, {{"even", _}, ?ROWS div 2}]},
+        fold_reduce(Btree, [])).
+
+should_reduce_forward({_, Btree}) ->
+    ?_assertMatch(
+        {ok, [{{"odd", _}, ?ROWS div 2}, {{"even", _}, ?ROWS div 2}]},
+        fold_reduce(Btree, [{dir, fwd}])).
+
+should_reduce_backward({_, Btree}) ->
+    ?_assertMatch(
+        {ok, [{{"even", _}, ?ROWS div 2}, {{"odd", _}, ?ROWS div 2}]},
+        fold_reduce(Btree, [{dir, rev}])).
+
+should_reduce_whole_range(fwd, {_, Btree}) ->
+    {SK, EK} = {{"even", 0}, {"odd", ?ROWS - 1}},
+    [
+        {
+            "include endkey",
+            ?_assertMatch(
+                {ok, [{{"odd", 1}, ?ROWS div 2},
+                      {{"even", 2}, ?ROWS div 2}]},
+                fold_reduce(Btree, [{dir, fwd},
+                                    {start_key, SK},
+                                    {end_key, EK}]))
+        },
+        {
+            "exclude endkey",
+            ?_assertMatch(
+                {ok, [{{"odd", 1}, (?ROWS div 2) - 1},
+                      {{"even", 2}, ?ROWS div 2}]},
+                fold_reduce(Btree, [{dir, fwd},
+                                    {start_key, SK},
+                                    {end_key_gt, EK}]))
+        }
+    ];
+should_reduce_whole_range(rev, {_, Btree}) ->
+    {SK, EK} = {{"odd", ?ROWS - 1}, {"even", 2}},
+    [
+        {
+            "include endkey",
+            ?_assertMatch(
+                {ok, [{{"even", ?ROWS}, ?ROWS div 2},
+                      {{"odd", ?ROWS - 1}, ?ROWS div 2}]},
+                fold_reduce(Btree, [{dir, rev},
+                                    {start_key, SK},
+                                    {end_key, EK}]))
+        },
+        {
+            "exclude endkey",
+            ?_assertMatch(
+                {ok, [{{"even", ?ROWS}, (?ROWS div 2) - 1},
+                      {{"odd", ?ROWS - 1}, ?ROWS div 2}]},
+                fold_reduce(Btree, [{dir, rev},
+                                    {start_key, SK},
+                                    {end_key_gt, EK}]))
+        }
+    ].
+
+should_reduce_first_half(fwd, {_, Btree}) ->
+    {SK, EK} = {{"even", 0}, {"odd", (?ROWS div 2) - 1}},
+    [
+        {
+            "include endkey",
+            ?_assertMatch(
+                {ok, [{{"odd", 1}, ?ROWS div 4},
+                      {{"even", 2}, ?ROWS div 2}]},
+                fold_reduce(Btree, [{dir, fwd},
+                                    {start_key, SK}, {end_key, EK}]))
+        },
+        {
+            "exclude endkey",
+            ?_assertMatch(
+                {ok, [{{"odd", 1}, (?ROWS div 4) - 1},
+                      {{"even", 2}, ?ROWS div 2}]},
+                fold_reduce(Btree, [{dir, fwd},
+                                    {start_key, SK},
+                                    {end_key_gt, EK}]))
+        }
+    ];
+should_reduce_first_half(rev, {_, Btree}) ->
+    {SK, EK} = {{"odd", ?ROWS - 1}, {"even", ?ROWS div 2}},
+    [
+        {
+            "include endkey",
+            ?_assertMatch(
+                {ok, [{{"even", ?ROWS}, (?ROWS div 4) + 1},
+                      {{"odd", ?ROWS - 1}, ?ROWS div 2}]},
+                fold_reduce(Btree, [{dir, rev},
+                                    {start_key, SK},
+                                    {end_key, EK}]))
+        },
+        {
+            "exclude endkey",
+            ?_assertMatch(
+                {ok, [{{"even", ?ROWS}, ?ROWS div 4},
+                      {{"odd", ?ROWS - 1}, ?ROWS div 2}]},
+                fold_reduce(Btree, [{dir, rev},
+                                    {start_key, SK},
+                                    {end_key_gt, EK}]))
+        }
+    ].
+
+should_reduce_second_half(fwd, {_, Btree}) ->
+    {SK, EK} = {{"even", ?ROWS div 2}, {"odd", ?ROWS - 1}},
+    [
+        {
+            "include endkey",
+            ?_assertMatch(
+                {ok, [{{"odd", 1}, ?ROWS div 2},
+                      {{"even", ?ROWS div 2}, (?ROWS div 4) + 1}]},
+                fold_reduce(Btree, [{dir, fwd},
+                                    {start_key, SK},
+                                    {end_key, EK}]))
+        },
+        {
+            "exclude endkey",
+            ?_assertMatch(
+                {ok, [{{"odd", 1}, (?ROWS div 2) - 1},
+                      {{"even", ?ROWS div 2}, (?ROWS div 4) + 1}]},
+                fold_reduce(Btree, [{dir, fwd},
+                                    {start_key, SK},
+                                    {end_key_gt, EK}]))
+        }
+    ];
+should_reduce_second_half(rev, {_, Btree}) ->
+    {SK, EK} = {{"odd", (?ROWS div 2) + 1}, {"even", 2}},
+    [
+        {
+            "include endkey",
+            ?_assertMatch(
+                {ok, [{{"even", ?ROWS}, ?ROWS div 2},
+                      {{"odd", (?ROWS div 2) + 1}, (?ROWS div 4) + 1}]},
+                fold_reduce(Btree, [{dir, rev},
+                                    {start_key, SK},
+                                    {end_key, EK}]))
+        },
+        {
+            "exclude endkey",
+            ?_assertMatch(
+                {ok, [{{"even", ?ROWS}, (?ROWS div 2) - 1},
+                      {{"odd", (?ROWS div 2) + 1}, (?ROWS div 4) + 1}]},
+                fold_reduce(Btree, [{dir, rev},
+                                    {start_key, SK},
+                                    {end_key_gt, EK}]))
+        }
+    ].
+
+should_produce_valid_btree(Btree, KeyValues) ->
+    ?_assert(test_btree(Btree, KeyValues)).
+
+should_be_empty(Btree) ->
+    ?_assertEqual(couch_btree:size(Btree), 0).
+
+should_not_be_empty(Btree) ->
+    ?_assert(couch_btree:size(Btree) > 0).
+
+fold_reduce(Btree, Opts) ->
+    GroupFun = fun({K1, _}, {K2, _}) ->
+        K1 == K2
+    end,
+    FoldFun = fun(GroupedKey, Unreduced, Acc) ->
+        {ok, [{GroupedKey, couch_btree:final_reduce(Btree, Unreduced)} | Acc]}
+    end,
+    couch_btree:fold_reduce(Btree, FoldFun, [],
+                            [{key_group_fun, GroupFun}] ++ Opts).
+
+
+keys(KVs) ->
+    [K || {K, _} <- KVs].
+
+reduce_fun(reduce, KVs) ->
+    length(KVs);
+reduce_fun(rereduce, Reds) ->
+    lists:sum(Reds).
+
+
+shuffle(List) ->
+    randomize(round(math:log(length(List)) + 0.5), List).
+
+randomize(1, List) ->
+    randomize(List);
+randomize(T, List) ->
+    lists:foldl(
+        fun(_E, Acc) ->
+            randomize(Acc)
+        end, randomize(List), lists:seq(1, (T - 1))).
+
+randomize(List) ->
+    D = lists:map(fun(A) -> {random:uniform(), A} end, List),
+    {_, D1} = lists:unzip(lists:keysort(1, D)),
+    D1.
+
+test_btree(Btree, KeyValues) ->
+    ok = test_key_access(Btree, KeyValues),
+    ok = test_lookup_access(Btree, KeyValues),
+    ok = test_final_reductions(Btree, KeyValues),
+    ok = test_traversal_callbacks(Btree, KeyValues),
+    true.
+
+test_add_remove(Btree, OutKeyValues, RemainingKeyValues) ->
+    Btree2 = lists:foldl(
+        fun({K, _}, BtAcc) ->
+            {ok, BtAcc2} = couch_btree:add_remove(BtAcc, [], [K]),
+            BtAcc2
+        end, Btree, OutKeyValues),
+    true = test_btree(Btree2, RemainingKeyValues),
+
+    Btree3 = lists:foldl(
+        fun(KV, BtAcc) ->
+            {ok, BtAcc2} = couch_btree:add_remove(BtAcc, [KV], []),
+            BtAcc2
+        end, Btree2, OutKeyValues),
+    true = test_btree(Btree3, OutKeyValues ++ RemainingKeyValues).
+
+test_key_access(Btree, List) ->
+    FoldFun = fun(Element, {[HAcc|TAcc], Count}) ->
+        case Element == HAcc of
+            true -> {ok, {TAcc, Count + 1}};
+            _ -> {ok, {TAcc, Count + 1}}
+        end
+    end,
+    Length = length(List),
+    Sorted = lists:sort(List),
+    {ok, _, {[], Length}} = couch_btree:foldl(Btree, FoldFun, {Sorted, 0}),
+    {ok, _, {[], Length}} = couch_btree:fold(Btree, FoldFun,
+                                             {Sorted, 0}, [{dir, rev}]),
+    ok.
+
+test_lookup_access(Btree, KeyValues) ->
+    FoldFun = fun({Key, Value}, {Key, Value}) -> {stop, true} end,
+    lists:foreach(
+        fun({Key, Value}) ->
+            [{ok, {Key, Value}}] = couch_btree:lookup(Btree, [Key]),
+            {ok, _, true} = couch_btree:foldl(Btree, FoldFun,
+                                              {Key, Value}, [{start_key, Key}])
+        end, KeyValues).
+
+test_final_reductions(Btree, KeyValues) ->
+    KVLen = length(KeyValues),
+    FoldLFun = fun(_X, LeadingReds, Acc) ->
+        CountToStart = KVLen div 3 + Acc,
+        CountToStart = couch_btree:final_reduce(Btree, LeadingReds),
+        {ok, Acc + 1}
+    end,
+    FoldRFun = fun(_X, LeadingReds, Acc) ->
+        CountToEnd = KVLen - KVLen div 3 + Acc,
+        CountToEnd = couch_btree:final_reduce(Btree, LeadingReds),
+        {ok, Acc + 1}
+    end,
+    {LStartKey, _} = case KVLen of
+        0 -> {nil, nil};
+        _ -> lists:nth(KVLen div 3 + 1, lists:sort(KeyValues))
+    end,
+    {RStartKey, _} = case KVLen of
+        0 -> {nil, nil};
+        _ -> lists:nth(KVLen div 3, lists:sort(KeyValues))
+    end,
+    {ok, _, FoldLRed} = couch_btree:foldl(Btree, FoldLFun, 0,
+                                          [{start_key, LStartKey}]),
+    {ok, _, FoldRRed} = couch_btree:fold(Btree, FoldRFun, 0,
+                                         [{dir, rev}, {start_key, RStartKey}]),
+    KVLen = FoldLRed + FoldRRed,
+    ok.
+
+test_traversal_callbacks(Btree, _KeyValues) ->
+    FoldFun = fun
+        (visit, _GroupedKey, _Unreduced, Acc) ->
+            {ok, Acc andalso false};
+        (traverse, _LK, _Red, Acc) ->
+            {skip, Acc andalso true}
+    end,
+    % With 250 items the root is a kp. Always skipping should reduce to true.
+    {ok, _, true} = couch_btree:fold(Btree, FoldFun, true, [{dir, fwd}]),
+    ok.

http://git-wip-us.apache.org/repos/asf/couchdb/blob/92354100/test/etap/020-btree-basics.t
----------------------------------------------------------------------
diff --git a/test/etap/020-btree-basics.t b/test/etap/020-btree-basics.t
deleted file mode 100755
index b0fb2d2..0000000
--- a/test/etap/020-btree-basics.t
+++ /dev/null
@@ -1,265 +0,0 @@
-#!/usr/bin/env escript
-%% -*- erlang -*-
-%%! -pa ./src/couchdb -sasl errlog_type error -boot start_sasl -noshell
-
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
-filename() -> test_util:build_file("test/etap/temp.020").
-rows() -> 250.
-
--record(btree, {
-    fd,
-    root,
-    extract_kv,
-    assemble_kv,
-    less,
-    reduce,
-    compression
-}).
-
-main(_) ->
-    test_util:init_code_path(),
-    etap:plan(75),
-    case (catch test()) of
-        ok ->
-            etap:end_tests();
-        Other ->
-            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
-            etap:bail()
-    end,
-    ok.
-
-%% @todo Determine if this number should be greater to see if the btree was
-%% broken into multiple nodes. AKA "How do we appropiately detect if multiple
-%% nodes were created."
-test()->
-    Sorted = [{Seq, random:uniform()} || Seq <- lists:seq(1, rows())],
-    etap:ok(test_kvs(Sorted), "Testing sorted keys"),
-    etap:ok(test_kvs(lists:reverse(Sorted)), "Testing reversed sorted keys"),
-    etap:ok(test_kvs(shuffle(Sorted)), "Testing shuffled keys."),
-    ok.
-
-test_kvs(KeyValues) ->
-    ReduceFun = fun
-        (reduce, KVs) ->
-            length(KVs);
-        (rereduce, Reds) ->
-            lists:sum(Reds)
-    end,
-
-    Keys = [K || {K, _} <- KeyValues],
-
-    {ok, Fd} = couch_file:open(filename(), [create,overwrite]),
-    {ok, Btree} = couch_btree:open(nil, Fd, [{compression, none}]),
-    etap:ok(is_record(Btree, btree), "Created btree is really a btree record"),
-    etap:is(Btree#btree.fd, Fd, "Btree#btree.fd is set correctly."),
-    etap:is(Btree#btree.root, nil, "Btree#btree.root is set correctly."),
-    etap:is(0, couch_btree:size(Btree), "Empty btrees have a 0 size."),
-
-    Btree1 = couch_btree:set_options(Btree, [{reduce, ReduceFun}]),
-    etap:is(Btree1#btree.reduce, ReduceFun, "Reduce function was set"),
-    {ok, _, EmptyRes} = couch_btree:foldl(Btree1, fun(_, X) -> {ok, X+1} end, 0),
-    etap:is(EmptyRes, 0, "Folding over an empty btree"),
-
-    {ok, Btree2} = couch_btree:add_remove(Btree1, KeyValues, []),
-    etap:ok(test_btree(Btree2, KeyValues),
-        "Adding all keys at once returns a complete btree."),
-
-    etap:is((couch_btree:size(Btree2) > 0), true,
-            "Non empty btrees have a size > 0."),
-    etap:is((couch_btree:size(Btree2) =< couch_file:bytes(Fd)), true,
-            "Btree size is <= file size."),
-
-    etap:fun_is(
-        fun
-            ({ok, {kp_node, _}}) -> true;
-            (_) -> false
-        end,
-        couch_file:pread_term(Fd, element(1, Btree2#btree.root)),
-        "Btree root pointer is a kp_node."
-    ),
-
-    {ok, Btree3} = couch_btree:add_remove(Btree2, [], Keys),
-    etap:ok(test_btree(Btree3, []),
-        "Removing all keys at once returns an empty btree."),
-
-    etap:is(0, couch_btree:size(Btree3),
-            "After removing all keys btree size is 0."),
-
-    {Btree4, _} = lists:foldl(fun(KV, {BtAcc, PrevSize}) ->
-        {ok, BtAcc2} = couch_btree:add_remove(BtAcc, [KV], []),
-        case couch_btree:size(BtAcc2) > PrevSize of
-        true ->
-            ok;
-        false ->
-            etap:bail("After inserting a value, btree size did not increase.")
-        end,
-        {BtAcc2, couch_btree:size(BtAcc2)}
-    end, {Btree3, couch_btree:size(Btree3)}, KeyValues),
-
-    etap:ok(test_btree(Btree4, KeyValues),
-        "Adding all keys one at a time returns a complete btree."),
-    etap:is((couch_btree:size(Btree4) > 0), true,
-            "Non empty btrees have a size > 0."),
-
-    {Btree5, _} = lists:foldl(fun({K, _}, {BtAcc, PrevSize}) ->
-        {ok, BtAcc2} = couch_btree:add_remove(BtAcc, [], [K]),
-        case couch_btree:size(BtAcc2) < PrevSize of
-        true ->
-            ok;
-        false ->
-            etap:bail("After removing a key, btree size did not decrease.")
-        end,
-        {BtAcc2, couch_btree:size(BtAcc2)}
-    end, {Btree4, couch_btree:size(Btree4)}, KeyValues),
-    etap:ok(test_btree(Btree5, []),
-        "Removing all keys one at a time returns an empty btree."),
-    etap:is(0, couch_btree:size(Btree5),
-            "After removing all keys, one by one, btree size is 0."),
-
-    KeyValuesRev = lists:reverse(KeyValues),
-    {Btree6, _} = lists:foldl(fun(KV, {BtAcc, PrevSize}) ->
-        {ok, BtAcc2} = couch_btree:add_remove(BtAcc, [KV], []),
-        case couch_btree:size(BtAcc2) > PrevSize of
-        true ->
-            ok;
-        false ->
-            etap:is(false, true,
-                   "After inserting a value, btree size did not increase.")
-        end,
-        {BtAcc2, couch_btree:size(BtAcc2)}
-    end, {Btree5, couch_btree:size(Btree5)}, KeyValuesRev),
-    etap:ok(test_btree(Btree6, KeyValues),
-        "Adding all keys in reverse order returns a complete btree."),
-
-    {_, Rem2Keys0, Rem2Keys1} = lists:foldl(fun(X, {Count, Left, Right}) ->
-        case Count rem 2 == 0 of
-            true-> {Count+1, [X | Left], Right};
-            false -> {Count+1, Left, [X | Right]}
-        end
-    end, {0, [], []}, KeyValues),
-
-    etap:ok(test_add_remove(Btree6, Rem2Keys0, Rem2Keys1),
-        "Add/Remove every other key."),
-
-    etap:ok(test_add_remove(Btree6, Rem2Keys1, Rem2Keys0),
-        "Add/Remove opposite every other key."),
-
-    Size1 = couch_btree:size(Btree6),
-    {ok, Btree7} = couch_btree:add_remove(Btree6, [], [K||{K,_}<-Rem2Keys1]),
-    Size2 = couch_btree:size(Btree7),
-    etap:is((Size2 < Size1), true, "Btree size decreased"),
-    {ok, Btree8} = couch_btree:add_remove(Btree7, [], [K||{K,_}<-Rem2Keys0]),
-    Size3 = couch_btree:size(Btree8),
-    etap:is((Size3 < Size2), true, "Btree size decreased"),
-    etap:is(Size3, 0, "Empty btree has size 0."),
-    etap:ok(test_btree(Btree8, []),
-        "Removing both halves of every other key returns an empty btree."),
-
-    %% Third chunk (close out)
-    etap:is(couch_file:close(Fd), ok, "closing out"),
-    true.
-
-test_btree(Btree, KeyValues) ->
-    ok = test_key_access(Btree, KeyValues),
-    ok = test_lookup_access(Btree, KeyValues),
-    ok = test_final_reductions(Btree, KeyValues),
-    ok = test_traversal_callbacks(Btree, KeyValues),
-    true.
-
-test_add_remove(Btree, OutKeyValues, RemainingKeyValues) ->
-    Btree2 = lists:foldl(fun({K, _}, BtAcc) ->
-        {ok, BtAcc2} = couch_btree:add_remove(BtAcc, [], [K]),
-        BtAcc2
-    end, Btree, OutKeyValues),
-    true = test_btree(Btree2, RemainingKeyValues),
-
-    Btree3 = lists:foldl(fun(KV, BtAcc) ->
-        {ok, BtAcc2} = couch_btree:add_remove(BtAcc, [KV], []),
-        BtAcc2
-    end, Btree2, OutKeyValues),
-    true = test_btree(Btree3, OutKeyValues ++ RemainingKeyValues).
-
-test_key_access(Btree, List) ->
-    FoldFun = fun(Element, {[HAcc|TAcc], Count}) ->
-        case Element == HAcc of
-            true -> {ok, {TAcc, Count + 1}};
-            _ -> {ok, {TAcc, Count + 1}}
-        end
-    end,
-    Length = length(List),
-    Sorted = lists:sort(List),
-    {ok, _, {[], Length}} = couch_btree:foldl(Btree, FoldFun, {Sorted, 0}),
-    {ok, _, {[], Length}} = couch_btree:fold(Btree, FoldFun, {Sorted, 0}, [{dir, rev}]),
-    ok.
-
-test_lookup_access(Btree, KeyValues) ->
-    FoldFun = fun({Key, Value}, {Key, Value}) -> {stop, true} end,
-    lists:foreach(fun({Key, Value}) ->
-        [{ok, {Key, Value}}] = couch_btree:lookup(Btree, [Key]),
-        {ok, _, true} = couch_btree:foldl(Btree, FoldFun, {Key, Value}, [{start_key, Key}])
-    end, KeyValues).
-
-test_final_reductions(Btree, KeyValues) ->
-    KVLen = length(KeyValues),
-    FoldLFun = fun(_X, LeadingReds, Acc) ->
-        CountToStart = KVLen div 3 + Acc,
-        CountToStart = couch_btree:final_reduce(Btree, LeadingReds),
-        {ok, Acc+1}
-    end,
-    FoldRFun = fun(_X, LeadingReds, Acc) ->
-        CountToEnd = KVLen - KVLen div 3 + Acc,
-        CountToEnd = couch_btree:final_reduce(Btree, LeadingReds),
-        {ok, Acc+1}
-    end,
-    {LStartKey, _} = case KVLen of
-        0 -> {nil, nil};
-        _ -> lists:nth(KVLen div 3 + 1, lists:sort(KeyValues))
-    end,
-    {RStartKey, _} = case KVLen of
-        0 -> {nil, nil};
-        _ -> lists:nth(KVLen div 3, lists:sort(KeyValues))
-    end,
-    {ok, _, FoldLRed} = couch_btree:foldl(Btree, FoldLFun, 0, [{start_key, LStartKey}]),
-    {ok, _, FoldRRed} = couch_btree:fold(Btree, FoldRFun, 0, [{dir, rev}, {start_key, RStartKey}]),
-    KVLen = FoldLRed + FoldRRed,
-    ok.
-
-test_traversal_callbacks(Btree, _KeyValues) ->
-    FoldFun =
-    fun
-        (visit, _GroupedKey, _Unreduced, Acc) ->
-            {ok, Acc andalso false};
-        (traverse, _LK, _Red, Acc) ->
-            {skip, Acc andalso true}
-    end,
-    % With 250 items the root is a kp. Always skipping should reduce to true.
-    {ok, _, true} = couch_btree:fold(Btree, FoldFun, true, [{dir, fwd}]),
-    ok.
-
-shuffle(List) ->
-   randomize(round(math:log(length(List)) + 0.5), List).
-
-randomize(1, List) ->
-   randomize(List);
-randomize(T, List) ->
-    lists:foldl(fun(_E, Acc) ->
-        randomize(Acc)
-    end, randomize(List), lists:seq(1, (T - 1))).
-
-randomize(List) ->
-    D = lists:map(fun(A) ->
-        {random:uniform(), A}
-    end, List),
-    {_, D1} = lists:unzip(lists:keysort(1, D)),
-    D1.

http://git-wip-us.apache.org/repos/asf/couchdb/blob/92354100/test/etap/021-btree-reductions.t
----------------------------------------------------------------------
diff --git a/test/etap/021-btree-reductions.t b/test/etap/021-btree-reductions.t
deleted file mode 100755
index e80ac2d..0000000
--- a/test/etap/021-btree-reductions.t
+++ /dev/null
@@ -1,237 +0,0 @@
-#!/usr/bin/env escript
-%% -*- erlang -*-
-%%! -pa ./src/couchdb -sasl errlog_type error -boot start_sasl -noshell
-
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
-filename() -> "./test/etap/temp.021".
-rows() -> 1000.
-
-main(_) ->
-    test_util:init_code_path(),
-    etap:plan(20),
-    case (catch test()) of
-        ok ->
-            etap:end_tests();
-        Other ->
-            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
-            etap:bail()
-    end,
-    ok.
-
-test()->
-    ReduceFun = fun
-        (reduce, KVs) -> length(KVs);
-        (rereduce, Reds) -> lists:sum(Reds)
-    end,
-
-    {ok, Fd} = couch_file:open(filename(), [create,overwrite]),
-    {ok, Btree} = couch_btree:open(nil, Fd, [{reduce, ReduceFun}]),
-
-    % Create a list, of {"even", Value} or {"odd", Value} pairs.
-    {_, EvenOddKVs} = lists:foldl(fun(Idx, {Key, Acc}) ->
-        case Key of
-            "even" -> {"odd", [{{Key, Idx}, 1} | Acc]};
-            _ -> {"even", [{{Key, Idx}, 1} | Acc]}
-        end
-    end, {"odd", []}, lists:seq(1, rows())),
-
-    {ok, Btree2} = couch_btree:add_remove(Btree, EvenOddKVs, []),
-
-    GroupFun = fun({K1, _}, {K2, _}) -> K1 == K2 end,
-    FoldFun = fun(GroupedKey, Unreduced, Acc) ->
-        {ok, [{GroupedKey, couch_btree:final_reduce(Btree2, Unreduced)} | Acc]}
-    end,
-
-    {SK1, EK1} = {{"even", -1}, {"even", foo}},
-    {SK2, EK2} = {{"odd", -1}, {"odd", foo}},
-
-    etap:fun_is(
-        fun
-            ({ok, [{{"odd", _}, 500}, {{"even", _}, 500}]}) ->
-                true;
-            (_) ->
-                false
-        end,
-        couch_btree:fold_reduce(Btree2, FoldFun, [], [{key_group_fun, GroupFun}]),
-        "Reduction works with no specified direction, startkey, or endkey."
-    ),
-
-    etap:fun_is(
-        fun
-            ({ok, [{{"odd", _}, 500}, {{"even", _}, 500}]}) ->
-                true;
-            (_) ->
-                false
-        end,
-        couch_btree:fold_reduce(Btree2, FoldFun, [], [{key_group_fun, GroupFun}, {dir, fwd}]),
-        "Reducing forward works with no startkey or endkey."
-    ),
-
-    etap:fun_is(
-        fun
-            ({ok, [{{"even", _}, 500}, {{"odd", _}, 500}]}) ->
-                true;
-            (_) ->
-                false
-        end,
-        couch_btree:fold_reduce(Btree2, FoldFun, [], [{key_group_fun, GroupFun}, {dir, rev}]),
-        "Reducing backwards works with no startkey or endkey."
-    ),
-
-    etap:fun_is(
-        fun
-            ({ok, [{{"odd", _}, 500}, {{"even", _}, 500}]}) ->
-                true;
-            (_) ->
-                false
-        end,
-        couch_btree:fold_reduce(Btree2, FoldFun, [], [{dir, fwd}, {key_group_fun, GroupFun}, {start_key, SK1}, {end_key, EK2}]),
-        "Reducing works over the entire range with startkey and endkey set."
-    ),
-
-    etap:fun_is(
-        fun
-            ({ok, [{{"even", _}, 500}]}) -> true;
-            (_) -> false
-        end,
-        couch_btree:fold_reduce(Btree2, FoldFun, [], [{dir, fwd}, {key_group_fun, GroupFun}, {start_key, SK1}, {end_key, EK1}]),
-        "Reducing forward over first half works with a startkey and endkey."
-    ),
-
-    etap:fun_is(
-        fun
-            ({ok, [{{"odd", _}, 500}]}) -> true;
-            (_) -> false
-        end,
-        couch_btree:fold_reduce(Btree2, FoldFun, [], [{dir, fwd}, {key_group_fun, GroupFun}, {start_key, SK2}, {end_key, EK2}]),
-        "Reducing forward over second half works with second startkey and endkey"
-    ),
-
-    etap:fun_is(
-        fun
-            ({ok, [{{"odd", _}, 500}]}) -> true;
-            (_) -> false
-        end,
-        couch_btree:fold_reduce(Btree2, FoldFun, [], [{dir, rev}, {key_group_fun, GroupFun}, {start_key, EK2}, {end_key, SK2}]),
-        "Reducing in reverse works after swapping the startkey and endkey."
-    ),
-
-    etap:fun_is(
-        fun
-            ({ok, [{{"even", _}, 500}, {{"odd", _}, 500}]}) ->
-                true;
-            (_) ->
-                false
-        end,
-        couch_btree:fold_reduce(Btree2, FoldFun, [], [{dir, rev}, {key_group_fun, GroupFun}, {start_key, EK2}, {end_key, SK1}]),
-        "Reducing in reverse results in reversed accumulator."
-    ),
-
-    etap:is(
-        couch_btree:fold_reduce(Btree2, FoldFun, [], [
-            {dir, fwd}, {key_group_fun, GroupFun},
-            {start_key, {"even", 0}}, {end_key, {"odd", rows() + 1}}
-        ]),
-        {ok, [{{"odd", 1}, 500}, {{"even", 2}, 500}]},
-        "Right fold reduce value for whole range with inclusive end key"),
-
-    etap:is(
-        couch_btree:fold_reduce(Btree2, FoldFun, [], [
-            {dir, fwd}, {key_group_fun, GroupFun},
-            {start_key, {"even", 0}}, {end_key_gt, {"odd", 999}}
-        ]),
-        {ok, [{{"odd", 1}, 499}, {{"even", 2}, 500}]},
-        "Right fold reduce value for whole range without inclusive end key"),
-
-    etap:is(
-        couch_btree:fold_reduce(Btree2, FoldFun, [], [
-            {dir, rev}, {key_group_fun, GroupFun},
-            {start_key, {"odd", 999}}, {end_key, {"even", 2}}
-        ]),
-        {ok, [{{"even", 1000}, 500}, {{"odd", 999}, 500}]},
-        "Right fold reduce value for whole reversed range with inclusive end key"),
-
-    etap:is(
-        couch_btree:fold_reduce(Btree2, FoldFun, [], [
-            {dir, rev}, {key_group_fun, GroupFun},
-            {start_key, {"odd", 999}}, {end_key_gt, {"even", 2}}
-        ]),
-        {ok, [{{"even", 1000}, 499}, {{"odd", 999}, 500}]},
-        "Right fold reduce value for whole reversed range without inclusive end key"),
-
-    etap:is(
-        couch_btree:fold_reduce(Btree2, FoldFun, [], [
-            {dir, fwd}, {key_group_fun, GroupFun},
-            {start_key, {"even", 0}}, {end_key, {"odd", 499}}
-        ]),
-        {ok, [{{"odd", 1}, 250}, {{"even", 2}, 500}]},
-        "Right fold reduce value for first half with inclusive end key"),
-
-    etap:is(
-        couch_btree:fold_reduce(Btree2, FoldFun, [], [
-            {dir, fwd}, {key_group_fun, GroupFun},
-            {start_key, {"even", 0}}, {end_key_gt, {"odd", 499}}
-        ]),
-        {ok, [{{"odd", 1}, 249}, {{"even", 2}, 500}]},
-        "Right fold reduce value for first half without inclusive end key"),
-
-    etap:is(
-        couch_btree:fold_reduce(Btree2, FoldFun, [], [
-            {dir, rev}, {key_group_fun, GroupFun},
-            {start_key, {"odd", 999}}, {end_key, {"even", 500}}
-        ]),
-        {ok, [{{"even", 1000}, 251}, {{"odd", 999}, 500}]},
-        "Right fold reduce value for first half reversed with inclusive end key"),
-
-    etap:is(
-        couch_btree:fold_reduce(Btree2, FoldFun, [], [
-            {dir, rev}, {key_group_fun, GroupFun},
-            {start_key, {"odd", 999}}, {end_key_gt, {"even", 500}}
-        ]),
-        {ok, [{{"even", 1000}, 250}, {{"odd", 999}, 500}]},
-        "Right fold reduce value for first half reversed without inclusive end key"),
-
-    etap:is(
-        couch_btree:fold_reduce(Btree2, FoldFun, [], [
-            {dir, fwd}, {key_group_fun, GroupFun},
-            {start_key, {"even", 500}}, {end_key, {"odd", 999}}
-        ]),
-        {ok, [{{"odd", 1}, 500}, {{"even", 500}, 251}]},
-        "Right fold reduce value for second half with inclusive end key"),
-
-    etap:is(
-        couch_btree:fold_reduce(Btree2, FoldFun, [], [
-            {dir, fwd}, {key_group_fun, GroupFun},
-            {start_key, {"even", 500}}, {end_key_gt, {"odd", 999}}
-        ]),
-        {ok, [{{"odd", 1}, 499}, {{"even", 500}, 251}]},
-        "Right fold reduce value for second half without inclusive end key"),
-
-    etap:is(
-        couch_btree:fold_reduce(Btree2, FoldFun, [], [
-            {dir, rev}, {key_group_fun, GroupFun},
-            {start_key, {"odd", 501}}, {end_key, {"even", 2}}
-        ]),
-        {ok, [{{"even", 1000}, 500}, {{"odd", 501}, 251}]},
-        "Right fold reduce value for second half reversed with inclusive end key"),
-
-    etap:is(
-        couch_btree:fold_reduce(Btree2, FoldFun, [], [
-            {dir, rev}, {key_group_fun, GroupFun},
-            {start_key, {"odd", 501}}, {end_key_gt, {"even", 2}}
-        ]),
-        {ok, [{{"even", 1000}, 499}, {{"odd", 501}, 251}]},
-        "Right fold reduce value for second half reversed without inclusive end key"),
-
-    couch_file:close(Fd).

http://git-wip-us.apache.org/repos/asf/couchdb/blob/92354100/test/etap/Makefile.am
----------------------------------------------------------------------
diff --git a/test/etap/Makefile.am b/test/etap/Makefile.am
index 7ac28b3..2e86144 100644
--- a/test/etap/Makefile.am
+++ b/test/etap/Makefile.am
@@ -36,8 +36,6 @@ fixture_files = \
     fixtures/test.couch
 
 tap_files = \
-    020-btree-basics.t \
-    021-btree-reductions.t \
     030-doc-from-json.t \
     031-doc-to-json.t \
     040-util.t \


[13/36] couchdb commit: updated refs/heads/1963-eunit to 85f2750

Posted by kx...@apache.org.
Port 043-find-in-binary.t etap test suite to eunit

It been merged into couch_util_tests suite.


Project: http://git-wip-us.apache.org/repos/asf/couchdb/repo
Commit: http://git-wip-us.apache.org/repos/asf/couchdb/commit/720e32b9
Tree: http://git-wip-us.apache.org/repos/asf/couchdb/tree/720e32b9
Diff: http://git-wip-us.apache.org/repos/asf/couchdb/diff/720e32b9

Branch: refs/heads/1963-eunit
Commit: 720e32b9924cdedec844c4866a131ddd0ae91c88
Parents: 8d0ab7a
Author: Alexander Shorin <kx...@apache.org>
Authored: Sun May 18 14:35:58 2014 +0400
Committer: Alexander Shorin <kx...@apache.org>
Committed: Tue Jun 3 15:14:25 2014 +0400

----------------------------------------------------------------------
 test/couchdb/couch_util_tests.erl | 35 +++++++++++++++++
 test/etap/043-find-in-binary.t    | 68 ----------------------------------
 test/etap/Makefile.am             |  1 -
 3 files changed, 35 insertions(+), 69 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/couchdb/blob/720e32b9/test/couchdb/couch_util_tests.erl
----------------------------------------------------------------------
diff --git a/test/couchdb/couch_util_tests.erl b/test/couchdb/couch_util_tests.erl
index d952f81..5bef3e2 100644
--- a/test/couchdb/couch_util_tests.erl
+++ b/test/couchdb/couch_util_tests.erl
@@ -100,3 +100,38 @@ verify_test() ->
     ?assert(couch_util:verify(<<"ahBase3r">>, <<"ahBase3r">>)),
     ?assertNot(couch_util:verify(<<"ahBase3rX">>, <<"ahBase3r">>)),
     ?assertNot(couch_util:verify(nil, <<"ahBase3r">>)).
+
+find_in_binary_test_() ->
+    Cases = [
+        {<<"foo">>, <<"foobar">>, {exact, 0}},
+        {<<"foo">>, <<"foofoo">>, {exact, 0}},
+        {<<"foo">>, <<"barfoo">>, {exact, 3}},
+        {<<"foo">>, <<"barfo">>, {partial, 3}},
+        {<<"f">>, <<"fobarfff">>, {exact, 0}},
+        {<<"f">>, <<"obarfff">>, {exact, 4}},
+        {<<"f">>, <<"obarggf">>, {exact, 6}},
+        {<<"f">>, <<"f">>, {exact, 0}},
+        {<<"f">>, <<"g">>, not_found},
+        {<<"foo">>, <<"f">>, {partial, 0}},
+        {<<"foo">>, <<"g">>, not_found},
+        {<<"foo">>, <<"">>, not_found},
+        {<<"fofo">>, <<"foofo">>, {partial, 3}},
+        {<<"foo">>, <<"gfobarfo">>, {partial, 6}},
+        {<<"foo">>, <<"gfobarf">>, {partial, 6}},
+        {<<"foo">>, <<"gfobar">>, not_found},
+        {<<"fog">>, <<"gbarfogquiz">>, {exact, 4}},
+        {<<"ggg">>, <<"ggg">>, {exact, 0}},
+        {<<"ggg">>, <<"ggggg">>, {exact, 0}},
+        {<<"ggg">>, <<"bggg">>, {exact, 1}},
+        {<<"ggg">>, <<"bbgg">>, {partial, 2}},
+        {<<"ggg">>, <<"bbbg">>, {partial, 3}},
+        {<<"ggg">>, <<"bgbggbggg">>, {exact, 6}},
+        {<<"ggg">>, <<"bgbggb">>, not_found}
+    ],
+    lists:map(
+        fun({Needle, Haystack, Result}) ->
+            Msg = lists:flatten(io_lib:format("Looking for ~s in ~s",
+                                              [Needle, Haystack])),
+            {Msg, ?_assertMatch(Result,
+                                couch_util:find_in_binary(Needle, Haystack))}
+        end, Cases).

http://git-wip-us.apache.org/repos/asf/couchdb/blob/720e32b9/test/etap/043-find-in-binary.t
----------------------------------------------------------------------
diff --git a/test/etap/043-find-in-binary.t b/test/etap/043-find-in-binary.t
deleted file mode 100755
index dca1d9c..0000000
--- a/test/etap/043-find-in-binary.t
+++ /dev/null
@@ -1,68 +0,0 @@
-#!/usr/bin/env escript
-%% -*- erlang -*-
-
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
-main(_) ->
-    test_util:init_code_path(),
-
-    etap:plan(length(cases())),
-    case (catch test()) of
-        ok ->
-            etap:end_tests();
-        Other ->
-            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
-            etap:bail(Other)
-    end,
-    ok.
-
-
-test() ->
-    lists:foreach(fun({Needle, Haystack, Result}) ->
-        try
-        Msg = io_lib:format("Looking for ~s in ~s", [Needle, Haystack]),
-        etap:is(couch_util:find_in_binary(Needle, Haystack), Result, Msg)
-        catch _T:_R ->
-            etap:diag("~p", [{_T, _R}])
-        end
-    end, cases()),
-    ok.
-
-
-cases() ->
-    [
-        {<<"foo">>, <<"foobar">>, {exact, 0}},
-        {<<"foo">>, <<"foofoo">>, {exact, 0}},
-        {<<"foo">>, <<"barfoo">>, {exact, 3}},
-        {<<"foo">>, <<"barfo">>, {partial, 3}},
-        {<<"f">>, <<"fobarfff">>, {exact, 0}},
-        {<<"f">>, <<"obarfff">>, {exact, 4}},
-        {<<"f">>, <<"obarggf">>, {exact, 6}},
-        {<<"f">>, <<"f">>, {exact, 0}},
-        {<<"f">>, <<"g">>, not_found},
-        {<<"foo">>, <<"f">>, {partial, 0}},
-        {<<"foo">>, <<"g">>, not_found},
-        {<<"foo">>, <<"">>, not_found},
-        {<<"fofo">>, <<"foofo">>, {partial, 3}},
-        {<<"foo">>, <<"gfobarfo">>, {partial, 6}},
-        {<<"foo">>, <<"gfobarf">>, {partial, 6}},
-        {<<"foo">>, <<"gfobar">>, not_found},
-        {<<"fog">>, <<"gbarfogquiz">>, {exact, 4}},
-        {<<"ggg">>, <<"ggg">>, {exact, 0}},
-        {<<"ggg">>, <<"ggggg">>, {exact, 0}},
-        {<<"ggg">>, <<"bggg">>, {exact, 1}},
-        {<<"ggg">>, <<"bbgg">>, {partial, 2}},
-        {<<"ggg">>, <<"bbbg">>, {partial, 3}},
-        {<<"ggg">>, <<"bgbggbggg">>, {exact, 6}},
-        {<<"ggg">>, <<"bgbggb">>, not_found}
-    ].

http://git-wip-us.apache.org/repos/asf/couchdb/blob/720e32b9/test/etap/Makefile.am
----------------------------------------------------------------------
diff --git a/test/etap/Makefile.am b/test/etap/Makefile.am
index 6bd2dad..4eef1a0 100644
--- a/test/etap/Makefile.am
+++ b/test/etap/Makefile.am
@@ -36,7 +36,6 @@ fixture_files = \
     fixtures/test.couch
 
 tap_files = \
-    043-find-in-binary.t \
     050-stream.t \
     060-kt-merging.t \
     061-kt-missing-leaves.t \


[31/36] couchdb commit: updated refs/heads/1963-eunit to 85f2750

Posted by kx...@apache.org.
Port 150-invalid-view-seq.t etap test suite to eunit

Merged into couchdb_views_tests suite.


Project: http://git-wip-us.apache.org/repos/asf/couchdb/repo
Commit: http://git-wip-us.apache.org/repos/asf/couchdb/commit/79c69a9c
Tree: http://git-wip-us.apache.org/repos/asf/couchdb/tree/79c69a9c
Diff: http://git-wip-us.apache.org/repos/asf/couchdb/diff/79c69a9c

Branch: refs/heads/1963-eunit
Commit: 79c69a9ca1a309728b5551392f198b4b2aba424d
Parents: a36d630
Author: Alexander Shorin <kx...@apache.org>
Authored: Tue Jun 3 08:49:32 2014 +0400
Committer: Alexander Shorin <kx...@apache.org>
Committed: Tue Jun 3 19:52:53 2014 +0400

----------------------------------------------------------------------
 test/couchdb/couchdb_views_tests.erl | 142 +++++++++++++++++++++--
 test/etap/150-invalid-view-seq.t     | 183 ------------------------------
 test/etap/Makefile.am                |   1 -
 3 files changed, 131 insertions(+), 195 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/couchdb/blob/79c69a9c/test/couchdb/couchdb_views_tests.erl
----------------------------------------------------------------------
diff --git a/test/couchdb/couchdb_views_tests.erl b/test/couchdb/couchdb_views_tests.erl
index be361c5..ec4d000 100644
--- a/test/couchdb/couchdb_views_tests.erl
+++ b/test/couchdb/couchdb_views_tests.erl
@@ -15,30 +15,50 @@
 -include_lib("../../src/couchdb/couch_db.hrl").
 -include_lib("couchdb_tests.hrl").
 
--define(ADMIN_USER, #user_ctx{roles=[<<"_admin">>]}).
+-define(ADMIN_USER, {user_ctx, #user_ctx{roles=[<<"_admin">>]}}).
 
 
 start() ->
-    couch_server_sup:start_link(?CONFIG_CHAIN),
+    {ok, Pid} = couch_server_sup:start_link(?CONFIG_CHAIN),
     % disable logging to reduce noise in stdout
     couch_config:set("log", "level", "none", false),
-    ok.
+    % disable sasl to not throw large stack trace into stderr for
+    % should_not_remember_docs_in_index_after_backup_restore_test
+    couch_config:set("log", "include_sasl", "false", false),
+    Pid.
 
-stop(_) ->
+stop(Pid) ->
+    erlang:monitor(process, Pid),
     couch_server_sup:stop(),
-    ok.
+    receive
+        {'DOWN', _, _, Pid, _} ->
+            ok
+    after 1000 ->
+        throw({timeout, server_stop})
+    end.
 
 setup() ->
     DbName = ?tempdb(),
-    {ok, _} = couch_db:create(DbName, [{user_ctx, ?ADMIN_USER}]),
+    {ok, _} = couch_db:create(DbName, [?ADMIN_USER]),
     FooRev = create_design_doc(DbName, <<"_design/foo">>, <<"bar">>),
     ok = query_view(DbName, "foo", "bar"),
     BooRev = create_design_doc(DbName, <<"_design/boo">>, <<"baz">>),
     ok = query_view(DbName, "boo", "baz"),
     {DbName, {FooRev, BooRev}}.
 
+setup_with_docs() ->
+    DbName = ?tempdb(),
+    {ok, _} = couch_db:create(DbName, [?ADMIN_USER]),
+    create_docs(DbName),
+    create_design_doc(DbName),
+    backup_db_file(DbName),
+    create_new_doc(DbName),
+    DbName.
+
 teardown({DbName, _}) ->
-    ok = couch_server:delete(DbName, []),
+    teardown(DbName);
+teardown(DbName) when is_binary(DbName) ->
+    ok = couch_server:delete(DbName, [?ADMIN_USER]),
     ok.
 
 
@@ -60,6 +80,40 @@ view_indexes_cleanup_test_() ->
         }
     }.
 
+
+should_not_remember_docs_in_index_after_backup_restore_test() ->
+    %% COUCHDB-640
+    start(),
+    DbName = setup_with_docs(),
+
+    {ok, Code0, _, Body0} = test_request:get(
+        db_url(DbName) ++ "/_design/foo/_view/bar"),
+    ?assertEqual(200, Code0),
+
+    ViewJson0 = ejson:decode(Body0),
+    Rows0 = couch_util:get_nested_json_value(ViewJson0, [<<"rows">>]),
+    ?assert(has_doc("doc1", Rows0)),
+    ?assert(has_doc("doc2", Rows0)),
+    ?assert(has_doc("doc3", Rows0)),
+    ?assert(has_doc("doc666", Rows0)),
+
+    restore_backup_db_file(DbName),
+
+    {ok, Code1, _, Body1} = test_request:get(
+        db_url(DbName) ++ "/_design/foo/_view/bar"),
+    ?assertEqual(200, Code1),
+
+    ViewJson1 = ejson:decode(Body1),
+    Rows1 = couch_util:get_nested_json_value(ViewJson1, [<<"rows">>]),
+    ?assert(has_doc("doc1", Rows1)),
+    ?assert(has_doc("doc2", Rows1)),
+    ?assert(has_doc("doc3", Rows1)),
+    ?assertNot(has_doc("doc666", Rows1)),
+
+    teardown(DbName),
+    stop(whereis(couch_server_sup)).
+
+
 should_have_two_indexes_alive_before_deletion({DbName, _}) ->
     view_cleanup(DbName),
     ?_assertEqual(2, count_index_files(DbName)).
@@ -77,7 +131,7 @@ should_cleanup_all_index_files({DbName, {FooRev, BooRev}})->
 
 
 create_design_doc(DbName, DDName, ViewName) ->
-    {ok, Db} = couch_db:open(DbName, [{user_ctx, ?ADMIN_USER}]),
+    {ok, Db} = couch_db:open(DbName, [?ADMIN_USER]),
     DDoc = couch_doc:from_json_obj({[
         {<<"_id">>, DDName},
         {<<"language">>, <<"javascript">>},
@@ -93,7 +147,7 @@ create_design_doc(DbName, DDName, ViewName) ->
     Rev.
 
 delete_design_doc(DbName, DDName, Rev) ->
-    {ok, Db} = couch_db:open(DbName, [{user_ctx, ?ADMIN_USER}]),
+    {ok, Db} = couch_db:open(DbName, [?ADMIN_USER]),
     DDoc = couch_doc:from_json_obj({[
         {<<"_id">>, DDName},
         {<<"_rev">>, couch_doc:rev_to_str(Rev)},
@@ -105,7 +159,7 @@ delete_design_doc(DbName, DDName, Rev) ->
 db_url(DbName) ->
     Addr = couch_config:get("httpd", "bind_address", "127.0.0.1"),
     Port = integer_to_list(mochiweb_socket_server:get(couch_httpd, port)),
-    "http://" ++ Addr ++ ":" ++ Port ++ "/" ++ binary_to_list(DbName).
+    "http://" ++ Addr ++ ":" ++ Port ++ "/" ++ ?b2l(DbName).
 
 query_view(DbName, DDoc, View) ->
     {ok, Code, _Headers, _Body} = test_request:get(
@@ -114,7 +168,7 @@ query_view(DbName, DDoc, View) ->
     ok.
 
 view_cleanup(DbName) ->
-    {ok, Db} = couch_db:open(DbName, [{user_ctx, ?ADMIN_USER}]),
+    {ok, Db} = couch_db:open(DbName, [?ADMIN_USER]),
     couch_mrview:cleanup(Db),
     couch_db:close(Db).
 
@@ -123,3 +177,69 @@ count_index_files(DbName) ->
     RootDir = couch_config:get("couchdb", "view_index_dir"),
     length(filelib:wildcard(RootDir ++ "/." ++
         binary_to_list(DbName) ++ "_design"++"/mrview/*")).
+
+create_docs(DbName) ->
+    {ok, Db} = couch_db:open(DbName, [?ADMIN_USER]),
+    Doc1 = couch_doc:from_json_obj({[
+        {<<"_id">>, <<"doc1">>},
+        {<<"value">>, 1}
+
+    ]}),
+    Doc2 = couch_doc:from_json_obj({[
+        {<<"_id">>, <<"doc2">>},
+        {<<"value">>, 2}
+
+    ]}),
+    Doc3 = couch_doc:from_json_obj({[
+        {<<"_id">>, <<"doc3">>},
+        {<<"value">>, 3}
+
+    ]}),
+    {ok, _} = couch_db:update_docs(Db, [Doc1, Doc2, Doc3]),
+    couch_db:ensure_full_commit(Db),
+    couch_db:close(Db).
+
+create_new_doc(DbName) ->
+    {ok, Db} = couch_db:open(DbName, [?ADMIN_USER]),
+    Doc666 = couch_doc:from_json_obj({[
+        {<<"_id">>, <<"doc666">>},
+        {<<"value">>, 999}
+
+    ]}),
+    {ok, _} = couch_db:update_docs(Db, [Doc666]),
+    couch_db:ensure_full_commit(Db),
+    couch_db:close(Db).
+
+create_design_doc(DbName) ->
+    {ok, Db} = couch_db:open(DbName, [?ADMIN_USER]),
+    DDoc = couch_doc:from_json_obj({[
+        {<<"_id">>, <<"_design/foo">>},
+        {<<"language">>, <<"javascript">>},
+        {<<"views">>, {[
+            {<<"bar">>, {[
+                {<<"map">>, <<"function(doc) { emit(doc.value, 1); }">>}
+            ]}}
+        ]}}
+    ]}),
+    {ok, _} = couch_db:update_docs(Db, [DDoc]),
+    couch_db:ensure_full_commit(Db),
+    couch_db:close(Db).
+
+has_doc(DocId1, Rows) ->
+    DocId = iolist_to_binary(DocId1),
+    lists:any(fun({R}) -> lists:member({<<"id">>, DocId}, R) end, Rows).
+
+backup_db_file(DbName) ->
+    DbDir = couch_config:get("couchdb", "database_dir"),
+    DbFile = filename:join([DbDir, ?b2l(DbName) ++ ".couch"]),
+    {ok, _} = file:copy(DbFile, DbFile ++ ".backup"),
+    ok.
+
+restore_backup_db_file(DbName) ->
+    DbDir = couch_config:get("couchdb", "database_dir"),
+    stop(whereis(couch_server_sup)),
+    DbFile = filename:join([DbDir, ?b2l(DbName) ++ ".couch"]),
+    ok = file:delete(DbFile),
+    ok = file:rename(DbFile ++ ".backup", DbFile),
+    start(),
+    ok.

http://git-wip-us.apache.org/repos/asf/couchdb/blob/79c69a9c/test/etap/150-invalid-view-seq.t
----------------------------------------------------------------------
diff --git a/test/etap/150-invalid-view-seq.t b/test/etap/150-invalid-view-seq.t
deleted file mode 100755
index 681875a..0000000
--- a/test/etap/150-invalid-view-seq.t
+++ /dev/null
@@ -1,183 +0,0 @@
-#!/usr/bin/env escript
-%% -*- erlang -*-
-
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
--record(user_ctx, {
-    name = null,
-    roles = [],
-    handler
-}).
-
-test_db_name() ->
-    <<"couch_test_invalid_view_seq">>.
-
-main(_) ->
-    test_util:init_code_path(),
-
-    etap:plan(10),
-    case (catch test()) of
-        ok ->
-            etap:end_tests();
-        Other ->
-            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
-            etap:bail(Other)
-    end,
-    ok.
-
-%% NOTE: since during the test we stop the server,
-%%       a huge and ugly but harmless stack trace is sent to stderr
-%%
-test() ->
-    couch_server_sup:start_link(test_util:config_files()),
-    timer:sleep(1000),
-    delete_db(),
-    create_db(),
-
-    create_docs(),
-    create_design_doc(),
-
-    % make DB file backup
-    backup_db_file(),
-
-    put(addr, couch_config:get("httpd", "bind_address", "127.0.0.1")),
-    put(port, integer_to_list(mochiweb_socket_server:get(couch_httpd, port))),
-
-    create_new_doc(),
-    query_view_before_restore_backup(),
-
-    % restore DB file backup after querying view
-    restore_backup_db_file(),
-
-    query_view_after_restore_backup(),
-
-    delete_db(),
-    couch_server_sup:stop(),
-    ok.
-
-admin_user_ctx() ->
-    {user_ctx, #user_ctx{roles=[<<"_admin">>]}}.
-
-create_db() ->
-    {ok, _} = couch_db:create(test_db_name(), [admin_user_ctx()]).
-
-delete_db() ->
-    couch_server:delete(test_db_name(), [admin_user_ctx()]).
-
-create_docs() ->
-    {ok, Db} = couch_db:open(test_db_name(), [admin_user_ctx()]),
-    Doc1 = couch_doc:from_json_obj({[
-        {<<"_id">>, <<"doc1">>},
-        {<<"value">>, 1}
-
-    ]}),
-    Doc2 = couch_doc:from_json_obj({[
-        {<<"_id">>, <<"doc2">>},
-        {<<"value">>, 2}
-
-    ]}),
-    Doc3 = couch_doc:from_json_obj({[
-        {<<"_id">>, <<"doc3">>},
-        {<<"value">>, 3}
-
-    ]}),
-    {ok, _} = couch_db:update_docs(Db, [Doc1, Doc2, Doc3]),
-    couch_db:ensure_full_commit(Db),
-    couch_db:close(Db).
-
-create_design_doc() ->
-    {ok, Db} = couch_db:open(test_db_name(), [admin_user_ctx()]),
-    DDoc = couch_doc:from_json_obj({[
-        {<<"_id">>, <<"_design/foo">>},
-        {<<"language">>, <<"javascript">>},
-        {<<"views">>, {[
-            {<<"bar">>, {[
-                {<<"map">>, <<"function(doc) { emit(doc.value, 1); }">>}
-            ]}}
-        ]}}
-    ]}),
-    {ok, _} = couch_db:update_docs(Db, [DDoc]),
-    couch_db:ensure_full_commit(Db),
-    couch_db:close(Db).
-
-backup_db_file() ->
-    DbFile = test_util:build_file("tmp/lib/" ++
-        binary_to_list(test_db_name()) ++ ".couch"),
-    {ok, _} = file:copy(DbFile, DbFile ++ ".backup"),
-    ok.
-
-create_new_doc() ->
-    {ok, Db} = couch_db:open(test_db_name(), [admin_user_ctx()]),
-    Doc666 = couch_doc:from_json_obj({[
-        {<<"_id">>, <<"doc666">>},
-        {<<"value">>, 999}
-
-    ]}),
-    {ok, _} = couch_db:update_docs(Db, [Doc666]),
-    couch_db:ensure_full_commit(Db),
-    couch_db:close(Db).
-
-db_url() ->
-    "http://" ++ get(addr) ++ ":" ++ get(port) ++ "/" ++
-    binary_to_list(test_db_name()).
-
-query_view_before_restore_backup() ->
-    {ok, Code, _Headers, Body} = test_util:request(
-        db_url() ++ "/_design/foo/_view/bar", [], get),
-    etap:is(Code, 200, "Got view response before restoring backup."),
-    ViewJson = ejson:decode(Body),
-    Rows = couch_util:get_nested_json_value(ViewJson, [<<"rows">>]),
-    HasDoc1 = has_doc("doc1", Rows),
-    HasDoc2 = has_doc("doc2", Rows),
-    HasDoc3 = has_doc("doc3", Rows),
-    HasDoc666 = has_doc("doc666", Rows),
-    etap:is(HasDoc1, true, "Before backup restore, view has doc1"),
-    etap:is(HasDoc2, true, "Before backup restore, view has doc2"),
-    etap:is(HasDoc3, true, "Before backup restore, view has doc3"),
-    etap:is(HasDoc666, true, "Before backup restore, view has doc666"),
-    ok.
-
-has_doc(DocId1, Rows) ->
-    DocId = iolist_to_binary(DocId1),
-    lists:any(
-        fun({R}) -> lists:member({<<"id">>, DocId}, R) end,
-        Rows
-    ).
-
-restore_backup_db_file() ->
-    couch_server_sup:stop(),
-    timer:sleep(3000),
-    DbFile = test_util:build_file("tmp/lib/" ++
-        binary_to_list(test_db_name()) ++ ".couch"),
-    ok = file:delete(DbFile),
-    ok = file:rename(DbFile ++ ".backup", DbFile),
-    couch_server_sup:start_link(test_util:config_files()),
-    timer:sleep(1000),
-    put(port, integer_to_list(mochiweb_socket_server:get(couch_httpd, port))),
-    ok.
-
-query_view_after_restore_backup() ->
-    {ok, Code, _Headers, Body} = test_util:request(
-        db_url() ++ "/_design/foo/_view/bar", [], get),
-    etap:is(Code, 200, "Got view response after restoring backup."),
-    ViewJson = ejson:decode(Body),
-    Rows = couch_util:get_nested_json_value(ViewJson, [<<"rows">>]),
-    HasDoc1 = has_doc("doc1", Rows),
-    HasDoc2 = has_doc("doc2", Rows),
-    HasDoc3 = has_doc("doc3", Rows),
-    HasDoc666 = has_doc("doc666", Rows),
-    etap:is(HasDoc1, true, "After backup restore, view has doc1"),
-    etap:is(HasDoc2, true, "After backup restore, view has doc2"),
-    etap:is(HasDoc3, true, "After backup restore, view has doc3"),
-    etap:is(HasDoc666, false, "After backup restore, view does not have doc666"),
-    ok.

http://git-wip-us.apache.org/repos/asf/couchdb/blob/79c69a9c/test/etap/Makefile.am
----------------------------------------------------------------------
diff --git a/test/etap/Makefile.am b/test/etap/Makefile.am
index 94ff6f2..bf9b3f2 100644
--- a/test/etap/Makefile.am
+++ b/test/etap/Makefile.am
@@ -36,7 +36,6 @@ fixture_files = \
     fixtures/test.couch
 
 tap_files = \
-    150-invalid-view-seq.t \
     160-vhosts.t \
     170-os-daemons.es \
     170-os-daemons.t \


[26/36] couchdb commit: updated refs/heads/1963-eunit to 85f2750

Posted by kx...@apache.org.
Port 100-ref-counter.t etap test suite to eunit


Project: http://git-wip-us.apache.org/repos/asf/couchdb/repo
Commit: http://git-wip-us.apache.org/repos/asf/couchdb/commit/5c95e8f2
Tree: http://git-wip-us.apache.org/repos/asf/couchdb/tree/5c95e8f2
Diff: http://git-wip-us.apache.org/repos/asf/couchdb/diff/5c95e8f2

Branch: refs/heads/1963-eunit
Commit: 5c95e8f2b6a4607325239a02bd03334135a4e56c
Parents: 0e0cde4
Author: Alexander Shorin <kx...@apache.org>
Authored: Mon May 26 20:59:45 2014 +0400
Committer: Alexander Shorin <kx...@apache.org>
Committed: Tue Jun 3 19:52:20 2014 +0400

----------------------------------------------------------------------
 test/couchdb/Makefile.am                 |   1 +
 test/couchdb/couch_ref_counter_tests.erl | 107 ++++++++++++++++++++++++
 test/etap/100-ref-counter.t              | 114 --------------------------
 test/etap/Makefile.am                    |   1 -
 4 files changed, 108 insertions(+), 115 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/couchdb/blob/5c95e8f2/test/couchdb/Makefile.am
----------------------------------------------------------------------
diff --git a/test/couchdb/Makefile.am b/test/couchdb/Makefile.am
index 7d10b89..0133601 100644
--- a/test/couchdb/Makefile.am
+++ b/test/couchdb/Makefile.am
@@ -35,6 +35,7 @@ eunit_files = \
     couchdb_file_compression_tests.erl \
     couch_config_tests.erl \
     couch_task_status_tests.erl \
+    couch_ref_counter_tests.erl \
     test_request.erl \
     couchdb_tests.hrl
 

http://git-wip-us.apache.org/repos/asf/couchdb/blob/5c95e8f2/test/couchdb/couch_ref_counter_tests.erl
----------------------------------------------------------------------
diff --git a/test/couchdb/couch_ref_counter_tests.erl b/test/couchdb/couch_ref_counter_tests.erl
new file mode 100644
index 0000000..7400ac0
--- /dev/null
+++ b/test/couchdb/couch_ref_counter_tests.erl
@@ -0,0 +1,107 @@
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+%   http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+-module(couch_ref_counter_tests).
+
+-include("../../src/couchdb/couch_db.hrl").
+-include("couchdb_tests.hrl").
+
+-define(TIMEOUT, 1000).
+
+
+setup() ->
+    {ok, RefCtr} = couch_ref_counter:start([]),
+    ChildPid = spawn(fun() -> loop() end),
+    {RefCtr, ChildPid}.
+
+teardown({_, ChildPid}) ->
+    erlang:monitor(process, ChildPid),
+    ChildPid ! close,
+    wait().
+
+
+couch_ref_counter_test_() ->
+    {
+        "CouchDB reference counter tests",
+        {
+            foreach,
+            fun setup/0, fun teardown/1,
+            [
+                fun should_initialize_with_calling_process_as_referrer/1,
+                fun should_ignore_unknown_pid/1,
+                fun should_increment_counter_on_pid_add/1,
+                fun should_not_increase_counter_on_readding_same_pid/1,
+                fun should_drop_ref_for_double_added_pid/1,
+                fun should_decrement_counter_on_pid_drop/1,
+                fun should_add_after_drop/1,
+                fun should_decrement_counter_on_process_exit/1
+
+            ]
+        }
+    }.
+
+
+should_initialize_with_calling_process_as_referrer({RefCtr, _}) ->
+    ?_assertEqual(1, couch_ref_counter:count(RefCtr)).
+
+should_ignore_unknown_pid({RefCtr, ChildPid}) ->
+    ?_assertEqual(ok, couch_ref_counter:drop(RefCtr, ChildPid)).
+
+should_increment_counter_on_pid_add({RefCtr, ChildPid}) ->
+    couch_ref_counter:add(RefCtr, ChildPid),
+    ?_assertEqual(2, couch_ref_counter:count(RefCtr)).
+
+should_not_increase_counter_on_readding_same_pid({RefCtr, ChildPid}) ->
+    couch_ref_counter:add(RefCtr, ChildPid),
+    couch_ref_counter:add(RefCtr, ChildPid),
+    ?_assertEqual(2, couch_ref_counter:count(RefCtr)).
+
+should_drop_ref_for_double_added_pid({RefCtr, ChildPid}) ->
+    couch_ref_counter:add(RefCtr, ChildPid),
+    couch_ref_counter:add(RefCtr, ChildPid),
+    couch_ref_counter:drop(RefCtr, ChildPid),
+    ?_assertEqual(2, couch_ref_counter:count(RefCtr)).
+
+should_decrement_counter_on_pid_drop({RefCtr, ChildPid}) ->
+    couch_ref_counter:add(RefCtr, ChildPid),
+    couch_ref_counter:drop(RefCtr, ChildPid),
+    ?_assertEqual(1, couch_ref_counter:count(RefCtr)).
+
+should_add_after_drop({RefCtr, ChildPid}) ->
+    couch_ref_counter:add(RefCtr, ChildPid),
+    couch_ref_counter:drop(RefCtr, ChildPid),
+    couch_ref_counter:add(RefCtr, ChildPid),
+    ?_assertEqual(2, couch_ref_counter:count(RefCtr)).
+
+should_decrement_counter_on_process_exit({RefCtr, ChildPid}) ->
+    ?_assertEqual(1,
+        begin
+            couch_ref_counter:add(RefCtr, ChildPid),
+            erlang:monitor(process, ChildPid),
+            ChildPid ! close,
+            wait(),
+            couch_ref_counter:count(RefCtr)
+        end).
+
+
+loop() ->
+    receive
+        close -> ok
+    end.
+
+wait() ->
+    receive
+        {'DOWN', _, _, _, _} ->
+            ok
+    after ?TIMEOUT ->
+        throw(timeout_error)
+    end.

http://git-wip-us.apache.org/repos/asf/couchdb/blob/5c95e8f2/test/etap/100-ref-counter.t
----------------------------------------------------------------------
diff --git a/test/etap/100-ref-counter.t b/test/etap/100-ref-counter.t
deleted file mode 100755
index 8f996d0..0000000
--- a/test/etap/100-ref-counter.t
+++ /dev/null
@@ -1,114 +0,0 @@
-#!/usr/bin/env escript
-%% -*- erlang -*-
-
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
-main(_) ->
-    test_util:init_code_path(),
-    etap:plan(8),
-    case (catch test()) of
-        ok ->
-            etap:end_tests();
-        Other ->
-            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
-            etap:bail(Other)
-    end,
-    ok.
-
-loop() ->
-    receive
-        close -> ok
-    end.
-
-wait() ->
-    receive
-        {'DOWN', _, _, _, _} -> ok
-    after 1000 ->
-        throw(timeout_error)
-    end.
-
-test() ->
-    {ok, RefCtr} = couch_ref_counter:start([]),
-
-    etap:is(
-        couch_ref_counter:count(RefCtr),
-        1,
-        "A ref_counter is initialized with the calling process as a referer."
-    ),
-
-    ChildPid1 = spawn(fun() -> loop() end),
-
-    % This is largely implicit in that nothing else breaks
-    % as ok is just returned from gen_server:cast()
-    etap:is(
-        couch_ref_counter:drop(RefCtr, ChildPid1),
-        ok,
-        "Dropping an unknown Pid is ignored."
-    ),
-
-    couch_ref_counter:add(RefCtr, ChildPid1),
-    etap:is(
-        couch_ref_counter:count(RefCtr),
-        2,
-        "Adding a Pid to the ref_counter increases it's count."
-    ),
-
-    couch_ref_counter:add(RefCtr, ChildPid1),
-    etap:is(
-        couch_ref_counter:count(RefCtr),
-        2,
-        "Readding the same Pid maintains the count but increments it's refs."
-    ),
-
-    couch_ref_counter:drop(RefCtr, ChildPid1),
-    etap:is(
-        couch_ref_counter:count(RefCtr),
-        2,
-        "Droping the doubly added Pid only removes a ref, not a referer."
-    ),
-
-    couch_ref_counter:drop(RefCtr, ChildPid1),
-    etap:is(
-        couch_ref_counter:count(RefCtr),
-        1,
-        "Dropping the second ref drops the referer."
-    ),
-
-    couch_ref_counter:add(RefCtr, ChildPid1),
-    etap:is(
-        couch_ref_counter:count(RefCtr),
-        2,
-        "Sanity checking that the Pid was re-added."
-    ),
-
-    erlang:monitor(process, ChildPid1),
-    ChildPid1 ! close,
-    wait(),
-    
-    CheckFun = fun
-        (Iter, nil) ->
-            case couch_ref_counter:count(RefCtr) of
-                1 -> Iter;
-                _ -> nil
-            end;
-        (_, Acc) ->
-            Acc
-    end,
-    Result = lists:foldl(CheckFun, nil, lists:seq(1, 10000)),
-    etap:isnt(
-        Result,
-        nil,
-        "The referer count was decremented automatically on process exit."
-    ),
-
-    ok.

http://git-wip-us.apache.org/repos/asf/couchdb/blob/5c95e8f2/test/etap/Makefile.am
----------------------------------------------------------------------
diff --git a/test/etap/Makefile.am b/test/etap/Makefile.am
index 4657656..2e38ee5 100644
--- a/test/etap/Makefile.am
+++ b/test/etap/Makefile.am
@@ -36,7 +36,6 @@ fixture_files = \
     fixtures/test.couch
 
 tap_files = \
-    100-ref-counter.t \
     120-stats-collect.t \
     121-stats-aggregates.cfg \
     121-stats-aggregates.ini \


[28/36] couchdb commit: updated refs/heads/1963-eunit to 85f2750

Posted by kx...@apache.org.
Port 121-stats-aggregates.t etap test suite to eunit

Merged into couch_stats_tests suite.


Project: http://git-wip-us.apache.org/repos/asf/couchdb/repo
Commit: http://git-wip-us.apache.org/repos/asf/couchdb/commit/ac9379a9
Tree: http://git-wip-us.apache.org/repos/asf/couchdb/tree/ac9379a9
Diff: http://git-wip-us.apache.org/repos/asf/couchdb/diff/ac9379a9

Branch: refs/heads/1963-eunit
Commit: ac9379a95188892dc040893bd01090ecc7f6b83d
Parents: 39b0ec1
Author: Alexander Shorin <kx...@apache.org>
Authored: Tue May 27 18:27:53 2014 +0400
Committer: Alexander Shorin <kx...@apache.org>
Committed: Tue Jun 3 19:52:34 2014 +0400

----------------------------------------------------------------------
 test/couchdb/Makefile.am                        |   4 +-
 test/couchdb/couch_stats_tests.erl              | 225 ++++++++++++++++++-
 .../couchdb/fixtures/couch_stats_aggregates.cfg |  19 ++
 .../couchdb/fixtures/couch_stats_aggregates.ini |  20 ++
 test/etap/121-stats-aggregates.cfg              |  19 --
 test/etap/121-stats-aggregates.ini              |  20 --
 test/etap/121-stats-aggregates.t                | 171 --------------
 test/etap/Makefile.am                           |   3 -
 8 files changed, 264 insertions(+), 217 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/couchdb/blob/ac9379a9/test/couchdb/Makefile.am
----------------------------------------------------------------------
diff --git a/test/couchdb/Makefile.am b/test/couchdb/Makefile.am
index afa8c56..c047aa4 100644
--- a/test/couchdb/Makefile.am
+++ b/test/couchdb/Makefile.am
@@ -42,7 +42,9 @@ eunit_files = \
 
 fixture_files = \
     fixtures/couch_config_tests_1.ini \
-    fixtures/couch_config_tests_2.ini
+    fixtures/couch_config_tests_2.ini \
+    fixtures/couch_stats_aggregates.cfg \
+    fixtures/couch_stats_aggregates.ini
 
 EXTRA_DIST = \
     run.in \

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ac9379a9/test/couchdb/couch_stats_tests.erl
----------------------------------------------------------------------
diff --git a/test/couchdb/couch_stats_tests.erl b/test/couchdb/couch_stats_tests.erl
index 2086d7b..6fe8ae6 100644
--- a/test/couchdb/couch_stats_tests.erl
+++ b/test/couchdb/couch_stats_tests.erl
@@ -15,18 +15,41 @@
 -include("../../src/couchdb/couch_db.hrl").
 -include("couchdb_tests.hrl").
 
+-define(STATS_CFG_FIXTURE,
+    filename:join([?FIXTURESDIR, "couch_stats_aggregates.cfg"])).
+-define(STATS_INI_FIXTURE,
+    filename:join([?FIXTURESDIR, "couch_stats_aggregates.ini"])).
 -define(TIMEOUT, 1000).
--define(SLEEPTIME, 100).
+-define(TIMEWAIT, 500).
 
 
 setup_collector() ->
     couch_stats_collector:start(),
     ok.
 
+setup_aggregator(_) ->
+    {ok, Pid} = couch_config:start_link([?STATS_INI_FIXTURE]),
+    {ok, _} = couch_stats_collector:start(),
+    {ok, _} = couch_stats_aggregator:start(?STATS_CFG_FIXTURE),
+    Pid.
+
 teardown_collector(_) ->
     couch_stats_collector:stop(),
     ok.
 
+teardown_aggregator(_, Pid) ->
+    couch_stats_aggregator:stop(),
+    couch_stats_collector:stop(),
+    erlang:monitor(process, Pid),
+    couch_config:stop(),
+    receive
+        {'DOWN', _, _, Pid, _} ->
+            ok
+    after ?TIMEOUT ->
+        throw({timeout, config_stop})
+    end,
+    ok.
+
 
 couch_stats_collector_test_() ->
     {
@@ -51,6 +74,39 @@ couch_stats_collector_test_() ->
         }
     }.
 
+couch_stats_aggregator_test_() ->
+    Funs = [
+        fun should_init_empty_aggregate/2,
+        fun should_get_empty_aggregate/2,
+        fun should_change_stats_on_values_add/2,
+        fun should_change_stats_for_all_times_on_values_add/2,
+        fun should_change_stats_on_values_change/2,
+        fun should_change_stats_for_all_times_on_values_change/2,
+        fun should_not_remove_data_after_some_time_for_0_sample/2,
+        fun should_remove_data_after_some_time_for_other_samples/2
+    ],
+    {
+        "CouchDB stats aggregator tests",
+        [
+            {
+                "Absolute values",
+                {
+                    foreachx,
+                    fun setup_aggregator/1, fun teardown_aggregator/2,
+                    [{absolute, Fun} || Fun <- Funs]
+                }
+            },
+            {
+                "Counters",
+                {
+                    foreachx,
+                    fun setup_aggregator/1, fun teardown_aggregator/2,
+                    [{counter, Fun} || Fun <- Funs]
+                }
+            }
+        ]
+    }.
+
 
 should_increment_counter() ->
     ?_assertEqual(100,
@@ -122,7 +178,7 @@ should_decrement_counter_on_process_exit() ->
             end,
             % sleep for awhile to let collector handle the updates
             % suddenly, it couldn't notice process death instantly
-            timer:sleep(?SLEEPTIME),
+            timer:sleep(?TIMEWAIT),
             couch_stats_collector:get(hoopla)
         end).
 
@@ -138,7 +194,7 @@ should_decrement_for_each_track_process_count_call_on_exit() ->
             after ?TIMEOUT ->
                 throw(timeout)
             end,
-            timer:sleep(?SLEEPTIME),
+            timer:sleep(?TIMEWAIT),
             couch_stats_collector:get(hoopla)
         end).
 
@@ -170,6 +226,158 @@ should_return_absolute_values() ->
             lists:sort(couch_stats_collector:all(absolute))
         end).
 
+should_init_empty_aggregate(absolute, _) ->
+    {Aggs} = couch_stats_aggregator:all(),
+    ?_assertEqual({[{'11', make_agg(<<"randomosity">>,
+                                    null, null, null, null, null)}]},
+                  couch_util:get_value(number, Aggs));
+should_init_empty_aggregate(counter, _) ->
+    {Aggs} = couch_stats_aggregator:all(),
+    ?_assertEqual({[{stuff, make_agg(<<"yay description">>,
+                                     null, null, null, null, null)}]},
+                  couch_util:get_value(testing, Aggs)).
+
+should_get_empty_aggregate(absolute, _) ->
+    ?_assertEqual(make_agg(<<"randomosity">>, null, null, null, null, null),
+             couch_stats_aggregator:get_json({number, '11'}));
+should_get_empty_aggregate(counter, _) ->
+    ?_assertEqual(make_agg(<<"yay description">>, null, null, null, null, null),
+             couch_stats_aggregator:get_json({testing, stuff})).
+
+should_change_stats_on_values_add(absolute, _) ->
+    lists:foreach(fun(X) ->
+        couch_stats_collector:record({number, 11}, X)
+    end, lists:seq(0, 10)),
+    couch_stats_aggregator:collect_sample(),
+    ?_assertEqual(make_agg(<<"randomosity">>, 5.0, 5.0, null, 5.0, 5.0),
+                  couch_stats_aggregator:get_json({number, 11}));
+should_change_stats_on_values_add(counter, _) ->
+    lists:foreach(fun(_) ->
+        couch_stats_collector:increment({testing, stuff})
+    end, lists:seq(1, 100)),
+    couch_stats_aggregator:collect_sample(),
+    ?_assertEqual(make_agg(<<"yay description">>, 100.0, 100.0, null, 100, 100),
+                  couch_stats_aggregator:get_json({testing, stuff})).
+
+should_change_stats_for_all_times_on_values_add(absolute, _) ->
+    lists:foreach(fun(X) ->
+        couch_stats_collector:record({number, 11}, X)
+    end, lists:seq(0, 10)),
+    couch_stats_aggregator:collect_sample(),
+    ?_assertEqual(make_agg(<<"randomosity">>, 5.0, 5.0, null, 5.0, 5.0),
+                  couch_stats_aggregator:get_json({number, 11}, 1));
+should_change_stats_for_all_times_on_values_add(counter, _) ->
+    lists:foreach(fun(_) ->
+        couch_stats_collector:increment({testing, stuff})
+    end, lists:seq(1, 100)),
+    couch_stats_aggregator:collect_sample(),
+    ?_assertEqual(make_agg(<<"yay description">>, 100.0, 100.0, null, 100, 100),
+                  couch_stats_aggregator:get_json({testing, stuff}, 1)).
+
+should_change_stats_on_values_change(absolute, _) ->
+    ?_assertEqual(make_agg(<<"randomosity">>, 20.0, 10.0, 7.071, 5.0, 15.0),
+        begin
+            lists:foreach(fun(X) ->
+                couch_stats_collector:record({number, 11}, X)
+            end, lists:seq(0, 10)),
+            couch_stats_aggregator:collect_sample(),
+            timer:sleep(?TIMEWAIT),
+            couch_stats_collector:record({number, 11}, 15),
+            couch_stats_aggregator:collect_sample(),
+            couch_stats_aggregator:get_json({number, 11})
+        end);
+should_change_stats_on_values_change(counter, _) ->
+    ?_assertEqual(make_agg(<<"yay description">>, 100.0, 50.0, 70.711, 0, 100),
+        begin
+            lists:foreach(fun(_) ->
+                couch_stats_collector:increment({testing, stuff})
+            end, lists:seq(1, 100)),
+            couch_stats_aggregator:collect_sample(),
+            timer:sleep(?TIMEWAIT),
+            couch_stats_aggregator:collect_sample(),
+            couch_stats_aggregator:get_json({testing, stuff})
+        end).
+
+should_change_stats_for_all_times_on_values_change(absolute, _) ->
+    ?_assertEqual(make_agg(<<"randomosity">>, 20.0, 10.0, 7.071, 5.0, 15.0),
+        begin
+            lists:foreach(fun(X) ->
+                couch_stats_collector:record({number, 11}, X)
+            end, lists:seq(0, 10)),
+            couch_stats_aggregator:collect_sample(),
+            timer:sleep(?TIMEWAIT),
+            couch_stats_collector:record({number, 11}, 15),
+            couch_stats_aggregator:collect_sample(),
+            couch_stats_aggregator:get_json({number, 11}, 1)
+        end);
+should_change_stats_for_all_times_on_values_change(counter, _) ->
+    ?_assertEqual(make_agg(<<"yay description">>, 100.0, 50.0, 70.711, 0, 100),
+        begin
+            lists:foreach(fun(_) ->
+                couch_stats_collector:increment({testing, stuff})
+            end, lists:seq(1, 100)),
+            couch_stats_aggregator:collect_sample(),
+            timer:sleep(?TIMEWAIT),
+            couch_stats_aggregator:collect_sample(),
+            couch_stats_aggregator:get_json({testing, stuff}, 1)
+        end).
+
+should_not_remove_data_after_some_time_for_0_sample(absolute, _) ->
+    ?_assertEqual(make_agg(<<"randomosity">>, 20.0, 10.0, 7.071, 5.0, 15.0),
+        begin
+            lists:foreach(fun(X) ->
+                couch_stats_collector:record({number, 11}, X)
+            end, lists:seq(0, 10)),
+            couch_stats_aggregator:collect_sample(),
+            timer:sleep(?TIMEWAIT),
+            couch_stats_collector:record({number, 11}, 15),
+            couch_stats_aggregator:collect_sample(),
+            timer:sleep(?TIMEWAIT),
+            couch_stats_aggregator:collect_sample(),
+            couch_stats_aggregator:get_json({number, 11})
+        end);
+should_not_remove_data_after_some_time_for_0_sample(counter, _) ->
+    ?_assertEqual(make_agg(<<"yay description">>, 100.0, 33.333, 57.735, 0, 100),
+        begin
+            lists:foreach(fun(_) ->
+                couch_stats_collector:increment({testing, stuff})
+            end, lists:seq(1, 100)),
+            couch_stats_aggregator:collect_sample(),
+            timer:sleep(?TIMEWAIT),
+            couch_stats_aggregator:collect_sample(),
+            timer:sleep(?TIMEWAIT),
+            couch_stats_aggregator:collect_sample(),
+            couch_stats_aggregator:get_json({testing, stuff})
+        end).
+
+should_remove_data_after_some_time_for_other_samples(absolute, _) ->
+    ?_assertEqual(make_agg(<<"randomosity">>, 15.0, 15.0, null, 15.0, 15.0),
+        begin
+            lists:foreach(fun(X) ->
+                couch_stats_collector:record({number, 11}, X)
+            end, lists:seq(0, 10)),
+            couch_stats_aggregator:collect_sample(),
+            timer:sleep(?TIMEWAIT),
+            couch_stats_collector:record({number, 11}, 15),
+            couch_stats_aggregator:collect_sample(),
+            timer:sleep(?TIMEWAIT),
+            couch_stats_aggregator:collect_sample(),
+            couch_stats_aggregator:get_json({number, 11}, 1)
+        end);
+should_remove_data_after_some_time_for_other_samples(counter, _) ->
+    ?_assertEqual(make_agg(<<"yay description">>, 0, 0.0, 0.0, 0, 0),
+        begin
+            lists:foreach(fun(_) ->
+                couch_stats_collector:increment({testing, stuff})
+            end, lists:seq(1, 100)),
+            couch_stats_aggregator:collect_sample(),
+            timer:sleep(?TIMEWAIT),
+            couch_stats_aggregator:collect_sample(),
+            timer:sleep(?TIMEWAIT),
+            couch_stats_aggregator:collect_sample(),
+            couch_stats_aggregator:get_json({testing, stuff}, 1)
+        end).
+
 
 spawn_and_count(N) ->
     Self = self(),
@@ -191,3 +399,14 @@ repeat(_, 0) ->
 repeat(Fun, Count) ->
     Fun(),
     repeat(Fun, Count-1).
+
+make_agg(Desc, Sum, Mean, StdDev, Min, Max) ->
+    {[
+        {description, Desc},
+        {current, Sum},
+        {sum, Sum},
+        {mean, Mean},
+        {stddev, StdDev},
+        {min, Min},
+        {max, Max}
+    ]}.

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ac9379a9/test/couchdb/fixtures/couch_stats_aggregates.cfg
----------------------------------------------------------------------
diff --git a/test/couchdb/fixtures/couch_stats_aggregates.cfg b/test/couchdb/fixtures/couch_stats_aggregates.cfg
new file mode 100644
index 0000000..30e475d
--- /dev/null
+++ b/test/couchdb/fixtures/couch_stats_aggregates.cfg
@@ -0,0 +1,19 @@
+% Licensed to the Apache Software Foundation (ASF) under one
+% or more contributor license agreements.  See the NOTICE file
+% distributed with this work for additional information
+% regarding copyright ownership.  The ASF licenses this file
+% to you under the Apache License, Version 2.0 (the
+% "License"); you may not use this file except in compliance
+% with the License.  You may obtain a copy of the License at
+% 
+%   http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing,
+% software distributed under the License is distributed on an
+% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+% KIND, either express or implied.  See the License for the
+% specific language governing permissions and limitations
+% under the License.
+
+{testing, stuff, "yay description"}.
+{number, '11', "randomosity"}.

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ac9379a9/test/couchdb/fixtures/couch_stats_aggregates.ini
----------------------------------------------------------------------
diff --git a/test/couchdb/fixtures/couch_stats_aggregates.ini b/test/couchdb/fixtures/couch_stats_aggregates.ini
new file mode 100644
index 0000000..cc5cd21
--- /dev/null
+++ b/test/couchdb/fixtures/couch_stats_aggregates.ini
@@ -0,0 +1,20 @@
+; Licensed to the Apache Software Foundation (ASF) under one
+; or more contributor license agreements.  See the NOTICE file
+; distributed with this work for additional information
+; regarding copyright ownership.  The ASF licenses this file
+; to you under the Apache License, Version 2.0 (the
+; "License"); you may not use this file except in compliance
+; with the License.  You may obtain a copy of the License at
+; 
+;   http://www.apache.org/licenses/LICENSE-2.0
+;
+; Unless required by applicable law or agreed to in writing,
+; software distributed under the License is distributed on an
+; "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+; KIND, either express or implied.  See the License for the
+; specific language governing permissions and limitations
+; under the License.
+
+[stats]
+rate = 10000000 ; We call collect_sample in testing
+samples = [0, 1]

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ac9379a9/test/etap/121-stats-aggregates.cfg
----------------------------------------------------------------------
diff --git a/test/etap/121-stats-aggregates.cfg b/test/etap/121-stats-aggregates.cfg
deleted file mode 100644
index 30e475d..0000000
--- a/test/etap/121-stats-aggregates.cfg
+++ /dev/null
@@ -1,19 +0,0 @@
-% Licensed to the Apache Software Foundation (ASF) under one
-% or more contributor license agreements.  See the NOTICE file
-% distributed with this work for additional information
-% regarding copyright ownership.  The ASF licenses this file
-% to you under the Apache License, Version 2.0 (the
-% "License"); you may not use this file except in compliance
-% with the License.  You may obtain a copy of the License at
-% 
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing,
-% software distributed under the License is distributed on an
-% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-% KIND, either express or implied.  See the License for the
-% specific language governing permissions and limitations
-% under the License.
-
-{testing, stuff, "yay description"}.
-{number, '11', "randomosity"}.

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ac9379a9/test/etap/121-stats-aggregates.ini
----------------------------------------------------------------------
diff --git a/test/etap/121-stats-aggregates.ini b/test/etap/121-stats-aggregates.ini
deleted file mode 100644
index cc5cd21..0000000
--- a/test/etap/121-stats-aggregates.ini
+++ /dev/null
@@ -1,20 +0,0 @@
-; Licensed to the Apache Software Foundation (ASF) under one
-; or more contributor license agreements.  See the NOTICE file
-; distributed with this work for additional information
-; regarding copyright ownership.  The ASF licenses this file
-; to you under the Apache License, Version 2.0 (the
-; "License"); you may not use this file except in compliance
-; with the License.  You may obtain a copy of the License at
-; 
-;   http://www.apache.org/licenses/LICENSE-2.0
-;
-; Unless required by applicable law or agreed to in writing,
-; software distributed under the License is distributed on an
-; "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-; KIND, either express or implied.  See the License for the
-; specific language governing permissions and limitations
-; under the License.
-
-[stats]
-rate = 10000000 ; We call collect_sample in testing
-samples = [0, 1]

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ac9379a9/test/etap/121-stats-aggregates.t
----------------------------------------------------------------------
diff --git a/test/etap/121-stats-aggregates.t b/test/etap/121-stats-aggregates.t
deleted file mode 100755
index d678aa9..0000000
--- a/test/etap/121-stats-aggregates.t
+++ /dev/null
@@ -1,171 +0,0 @@
-#!/usr/bin/env escript
-%% -*- erlang -*-
-
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
-ini_file() ->
-    test_util:source_file("test/etap/121-stats-aggregates.ini").
-
-cfg_file() ->
-    test_util:source_file("test/etap/121-stats-aggregates.cfg").
-
-main(_) ->
-    test_util:init_code_path(),
-    etap:plan(17),
-    case (catch test()) of
-        ok ->
-            etap:end_tests();
-        Other ->
-            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
-            etap:bail()
-    end,
-    ok.
-
-test() ->
-    couch_config:start_link([ini_file()]),
-    couch_stats_collector:start(),
-    couch_stats_aggregator:start(cfg_file()),
-    ok = test_all_empty(),
-    ok = test_get_empty(),
-    ok = test_count_stats(),
-    ok = test_abs_stats(),
-    ok.
-
-test_all_empty() ->
-    {Aggs} = couch_stats_aggregator:all(),
-
-    etap:is(length(Aggs), 2, "There are only two aggregate types in testing."),
-    etap:is(
-        couch_util:get_value(testing, Aggs),
-        {[{stuff, make_agg(<<"yay description">>,
-            null, null, null, null, null)}]},
-        "{testing, stuff} is empty at start."
-    ),
-    etap:is(
-        couch_util:get_value(number, Aggs),
-        {[{'11', make_agg(<<"randomosity">>,
-            null, null, null, null, null)}]},
-        "{number, '11'} is empty at start."
-    ),
-    ok.
-    
-test_get_empty() ->
-    etap:is(
-        couch_stats_aggregator:get_json({testing, stuff}),
-        make_agg(<<"yay description">>, null, null, null, null, null),
-        "Getting {testing, stuff} returns an empty aggregate."
-    ),
-    etap:is(
-        couch_stats_aggregator:get_json({number, '11'}),
-        make_agg(<<"randomosity">>, null, null, null, null, null),
-        "Getting {number, '11'} returns an empty aggregate."
-    ),
-    ok.
-
-test_count_stats() ->
-    lists:foreach(fun(_) ->
-        couch_stats_collector:increment({testing, stuff})
-    end, lists:seq(1, 100)),
-    couch_stats_aggregator:collect_sample(),
-    etap:is(
-        couch_stats_aggregator:get_json({testing, stuff}),
-        make_agg(<<"yay description">>, 100, 100, null, 100, 100),
-        "COUNT: Adding values changes the stats."
-    ),
-    etap:is(
-        couch_stats_aggregator:get_json({testing, stuff}, 1),
-        make_agg(<<"yay description">>, 100, 100, null, 100, 100),
-        "COUNT: Adding values changes stats for all times."
-    ),
-
-    timer:sleep(500),
-    couch_stats_aggregator:collect_sample(),
-    etap:is(
-        couch_stats_aggregator:get_json({testing, stuff}),
-        make_agg(<<"yay description">>, 100, 50, 70.711, 0, 100),
-        "COUNT: Removing values changes stats."
-    ),
-    etap:is(
-        couch_stats_aggregator:get_json({testing, stuff}, 1),
-        make_agg(<<"yay description">>, 100, 50, 70.711, 0, 100),
-        "COUNT: Removing values changes stats for all times."
-    ),
-
-    timer:sleep(600),
-    couch_stats_aggregator:collect_sample(),
-    etap:is(
-        couch_stats_aggregator:get_json({testing, stuff}),
-        make_agg(<<"yay description">>, 100, 33.333, 57.735, 0, 100),
-        "COUNT: Letting time passes doesn't remove data from time 0 aggregates"
-    ),
-    etap:is(
-        couch_stats_aggregator:get_json({testing, stuff}, 1),
-        make_agg(<<"yay description">>, 0, 0, 0, 0, 0),
-        "COUNT: Letting time pass removes data from other time aggregates."
-    ),
-    ok.
-
-test_abs_stats() ->
-    lists:foreach(fun(X) ->
-        couch_stats_collector:record({number, 11}, X)
-    end, lists:seq(0, 10)),
-    couch_stats_aggregator:collect_sample(),
-    etap:is(
-        couch_stats_aggregator:get_json({number, 11}),
-        make_agg(<<"randomosity">>, 5, 5, null, 5, 5),
-        "ABS: Adding values changes the stats."
-    ),
-    etap:is(
-        couch_stats_aggregator:get_json({number, 11}, 1),
-        make_agg(<<"randomosity">>, 5, 5, null, 5, 5),
-        "ABS: Adding values changes stats for all times."
-    ),
-
-    timer:sleep(500),
-    couch_stats_collector:record({number, 11}, 15),
-    couch_stats_aggregator:collect_sample(),
-    etap:is(
-        couch_stats_aggregator:get_json({number, 11}),
-        make_agg(<<"randomosity">>, 20, 10, 7.071, 5, 15),
-        "ABS: New values changes stats"
-    ),
-    etap:is(
-        couch_stats_aggregator:get_json({number, 11}, 1),
-        make_agg(<<"randomosity">>, 20, 10, 7.071, 5, 15),
-        "ABS: Removing values changes stats for all times."
-    ),
-
-    timer:sleep(600),
-    couch_stats_aggregator:collect_sample(),
-    etap:is(
-        couch_stats_aggregator:get_json({number, 11}),
-        make_agg(<<"randomosity">>, 20, 10, 7.071, 5, 15),
-        "ABS: Letting time passes doesn't remove data from time 0 aggregates"
-    ),
-    etap:is(
-        couch_stats_aggregator:get_json({number, 11}, 1),
-        make_agg(<<"randomosity">>, 15, 15, null, 15, 15),
-        "ABS: Letting time pass removes data from other time aggregates."
-    ),
-    ok.
-
-make_agg(Desc, Sum, Mean, StdDev, Min, Max) ->
-    {[
-        {description, Desc},
-        {current, Sum},
-        {sum, Sum},
-        {mean, Mean},
-        {stddev, StdDev},
-        {min, Min},
-        {max, Max}
-    ]}.

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ac9379a9/test/etap/Makefile.am
----------------------------------------------------------------------
diff --git a/test/etap/Makefile.am b/test/etap/Makefile.am
index 216aa78..abe252d 100644
--- a/test/etap/Makefile.am
+++ b/test/etap/Makefile.am
@@ -36,9 +36,6 @@ fixture_files = \
     fixtures/test.couch
 
 tap_files = \
-    121-stats-aggregates.cfg \
-    121-stats-aggregates.ini \
-    121-stats-aggregates.t \
     130-attachments-md5.t \
     140-attachment-comp.t \
     150-invalid-view-seq.t \


[10/36] couchdb commit: updated refs/heads/1963-eunit to 85f2750

Posted by kx...@apache.org.
Port 06[0-5]-kt-*.t etap test suites to eunit

All merged into single suite since they're test the same module.


Project: http://git-wip-us.apache.org/repos/asf/couchdb/repo
Commit: http://git-wip-us.apache.org/repos/asf/couchdb/commit/5f6a0b66
Tree: http://git-wip-us.apache.org/repos/asf/couchdb/tree/5f6a0b66
Diff: http://git-wip-us.apache.org/repos/asf/couchdb/diff/5f6a0b66

Branch: refs/heads/1963-eunit
Commit: 5f6a0b6699437512ccd980edf91235856c7bb989
Parents: 11e4507
Author: Alexander Shorin <kx...@apache.org>
Authored: Sun May 18 14:53:03 2014 +0400
Committer: Alexander Shorin <kx...@apache.org>
Committed: Tue Jun 3 15:14:25 2014 +0400

----------------------------------------------------------------------
 test/couchdb/Makefile.am              |   1 +
 test/couchdb/couch_key_tree_tests.erl | 381 +++++++++++++++++++++++++++++
 test/etap/060-kt-merging.t            | 176 -------------
 test/etap/061-kt-missing-leaves.t     |  65 -----
 test/etap/062-kt-remove-leaves.t      |  69 ------
 test/etap/063-kt-get-leaves.t         |  98 --------
 test/etap/064-kt-counting.t           |  46 ----
 test/etap/065-kt-stemming.t           |  42 ----
 test/etap/Makefile.am                 |   6 -
 9 files changed, 382 insertions(+), 502 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/couchdb/blob/5f6a0b66/test/couchdb/Makefile.am
----------------------------------------------------------------------
diff --git a/test/couchdb/Makefile.am b/test/couchdb/Makefile.am
index 948ba7c..396a36b 100644
--- a/test/couchdb/Makefile.am
+++ b/test/couchdb/Makefile.am
@@ -25,6 +25,7 @@ eunit_files = \
     couch_uuids_tests.erl \
     couch_work_queue_tests.erl \
     couch_stream_tests.erl \
+    couch_key_tree_tests.erl \
     couchdb_tests.hrl
 
 EXTRA_DIST = \

http://git-wip-us.apache.org/repos/asf/couchdb/blob/5f6a0b66/test/couchdb/couch_key_tree_tests.erl
----------------------------------------------------------------------
diff --git a/test/couchdb/couch_key_tree_tests.erl b/test/couchdb/couch_key_tree_tests.erl
new file mode 100644
index 0000000..7429f53
--- /dev/null
+++ b/test/couchdb/couch_key_tree_tests.erl
@@ -0,0 +1,381 @@
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+%   http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+-module(couch_key_tree_tests).
+
+-include_lib("couchdb_tests.hrl").
+
+-define(DEPTH, 10).
+
+
+key_tree_merge_test_()->
+    {
+        "Key tree merge",
+        [
+            should_merge_with_empty_tree(),
+            should_merge_reflexive(),
+            should_merge_prefix_of_a_tree_with_tree(),
+            should_produce_conflict_on_merge_with_unrelated_branch(),
+            should_merge_reflexive_for_child_nodes(),
+            should_merge_tree_to_itself(),
+            should_merge_tree_of_odd_length(),
+            should_merge_tree_with_stem(),
+            should_merge_with_stem_at_deeper_level(),
+            should_merge_with_stem_at_deeper_level_with_deeper_paths(),
+            should_merge_single_tree_with_deeper_stem(),
+            should_merge_tree_with_large_stem(),
+            should_merge_stems(),
+            should_create_conflicts_on_merge(),
+            should_create_no_conflicts_on_merge(),
+            should_ignore_conflicting_branch()
+        ]
+    }.
+
+key_tree_missing_leaves_test_()->
+    {
+        "Missing tree leaves",
+        [
+            should_not_find_missing_leaves(),
+            should_find_missing_leaves()
+        ]
+    }.
+
+key_tree_remove_leaves_test_()->
+    {
+        "Remove tree leaves",
+        [
+            should_have_no_effect_on_removing_no_leaves(),
+            should_have_no_effect_on_removing_non_existant_branch(),
+            should_remove_leaf(),
+            should_produce_empty_tree_on_removing_all_leaves(),
+            should_have_no_effect_on_removing_non_existant_node(),
+            should_produce_empty_tree_on_removing_last_leaf()
+        ]
+    }.
+
+key_tree_get_leaves_test_()->
+    {
+        "Leaves retrieving",
+        [
+            should_extract_subtree(),
+            should_extract_subsubtree(),
+            should_gather_non_existant_leaf(),
+            should_gather_leaf(),
+            shoul_gather_multiple_leaves(),
+            should_retrieve_full_key_path(),
+            should_retrieve_full_key_path_for_node(),
+            should_retrieve_leaves_with_parent_node(),
+            should_retrieve_all_leaves()
+        ]
+    }.
+
+key_tree_leaf_counting_test_()->
+    {
+        "Leaf counting",
+        [
+            should_have_no_leaves_for_empty_tree(),
+            should_have_single_leaf_for_tree_with_single_node(),
+            should_have_two_leaves_for_tree_with_chindler_siblings(),
+            should_not_affect_on_leaf_counting_for_stemmed_tree()
+        ]
+    }.
+
+key_tree_stemming_test_()->
+    {
+        "Stemming",
+        [
+            should_have_no_effect_for_stemming_more_levels_than_exists(),
+            should_return_one_deepest_node(),
+            should_return_two_deepest_nodes()
+        ]
+    }.
+
+
+should_merge_with_empty_tree()->
+    One = {1, {"1","foo",[]}},
+    ?_assertMatch({[One], no_conflicts},
+                  couch_key_tree:merge([], One, ?DEPTH)).
+
+should_merge_reflexive()->
+    One = {1, {"1","foo",[]}},
+    ?_assertMatch({[One], no_conflicts},
+                  couch_key_tree:merge([One], One, ?DEPTH)).
+
+should_merge_prefix_of_a_tree_with_tree()->
+    One = {1, {"1","foo",[]}},
+    TwoSibs = [{1, {"1","foo",[]}},
+               {1, {"2","foo",[]}}],
+    ?_assertMatch({TwoSibs, no_conflicts},
+                  couch_key_tree:merge(TwoSibs, One, ?DEPTH)).
+
+should_produce_conflict_on_merge_with_unrelated_branch()->
+    TwoSibs = [{1, {"1","foo",[]}},
+               {1, {"2","foo",[]}}],
+    Three = {1, {"3","foo",[]}},
+    ThreeSibs = [{1, {"1","foo",[]}},
+                 {1, {"2","foo",[]}},
+                 {1, {"3","foo",[]}}],
+    ?_assertMatch({ThreeSibs, conflicts},
+                  couch_key_tree:merge(TwoSibs, Three, ?DEPTH)).
+
+should_merge_reflexive_for_child_nodes()->
+    TwoChild = {1, {"1","foo", [{"1a", "bar", [{"1aa", "bar", []}]}]}},
+    ?_assertMatch({[TwoChild], no_conflicts},
+                  couch_key_tree:merge([TwoChild], TwoChild, ?DEPTH)).
+
+should_merge_tree_to_itself()->
+    TwoChildSibs = {1, {"1","foo", [{"1a", "bar", []},
+                                    {"1b", "bar", []}]}},
+    ?_assertMatch({[TwoChildSibs], no_conflicts},
+                  couch_key_tree:merge([TwoChildSibs], TwoChildSibs, ?DEPTH)).
+
+should_merge_tree_of_odd_length()->
+    TwoChild = {1, {"1","foo", [{"1a", "bar", [{"1aa", "bar", []}]}]}},
+    TwoChildSibs = {1, {"1","foo", [{"1a", "bar", []},
+                                    {"1b", "bar", []}]}},
+    TwoChildPlusSibs = {1, {"1","foo", [{"1a", "bar", [{"1aa", "bar", []}]},
+                                        {"1b", "bar", []}]}},
+
+    ?_assertMatch({[TwoChildPlusSibs], no_conflicts},
+                  couch_key_tree:merge([TwoChild], TwoChildSibs, ?DEPTH)).
+
+should_merge_tree_with_stem()->
+    Stemmed = {2, {"1a", "bar", []}},
+    TwoChildSibs = {1, {"1","foo", [{"1a", "bar", []},
+                                    {"1b", "bar", []}]}},
+
+    ?_assertMatch({[TwoChildSibs], no_conflicts},
+                  couch_key_tree:merge([TwoChildSibs], Stemmed, ?DEPTH)).
+
+should_merge_with_stem_at_deeper_level()->
+    Stemmed = {3, {"1bb", "boo", []}},
+    TwoChildSibs = {1, {"1","foo", [{"1a", "bar", []},
+                                    {"1b", "bar", [{"1bb", "boo", []}]}]}},
+    ?_assertMatch({[TwoChildSibs], no_conflicts},
+                  couch_key_tree:merge([TwoChildSibs], Stemmed, ?DEPTH)).
+
+should_merge_with_stem_at_deeper_level_with_deeper_paths()->
+    Stemmed = {3, {"1bb", "boo", []}},
+    StemmedTwoChildSibs = [{2,{"1a", "bar", []}},
+                           {2,{"1b", "bar", [{"1bb", "boo", []}]}}],
+    ?_assertMatch({StemmedTwoChildSibs, no_conflicts},
+                  couch_key_tree:merge(StemmedTwoChildSibs, Stemmed, ?DEPTH)).
+
+should_merge_single_tree_with_deeper_stem()->
+    Stemmed = {3, {"1aa", "bar", []}},
+    TwoChild = {1, {"1","foo", [{"1a", "bar", [{"1aa", "bar", []}]}]}},
+    ?_assertMatch({[TwoChild], no_conflicts},
+                  couch_key_tree:merge([TwoChild], Stemmed, ?DEPTH)).
+
+should_merge_tree_with_large_stem()->
+    Stemmed = {2, {"1a", "bar", [{"1aa", "bar", []}]}},
+    TwoChild = {1, {"1","foo", [{"1a", "bar", [{"1aa", "bar", []}]}]}},
+    ?_assertMatch({[TwoChild], no_conflicts},
+                  couch_key_tree:merge([TwoChild], Stemmed, ?DEPTH)).
+
+should_merge_stems()->
+    StemmedA = {2, {"1a", "bar", [{"1aa", "bar", []}]}},
+    StemmedB = {3, {"1aa", "bar", []}},
+    ?_assertMatch({[StemmedA], no_conflicts},
+                  couch_key_tree:merge([StemmedA], StemmedB, ?DEPTH)).
+
+should_create_conflicts_on_merge()->
+    OneChild = {1, {"1","foo",[{"1a", "bar", []}]}},
+    Stemmed = {3, {"1aa", "bar", []}},
+    ?_assertMatch({[OneChild, Stemmed], conflicts},
+                  couch_key_tree:merge([OneChild], Stemmed, ?DEPTH)).
+
+should_create_no_conflicts_on_merge()->
+    OneChild = {1, {"1","foo",[{"1a", "bar", []}]}},
+    Stemmed = {3, {"1aa", "bar", []}},
+    TwoChild = {1, {"1","foo", [{"1a", "bar", [{"1aa", "bar", []}]}]}},
+    ?_assertMatch({[TwoChild], no_conflicts},
+                  couch_key_tree:merge([OneChild, Stemmed], TwoChild, ?DEPTH)).
+
+should_ignore_conflicting_branch()->
+    %% this test is based on couch-902-test-case2.py
+    %% foo has conflicts from replication at depth two
+    %% foo3 is the current value
+    Foo = {1, {"foo",
+               "val1",
+               [{"foo2","val2",[]},
+                {"foo3", "val3", []}
+               ]}},
+    %% foo now has an attachment added, which leads to foo4 and val4
+    %% off foo3
+    Bar = {1, {"foo",
+               [],
+               [{"foo3",
+                 [],
+                 [{"foo4","val4",[]}
+                  ]}]}},
+    %% this is what the merge returns
+    %% note that it ignore the conflicting branch as there's no match
+    FooBar = {1, {"foo",
+               "val1",
+               [{"foo2","val2",[]},
+                {"foo3", "val3", [{"foo4","val4",[]}]}
+               ]}},
+    {
+        "COUCHDB-902",
+        ?_assertMatch({[FooBar], no_conflicts},
+                      couch_key_tree:merge([Foo], Bar, ?DEPTH))
+    }.
+
+should_not_find_missing_leaves()->
+    TwoChildSibs = [{0, {"1","foo", [{"1a", "bar", []}, {"1b", "bar", []}]}}],
+    ?_assertMatch([],
+                  couch_key_tree:find_missing(TwoChildSibs,
+                                              [{0,"1"}, {1,"1a"}])).
+
+should_find_missing_leaves()->
+    Stemmed1 = [{1, {"1a", "bar", [{"1aa", "bar", []}]}}],
+    Stemmed2 = [{2, {"1aa", "bar", []}}],
+    TwoChildSibs = [{0, {"1","foo", [{"1a", "bar", []}, {"1b", "bar", []}]}}],
+    [
+        ?_assertMatch(
+            [{0, "10"}, {100, "x"}],
+            couch_key_tree:find_missing(
+                TwoChildSibs,
+                [{0,"1"}, {0, "10"}, {1,"1a"}, {100, "x"}])),
+        ?_assertMatch(
+            [{0, "1"}, {100, "x"}],
+            couch_key_tree:find_missing(
+                Stemmed1,
+                [{0,"1"}, {1,"1a"}, {100, "x"}])),
+        ?_assertMatch(
+            [{0, "1"}, {1,"1a"}, {100, "x"}],
+            couch_key_tree:find_missing(
+                Stemmed2,
+                [{0,"1"}, {1,"1a"}, {100, "x"}]))
+    ].
+
+should_have_no_effect_on_removing_no_leaves()->
+    TwoChildSibs = [{0, {"1","foo", [{"1a", "bar", []}, {"1b", "bar", []}]}}],
+    ?_assertMatch({TwoChildSibs, []},
+                  couch_key_tree:remove_leafs(TwoChildSibs,
+                                              [])).
+
+should_have_no_effect_on_removing_non_existant_branch()->
+    TwoChildSibs = [{0, {"1","foo", [{"1a", "bar", []}, {"1b", "bar", []}]}}],
+    ?_assertMatch({TwoChildSibs, []},
+                  couch_key_tree:remove_leafs(TwoChildSibs,
+                                              [{0, "1"}])).
+
+should_remove_leaf()->
+    OneChild = [{0, {"1","foo",[{"1a", "bar", []}]}}],
+    TwoChildSibs = [{0, {"1","foo", [{"1a", "bar", []}, {"1b", "bar", []}]}}],
+    ?_assertMatch({OneChild, [{1, "1b"}]},
+                  couch_key_tree:remove_leafs(TwoChildSibs,
+                                              [{1, "1b"}])).
+
+should_produce_empty_tree_on_removing_all_leaves()->
+    TwoChildSibs = [{0, {"1","foo", [{"1a", "bar", []}, {"1b", "bar", []}]}}],
+    ?_assertMatch({[], [{1, "1b"}, {1, "1a"}]},
+                  couch_key_tree:remove_leafs(TwoChildSibs,
+                                              [{1, "1b"}, {1, "1a"}])).
+
+should_have_no_effect_on_removing_non_existant_node()->
+    Stemmed = [{1, {"1a", "bar", [{"1aa", "bar", []}]}}],
+    ?_assertMatch({Stemmed, []},
+                  couch_key_tree:remove_leafs(Stemmed,
+                                              [{1, "1a"}])).
+
+should_produce_empty_tree_on_removing_last_leaf()->
+    Stemmed = [{1, {"1a", "bar", [{"1aa", "bar", []}]}}],
+    ?_assertMatch({[], [{2, "1aa"}]},
+                  couch_key_tree:remove_leafs(Stemmed,
+                                              [{2, "1aa"}])).
+
+should_extract_subtree()->
+    TwoChildSibs = [{0, {"1","foo", [{"1a", "bar", []}, {"1b", "bar", []}]}}],
+    ?_assertMatch({[{"foo", {0, ["1"]}}],[]},
+                  couch_key_tree:get(TwoChildSibs, [{0, "1"}])).
+
+should_extract_subsubtree()->
+    TwoChildSibs = [{0, {"1","foo", [{"1a", "bar", []}, {"1b", "bar", []}]}}],
+    ?_assertMatch({[{"bar", {1, ["1a", "1"]}}],[]},
+                  couch_key_tree:get(TwoChildSibs, [{1, "1a"}])).
+
+should_gather_non_existant_leaf()->
+    TwoChildSibs = [{0, {"1","foo", [{"1a", "bar", []}, {"1b", "bar", []}]}}],
+    ?_assertMatch({[],[{0, "x"}]},
+                  couch_key_tree:get_key_leafs(TwoChildSibs, [{0, "x"}])).
+
+should_gather_leaf()->
+    TwoChildSibs = [{0, {"1","foo", [{"1a", "bar", []}, {"1b", "bar", []}]}}],
+    ?_assertMatch({[{"bar", {1, ["1a","1"]}}],[]},
+                  couch_key_tree:get_key_leafs(TwoChildSibs, [{1, "1a"}])).
+
+shoul_gather_multiple_leaves()->
+    TwoChildSibs = [{0, {"1","foo", [{"1a", "bar", []}, {"1b", "bar", []}]}}],
+    ?_assertMatch({[{"bar", {1, ["1a","1"]}},{"bar",{1, ["1b","1"]}}],[]},
+                  couch_key_tree:get_key_leafs(TwoChildSibs, [{0, "1"}])).
+
+should_retrieve_full_key_path()->
+    TwoChildSibs = [{0, {"1","foo", [{"1a", "bar", []}, {"1b", "bar", []}]}}],
+    ?_assertMatch({[{0,[{"1", "foo"}]}],[]},
+                  couch_key_tree:get_full_key_paths(TwoChildSibs, [{0, "1"}])).
+
+should_retrieve_full_key_path_for_node()->
+    TwoChildSibs = [{0, {"1","foo", [{"1a", "bar", []}, {"1b", "bar", []}]}}],
+    ?_assertMatch({[{1,[{"1a", "bar"},{"1", "foo"}]}],[]},
+                  couch_key_tree:get_full_key_paths(TwoChildSibs, [{1, "1a"}])).
+
+should_retrieve_leaves_with_parent_node()->
+    Stemmed = [{1, {"1a", "bar", [{"1aa", "bar", []}]}}],
+    TwoChildSibs = [{0, {"1","foo", [{"1a", "bar", []}, {"1b", "bar", []}]}}],
+    [
+        ?_assertMatch([{2, [{"1aa", "bar"},{"1a", "bar"}]}],
+                      couch_key_tree:get_all_leafs_full(Stemmed)),
+        ?_assertMatch([{1, [{"1a", "bar"},{"1", "foo"}]},
+                       {1, [{"1b", "bar"},{"1", "foo"}]}],
+                      couch_key_tree:get_all_leafs_full(TwoChildSibs))
+    ].
+
+should_retrieve_all_leaves()->
+    Stemmed = [{1, {"1a", "bar", [{"1aa", "bar", []}]}}],
+    TwoChildSibs = [{0, {"1","foo", [{"1a", "bar", []}, {"1b", "bar", []}]}}],
+    [
+        ?_assertMatch([{"bar", {2, ["1aa","1a"]}}],
+                      couch_key_tree:get_all_leafs(Stemmed)),
+        ?_assertMatch([{"bar", {1, ["1a", "1"]}}, {"bar", {1, ["1b","1"]}}],
+                      couch_key_tree:get_all_leafs(TwoChildSibs))
+    ].
+
+should_have_no_leaves_for_empty_tree()->
+    ?_assertEqual(0, couch_key_tree:count_leafs([])).
+
+should_have_single_leaf_for_tree_with_single_node()->
+    ?_assertEqual(1, couch_key_tree:count_leafs([{0, {"1","foo",[]}}])).
+
+should_have_two_leaves_for_tree_with_chindler_siblings()->
+    TwoChildSibs = [{0, {"1","foo", [{"1a", "bar", []}, {"1b", "bar", []}]}}],
+    ?_assertEqual(2, couch_key_tree:count_leafs(TwoChildSibs)).
+
+should_not_affect_on_leaf_counting_for_stemmed_tree()->
+    ?_assertEqual(1, couch_key_tree:count_leafs([{2, {"1bb", "boo", []}}])).
+
+should_have_no_effect_for_stemming_more_levels_than_exists()->
+    TwoChild = [{0, {"1","foo", [{"1a", "bar", [{"1aa", "bar", []}]}]}}],
+    ?_assertMatch(TwoChild, couch_key_tree:stem(TwoChild, 3)).
+
+should_return_one_deepest_node()->
+    TwoChild = [{0, {"1","foo", [{"1a", "bar", [{"1aa", "bar", []}]}]}}],
+    Stemmed = [{2, {"1aa", "bar", []}}],
+    ?_assertMatch(Stemmed, couch_key_tree:stem(TwoChild, 1)).
+
+should_return_two_deepest_nodes()->
+    TwoChild = [{0, {"1","foo", [{"1a", "bar", [{"1aa", "bar", []}]}]}}],
+    Stemmed = [{1, {"1a", "bar", [{"1aa", "bar", []}]}}],
+    ?_assertMatch(Stemmed, couch_key_tree:stem(TwoChild, 2)).
+

http://git-wip-us.apache.org/repos/asf/couchdb/blob/5f6a0b66/test/etap/060-kt-merging.t
----------------------------------------------------------------------
diff --git a/test/etap/060-kt-merging.t b/test/etap/060-kt-merging.t
deleted file mode 100755
index efbdbf6..0000000
--- a/test/etap/060-kt-merging.t
+++ /dev/null
@@ -1,176 +0,0 @@
-#!/usr/bin/env escript
-%% -*- erlang -*-
-
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
-main(_) ->
-    test_util:init_code_path(),
-    etap:plan(16),
-    case (catch test()) of
-        ok ->
-            etap:end_tests();
-        Other ->
-            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
-            etap:bail(Other)
-    end,
-    ok.
-
-test() ->
-    One = {1, {"1","foo",[]}},
-
-    etap:is(
-        {[One], no_conflicts},
-        couch_key_tree:merge([], One, 10),
-        "The empty tree is the identity for merge."
-    ),
-    etap:is(
-        {[One], no_conflicts},
-        couch_key_tree:merge([One], One, 10),
-        "Merging is reflexive."
-    ),
-
-    TwoSibs = [{1, {"1","foo",[]}},
-               {1, {"2","foo",[]}}],
-
-    etap:is(
-        {TwoSibs, no_conflicts},
-        couch_key_tree:merge(TwoSibs, One, 10),
-        "Merging a prefix of a tree with the tree yields the tree."
-    ),
-
-    Three = {1, {"3","foo",[]}},
-    ThreeSibs = [{1, {"1","foo",[]}},
-                 {1, {"2","foo",[]}},
-                 {1, {"3","foo",[]}}],
-
-    etap:is(
-        {ThreeSibs, conflicts},
-        couch_key_tree:merge(TwoSibs, Three, 10),
-        "Merging a third unrelated branch leads to a conflict."
-    ),
-
-
-    TwoChild = {1, {"1","foo", [{"1a", "bar", [{"1aa", "bar", []}]}]}},
-
-    etap:is(
-        {[TwoChild], no_conflicts},
-        couch_key_tree:merge([TwoChild], TwoChild, 10),
-        "Merging two children is still reflexive."
-    ),
-
-    TwoChildSibs = {1, {"1","foo", [{"1a", "bar", []},
-                                     {"1b", "bar", []}]}},
-    etap:is(
-        {[TwoChildSibs], no_conflicts},
-        couch_key_tree:merge([TwoChildSibs], TwoChildSibs, 10),
-        "Merging a tree to itself is itself."),
-
-    TwoChildPlusSibs =
-        {1, {"1","foo", [{"1a", "bar", [{"1aa", "bar", []}]},
-                         {"1b", "bar", []}]}},
-
-    etap:is(
-        {[TwoChildPlusSibs], no_conflicts},
-        couch_key_tree:merge([TwoChild], TwoChildSibs, 10),
-        "Merging tree of uneven length at node 2."),
-
-    Stemmed1b = {2, {"1a", "bar", []}},
-    etap:is(
-        {[TwoChildSibs], no_conflicts},
-        couch_key_tree:merge([TwoChildSibs], Stemmed1b, 10),
-        "Merging a tree with a stem."
-    ),
-
-    TwoChildSibs2 = {1, {"1","foo", [{"1a", "bar", []},
-                                     {"1b", "bar", [{"1bb", "boo", []}]}]}},
-    Stemmed1bb = {3, {"1bb", "boo", []}},
-    etap:is(
-        {[TwoChildSibs2], no_conflicts},
-        couch_key_tree:merge([TwoChildSibs2], Stemmed1bb, 10),
-        "Merging a stem at a deeper level."
-    ),
-
-    StemmedTwoChildSibs2 = [{2,{"1a", "bar", []}},
-                            {2,{"1b", "bar", [{"1bb", "boo", []}]}}],
-
-    etap:is(
-        {StemmedTwoChildSibs2, no_conflicts},
-        couch_key_tree:merge(StemmedTwoChildSibs2, Stemmed1bb, 10),
-        "Merging a stem at a deeper level against paths at deeper levels."
-    ),
-
-    Stemmed1aa = {3, {"1aa", "bar", []}},
-    etap:is(
-        {[TwoChild], no_conflicts},
-        couch_key_tree:merge([TwoChild], Stemmed1aa, 10),
-        "Merging a single tree with a deeper stem."
-    ),
-
-    Stemmed1a = {2, {"1a", "bar", [{"1aa", "bar", []}]}},
-    etap:is(
-        {[TwoChild], no_conflicts},
-        couch_key_tree:merge([TwoChild], Stemmed1a, 10),
-        "Merging a larger stem."
-    ),
-
-    etap:is(
-        {[Stemmed1a], no_conflicts},
-        couch_key_tree:merge([Stemmed1a], Stemmed1aa, 10),
-        "More merging."
-    ),
-
-    OneChild = {1, {"1","foo",[{"1a", "bar", []}]}},
-    Expect1 = [OneChild, Stemmed1aa],
-    etap:is(
-        {Expect1, conflicts},
-        couch_key_tree:merge([OneChild], Stemmed1aa, 10),
-        "Merging should create conflicts."
-    ),
-
-    etap:is(
-        {[TwoChild], no_conflicts},
-        couch_key_tree:merge(Expect1, TwoChild, 10),
-        "Merge should have no conflicts."
-    ),
-
-    %% this test is based on couch-902-test-case2.py
-    %% foo has conflicts from replication at depth two
-    %% foo3 is the current value
-    Foo = {1, {"foo",
-               "val1",
-               [{"foo2","val2",[]},
-                {"foo3", "val3", []}
-               ]}},
-    %% foo now has an attachment added, which leads to foo4 and val4
-    %% off foo3
-    Bar = {1, {"foo",
-               [],
-               [{"foo3",
-                 [],
-                 [{"foo4","val4",[]}
-                  ]}]}},
-    %% this is what the merge returns
-    %% note that it ignore the conflicting branch as there's no match
-    FooBar = {1, {"foo",
-               "val1",
-               [{"foo2","val2",[]},
-                {"foo3", "val3", [{"foo4","val4",[]}]}
-               ]}},
-
-    etap:is(
-      {[FooBar], no_conflicts},
-      couch_key_tree:merge([Foo],Bar,10),
-      "Merging trees with conflicts ought to behave."
-    ),
-
-    ok.

http://git-wip-us.apache.org/repos/asf/couchdb/blob/5f6a0b66/test/etap/061-kt-missing-leaves.t
----------------------------------------------------------------------
diff --git a/test/etap/061-kt-missing-leaves.t b/test/etap/061-kt-missing-leaves.t
deleted file mode 100755
index d60b4db..0000000
--- a/test/etap/061-kt-missing-leaves.t
+++ /dev/null
@@ -1,65 +0,0 @@
-#!/usr/bin/env escript
-%% -*- erlang -*-
-
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
-main(_) ->
-    test_util:init_code_path(),
-    etap:plan(4),
-    case (catch test()) of
-        ok ->
-            etap:end_tests();
-        Other ->
-            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
-            etap:bail(Other)
-    end,
-    ok.
-
-test() ->
-    TwoChildSibs = [{0, {"1","foo", [{"1a", "bar", []}, {"1b", "bar", []}]}}],
-    Stemmed1 = [{1, {"1a", "bar", [{"1aa", "bar", []}]}}],
-    Stemmed2 = [{2, {"1aa", "bar", []}}],
-
-    etap:is(
-        [],
-        couch_key_tree:find_missing(TwoChildSibs, [{0,"1"}, {1,"1a"}]),
-        "Look for missing keys."
-    ),
-
-    etap:is(
-        [{0, "10"}, {100, "x"}],
-        couch_key_tree:find_missing(
-            TwoChildSibs,
-            [{0,"1"}, {0, "10"}, {1,"1a"}, {100, "x"}]
-        ),
-        "Look for missing keys."
-    ),
-
-    etap:is(
-        [{0, "1"}, {100, "x"}],
-        couch_key_tree:find_missing(
-            Stemmed1,
-            [{0,"1"}, {1,"1a"}, {100, "x"}]
-        ),
-        "Look for missing keys."
-    ),
-    etap:is(
-        [{0, "1"}, {1,"1a"}, {100, "x"}],
-        couch_key_tree:find_missing(
-            Stemmed2,
-            [{0,"1"}, {1,"1a"}, {100, "x"}]
-        ),
-        "Look for missing keys."
-    ),
-
-    ok.

http://git-wip-us.apache.org/repos/asf/couchdb/blob/5f6a0b66/test/etap/062-kt-remove-leaves.t
----------------------------------------------------------------------
diff --git a/test/etap/062-kt-remove-leaves.t b/test/etap/062-kt-remove-leaves.t
deleted file mode 100755
index 745a00b..0000000
--- a/test/etap/062-kt-remove-leaves.t
+++ /dev/null
@@ -1,69 +0,0 @@
-#!/usr/bin/env escript
-%% -*- erlang -*-
-
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
-main(_) ->
-    test_util:init_code_path(),
-    etap:plan(6),
-    case (catch test()) of
-        ok ->
-            etap:end_tests();
-        Other ->
-            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
-            etap:bail(Other)
-    end,
-    ok.
-
-test() ->
-    OneChild = [{0, {"1","foo",[{"1a", "bar", []}]}}],
-    TwoChildSibs = [{0, {"1","foo", [{"1a", "bar", []}, {"1b", "bar", []}]}}],
-    Stemmed = [{1, {"1a", "bar", [{"1aa", "bar", []}]}}],
-
-    etap:is(
-        {TwoChildSibs, []},
-        couch_key_tree:remove_leafs(TwoChildSibs, []),
-        "Removing no leaves has no effect on the tree."
-    ),
-
-    etap:is(
-        {TwoChildSibs, []},
-        couch_key_tree:remove_leafs(TwoChildSibs, [{0, "1"}]),
-        "Removing a non-existant branch has no effect."
-    ),
-
-    etap:is(
-        {OneChild, [{1, "1b"}]},
-        couch_key_tree:remove_leafs(TwoChildSibs, [{1, "1b"}]),
-        "Removing a leaf removes the leaf."
-    ),
-
-    etap:is(
-        {[], [{1, "1b"},{1, "1a"}]},
-        couch_key_tree:remove_leafs(TwoChildSibs, [{1, "1a"}, {1, "1b"}]),
-        "Removing all leaves returns an empty tree."
-    ),
-
-    etap:is(
-        {Stemmed, []},
-        couch_key_tree:remove_leafs(Stemmed, [{1, "1a"}]),
-        "Removing a non-existant node has no effect."
-    ),
-
-    etap:is(
-        {[], [{2, "1aa"}]},
-        couch_key_tree:remove_leafs(Stemmed, [{2, "1aa"}]),
-        "Removing the last leaf returns an empty tree."
-    ),
-
-    ok.

http://git-wip-us.apache.org/repos/asf/couchdb/blob/5f6a0b66/test/etap/063-kt-get-leaves.t
----------------------------------------------------------------------
diff --git a/test/etap/063-kt-get-leaves.t b/test/etap/063-kt-get-leaves.t
deleted file mode 100755
index 6d4e800..0000000
--- a/test/etap/063-kt-get-leaves.t
+++ /dev/null
@@ -1,98 +0,0 @@
-#!/usr/bin/env escript
-%% -*- erlang -*-
-
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
-main(_) ->
-    test_util:init_code_path(),
-    etap:plan(11),
-    case (catch test()) of
-        ok ->
-            etap:end_tests();
-        Other ->
-            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
-            etap:bail(Other)
-    end,
-    ok.
-
-test() ->
-    TwoChildSibs = [{0, {"1","foo", [{"1a", "bar", []}, {"1b", "bar", []}]}}],
-    Stemmed = [{1, {"1a", "bar", [{"1aa", "bar", []}]}}],
-
-    etap:is(
-        {[{"foo", {0, ["1"]}}],[]},
-        couch_key_tree:get(TwoChildSibs, [{0, "1"}]),
-        "extract a subtree."
-    ),
-
-    etap:is(
-        {[{"bar", {1, ["1a", "1"]}}],[]},
-        couch_key_tree:get(TwoChildSibs, [{1, "1a"}]),
-        "extract a subtree."
-    ),
-
-    etap:is(
-        {[],[{0,"x"}]},
-        couch_key_tree:get_key_leafs(TwoChildSibs, [{0, "x"}]),
-        "gather up the leaves."
-    ),
-
-    etap:is(
-        {[{"bar", {1, ["1a","1"]}}],[]},
-        couch_key_tree:get_key_leafs(TwoChildSibs, [{1, "1a"}]),
-        "gather up the leaves."
-    ),
-
-    etap:is(
-        {[{"bar", {1, ["1a","1"]}},{"bar",{1, ["1b","1"]}}],[]},
-        couch_key_tree:get_key_leafs(TwoChildSibs, [{0, "1"}]),
-        "gather up the leaves."
-    ),
-
-    etap:is(
-        {[{0,[{"1", "foo"}]}],[]},
-        couch_key_tree:get_full_key_paths(TwoChildSibs, [{0, "1"}]),
-        "retrieve full key paths."
-    ),
-
-    etap:is(
-        {[{1,[{"1a", "bar"},{"1", "foo"}]}],[]},
-        couch_key_tree:get_full_key_paths(TwoChildSibs, [{1, "1a"}]),
-        "retrieve full key paths."
-    ),
-
-    etap:is(
-        [{2, [{"1aa", "bar"},{"1a", "bar"}]}],
-        couch_key_tree:get_all_leafs_full(Stemmed),
-        "retrieve all leaves."
-    ),
-
-    etap:is(
-        [{1, [{"1a", "bar"},{"1", "foo"}]}, {1, [{"1b", "bar"},{"1", "foo"}]}],
-        couch_key_tree:get_all_leafs_full(TwoChildSibs),
-        "retrieve all the leaves."
-    ),
-
-    etap:is(
-        [{"bar", {2, ["1aa","1a"]}}],
-        couch_key_tree:get_all_leafs(Stemmed),
-        "retrieve all leaves."
-    ),
-
-    etap:is(
-        [{"bar", {1, ["1a", "1"]}}, {"bar", {1, ["1b","1"]}}],
-        couch_key_tree:get_all_leafs(TwoChildSibs),
-        "retrieve all the leaves."
-    ),
-
-    ok.

http://git-wip-us.apache.org/repos/asf/couchdb/blob/5f6a0b66/test/etap/064-kt-counting.t
----------------------------------------------------------------------
diff --git a/test/etap/064-kt-counting.t b/test/etap/064-kt-counting.t
deleted file mode 100755
index f182d28..0000000
--- a/test/etap/064-kt-counting.t
+++ /dev/null
@@ -1,46 +0,0 @@
-#!/usr/bin/env escript
-%% -*- erlang -*-
-
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
-main(_) ->
-    test_util:init_code_path(),
-    etap:plan(4),
-    case (catch test()) of
-        ok ->
-            etap:end_tests();
-        Other ->
-            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
-            etap:bail(Other)
-    end,
-    ok.
-
-test() ->
-    EmptyTree = [],
-    One = [{0, {"1","foo",[]}}],
-    TwoChildSibs = [{0, {"1","foo", [{"1a", "bar", []}, {"1b", "bar", []}]}}],
-    Stemmed = [{2, {"1bb", "boo", []}}],
-
-    etap:is(0, couch_key_tree:count_leafs(EmptyTree),
-        "Empty trees have no leaves."),
-
-    etap:is(1, couch_key_tree:count_leafs(One),
-        "Single node trees have a single leaf."),
-
-    etap:is(2, couch_key_tree:count_leafs(TwoChildSibs),
-        "Two children siblings counted as two leaves."),
-
-    etap:is(1, couch_key_tree:count_leafs(Stemmed),
-        "Stemming does not affect leaf counting."),
-
-    ok.

http://git-wip-us.apache.org/repos/asf/couchdb/blob/5f6a0b66/test/etap/065-kt-stemming.t
----------------------------------------------------------------------
diff --git a/test/etap/065-kt-stemming.t b/test/etap/065-kt-stemming.t
deleted file mode 100755
index 6e781c1..0000000
--- a/test/etap/065-kt-stemming.t
+++ /dev/null
@@ -1,42 +0,0 @@
-#!/usr/bin/env escript
-%% -*- erlang -*-
-
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
-main(_) ->
-    test_util:init_code_path(),
-    etap:plan(3),
-    case (catch test()) of
-        ok ->
-            etap:end_tests();
-        Other ->
-            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
-            etap:bail(Other)
-    end,
-    ok.
-
-test() ->
-    TwoChild = [{0, {"1","foo", [{"1a", "bar", [{"1aa", "bar", []}]}]}}],
-    Stemmed1 = [{1, {"1a", "bar", [{"1aa", "bar", []}]}}],
-    Stemmed2 = [{2, {"1aa", "bar", []}}],
-
-    etap:is(TwoChild, couch_key_tree:stem(TwoChild, 3),
-        "Stemming more levels than what exists does nothing."),
-
-    etap:is(Stemmed1, couch_key_tree:stem(TwoChild, 2),
-        "Stemming with a depth of two returns the deepest two nodes."),
-
-    etap:is(Stemmed2, couch_key_tree:stem(TwoChild, 1),
-        "Stemming to a depth of one returns the deepest node."),
-
-    ok.

http://git-wip-us.apache.org/repos/asf/couchdb/blob/5f6a0b66/test/etap/Makefile.am
----------------------------------------------------------------------
diff --git a/test/etap/Makefile.am b/test/etap/Makefile.am
index b6ec287..fe50da3 100644
--- a/test/etap/Makefile.am
+++ b/test/etap/Makefile.am
@@ -36,12 +36,6 @@ fixture_files = \
     fixtures/test.couch
 
 tap_files = \
-    060-kt-merging.t \
-    061-kt-missing-leaves.t \
-    062-kt-remove-leaves.t \
-    063-kt-get-leaves.t \
-    064-kt-counting.t \
-    065-kt-stemming.t \
     070-couch-db.t \
     072-cleanup.t \
     073-changes.t \


[23/36] couchdb commit: updated refs/heads/1963-eunit to 85f2750

Posted by kx...@apache.org.
Port 082-config-register.t etap test suite to eunit

Merged into couch_config_tests suite.


Project: http://git-wip-us.apache.org/repos/asf/couchdb/repo
Commit: http://git-wip-us.apache.org/repos/asf/couchdb/commit/63ba46d1
Tree: http://git-wip-us.apache.org/repos/asf/couchdb/tree/63ba46d1
Diff: http://git-wip-us.apache.org/repos/asf/couchdb/diff/63ba46d1

Branch: refs/heads/1963-eunit
Commit: 63ba46d1336969070cac796a9d1ffd4e0d7cee0f
Parents: 3e66627
Author: Alexander Shorin <kx...@apache.org>
Authored: Mon May 26 09:26:22 2014 +0400
Committer: Alexander Shorin <kx...@apache.org>
Committed: Tue Jun 3 19:50:12 2014 +0400

----------------------------------------------------------------------
 test/couchdb/couch_config_tests.erl | 150 ++++++++++++++++++++++++++++++-
 test/etap/082-config-register.t     |  94 -------------------
 test/etap/Makefile.am               |   1 -
 3 files changed, 149 insertions(+), 96 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/couchdb/blob/63ba46d1/test/couchdb/couch_config_tests.erl
----------------------------------------------------------------------
diff --git a/test/couchdb/couch_config_tests.erl b/test/couchdb/couch_config_tests.erl
index 401da58..1e5d99e 100644
--- a/test/couchdb/couch_config_tests.erl
+++ b/test/couchdb/couch_config_tests.erl
@@ -41,6 +41,32 @@ setup(Chain) ->
     {ok, Pid} = couch_config:start_link(Chain),
     Pid.
 
+setup_register() ->
+    ConfigPid = setup(),
+    SentinelFunc = fun() ->
+        % Ping/Pong to make sure we wait for this
+        % process to die
+        receive
+            {ping, From} ->
+                From ! pong
+        end
+    end,
+    SentinelPid = spawn(SentinelFunc),
+    {ConfigPid, SentinelPid}.
+
+teardown({ConfigPid, SentinelPid}) ->
+    teardown(ConfigPid),
+    case process_info(SentinelPid) of
+        undefined -> ok;
+        _ ->
+            SentinelPid ! {ping, self()},
+            receive
+                pong ->
+                    ok
+            after 100 ->
+                throw({timeout_error, registered_pid})
+            end
+    end;
 teardown(Pid) ->
     couch_config:stop(),
     erlang:monitor(process, Pid),
@@ -62,7 +88,8 @@ couch_config_test_() ->
             couch_config_set_tests(),
             couch_config_del_tests(),
             config_override_tests(),
-            config_persistent_changes_tests()
+            config_persistent_changes_tests(),
+            config_register_tests()
         ]
     }.
 
@@ -150,6 +177,21 @@ config_persistent_changes_tests() ->
         }
     }.
 
+config_register_tests() ->
+    {
+        "Config changes subscriber",
+        {
+            foreach,
+            fun setup_register/0, fun teardown/1,
+            [
+                fun should_handle_port_changes/1,
+                fun should_pass_persistent_flag/1,
+                fun should_not_trigger_handler_on_other_options_changes/1,
+                fun should_not_trigger_handler_after_related_process_death/1
+            ]
+        }
+    }.
+
 
 should_load_all_configs() ->
     ?_assert(length(couch_config:all()) > 0).
@@ -281,6 +323,7 @@ should_write_changes(_, _) ->
         end).
 
 should_ensure_that_default_wasnt_modified(_, _) ->
+    %% depended on should_write_changes test
     ?_assert(
         begin
             ?assertEqual("5984",
@@ -291,6 +334,7 @@ should_ensure_that_default_wasnt_modified(_, _) ->
         end).
 
 should_ensure_that_written_to_last_config_in_chain(_, _) ->
+    %% depended on should_write_changes test
     ?_assert(
         begin
             ?assertEqual("8080",
@@ -299,3 +343,107 @@ should_ensure_that_written_to_last_config_in_chain(_, _) ->
                          couch_config:get("httpd", "bind_address")),
             true
         end).
+
+should_handle_port_changes({_, SentinelPid}) ->
+    ?_assert(
+        begin
+            MainProc = self(),
+            Port = "8080",
+
+            couch_config:register(
+                fun("httpd", "port", Value) ->
+                    % couch_config catches every error raised from handler
+                    % so it's not possible to just assert on wrong value.
+                    % We have to return the result as message
+                    MainProc ! (Value =:= Port)
+                end,
+                SentinelPid
+            ),
+            ok = couch_config:set("httpd", "port", Port, false),
+
+            receive
+                R ->
+                    R
+            after 1000 ->
+                throw({timeout_error, registered_pid})
+            end
+        end
+    ).
+
+should_pass_persistent_flag({_, SentinelPid}) ->
+    ?_assert(
+        begin
+            MainProc = self(),
+
+            couch_config:register(
+                fun("httpd", "port", _, Persist) ->
+                    % couch_config catches every error raised from handler
+                    % so it's not possible to just assert on wrong value.
+                    % We have to return the result as message
+                    MainProc ! Persist
+                end,
+                SentinelPid
+            ),
+            ok = couch_config:set("httpd", "port", "8080", false),
+
+            receive
+                false ->
+                    true
+            after 100 ->
+                false
+            end
+        end
+    ).
+
+should_not_trigger_handler_on_other_options_changes({_, SentinelPid}) ->
+    ?_assert(
+        begin
+            MainProc = self(),
+
+            couch_config:register(
+                fun("httpd", "port", _) ->
+                    MainProc ! ok
+                end,
+                SentinelPid
+            ),
+            ok = couch_config:set("httpd", "bind_address", "0.0.0.0", false),
+
+            receive
+                ok ->
+                    false
+            after 100 ->
+                true
+            end
+        end
+    ).
+
+should_not_trigger_handler_after_related_process_death({_, SentinelPid}) ->
+    ?_assert(
+        begin
+            MainProc = self(),
+
+            couch_config:register(
+                fun("httpd", "port", _) ->
+                    MainProc ! ok
+                end,
+                SentinelPid
+            ),
+
+            SentinelPid ! {ping, MainProc},
+            receive
+                pong ->
+                    ok
+            after 100 ->
+                throw({timeout_error, registered_pid})
+            end,
+
+            ok = couch_config:set("httpd", "port", "12345", false),
+
+            receive
+                ok ->
+                    false
+            after 100 ->
+                true
+            end
+        end
+    ).

http://git-wip-us.apache.org/repos/asf/couchdb/blob/63ba46d1/test/etap/082-config-register.t
----------------------------------------------------------------------
diff --git a/test/etap/082-config-register.t b/test/etap/082-config-register.t
deleted file mode 100755
index 191ba8f..0000000
--- a/test/etap/082-config-register.t
+++ /dev/null
@@ -1,94 +0,0 @@
-#!/usr/bin/env escript
-%% -*- erlang -*-
-
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
-default_config() ->
-    test_util:build_file("etc/couchdb/default_dev.ini").
-
-main(_) ->
-    test_util:init_code_path(),
-    etap:plan(5),
-    case (catch test()) of
-        ok ->
-            etap:end_tests();
-        Other ->
-            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
-            etap:bail(Other)
-    end,
-    ok.
-
-test() ->
-    couch_config:start_link([default_config()]),
-
-    etap:is(
-        couch_config:get("httpd", "port"),
-        "5984",
-        "{httpd, port} is 5984 by default."
-    ),
-
-    ok = couch_config:set("httpd", "port", "4895", false),
-
-    etap:is(
-        couch_config:get("httpd", "port"),
-        "4895",
-        "{httpd, port} changed to 4895"
-    ),
-
-    SentinelFunc = fun() ->
-        % Ping/Pong to make sure we wait for this
-        % process to die
-        receive {ping, From} -> From ! pong end
-    end,
-    SentinelPid = spawn(SentinelFunc),
-
-    couch_config:register(
-        fun("httpd", "port", Value) ->
-            etap:is(Value, "8080", "Registered function got notification.")
-        end,
-        SentinelPid
-    ),
-
-    ok = couch_config:set("httpd", "port", "8080", false),
-
-    % Implicitly checking that we *don't* call the function
-    etap:is(
-        couch_config:get("httpd", "bind_address"),
-        "127.0.0.1",
-        "{httpd, bind_address} is not '0.0.0.0'"
-    ),
-    ok = couch_config:set("httpd", "bind_address", "0.0.0.0", false),
-
-    % Ping-Pong kill process
-    SentinelPid ! {ping, self()},
-    receive
-        _Any -> ok
-    after 1000 ->
-        throw({timeout_error, registered_pid})
-    end,
-
-    ok = couch_config:set("httpd", "port", "80", false),
-    etap:is(
-        couch_config:get("httpd", "port"),
-        "80",
-        "Implicitly test that the function got de-registered"
-    ),
-
-    % test passing of Persist flag
-    couch_config:register(
-        fun("httpd", _, _, Persist) ->
-            etap:is(Persist, false)
-        end),
-    ok = couch_config:set("httpd", "port", "80", false),
-
-    ok.

http://git-wip-us.apache.org/repos/asf/couchdb/blob/63ba46d1/test/etap/Makefile.am
----------------------------------------------------------------------
diff --git a/test/etap/Makefile.am b/test/etap/Makefile.am
index 07583ac..25889f4 100644
--- a/test/etap/Makefile.am
+++ b/test/etap/Makefile.am
@@ -36,7 +36,6 @@ fixture_files = \
     fixtures/test.couch
 
 tap_files = \
-    082-config-register.t \
     083-config-no-files.t \
     090-task-status.t \
     100-ref-counter.t \


[30/36] couchdb commit: updated refs/heads/1963-eunit to 85f2750

Posted by kx...@apache.org.
Port 140-attachments-comp.t etap test suite to eunit

- Merge into couchdb_attachments_tests suite;
- Add PUT requests to test_request util;
- Remove dependency from files outside fixtures directory;
- Group test cases to reduce amount of duplicate code;
- Fix hidden issue with gzip encoding: for encoding_length stub info
  check using zlib:gzip on 2KiB+ files leads to mismatch by 2-4 bytes
  and this difference grows with file size. Using gzip fun code from
  couch_stream solves the issue.


Project: http://git-wip-us.apache.org/repos/asf/couchdb/repo
Commit: http://git-wip-us.apache.org/repos/asf/couchdb/commit/a36d6305
Tree: http://git-wip-us.apache.org/repos/asf/couchdb/tree/a36d6305
Diff: http://git-wip-us.apache.org/repos/asf/couchdb/diff/a36d6305

Branch: refs/heads/1963-eunit
Commit: a36d6305003cc5188c472997bdb3b436284de097
Parents: f9199df
Author: Alexander Shorin <kx...@apache.org>
Authored: Tue Jun 3 01:51:46 2014 +0400
Committer: Alexander Shorin <kx...@apache.org>
Committed: Tue Jun 3 19:52:48 2014 +0400

----------------------------------------------------------------------
 license.skip                               |   1 +
 test/couchdb/Makefile.am                   |   3 +-
 test/couchdb/couchdb_attachments_tests.erl | 429 +++++++++++++-
 test/couchdb/fixtures/logo.png             | Bin 0 -> 3010 bytes
 test/couchdb/test_request.erl              |   9 +-
 test/etap/140-attachment-comp.t            | 728 ------------------------
 test/etap/Makefile.am                      |   1 -
 7 files changed, 413 insertions(+), 758 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/couchdb/blob/a36d6305/license.skip
----------------------------------------------------------------------
diff --git a/license.skip b/license.skip
index 5d10e6c..b069451 100644
--- a/license.skip
+++ b/license.skip
@@ -170,6 +170,7 @@
 ^test/bench/Makefile.in
 ^test/couchdb/Makefile
 ^test/couchdb/Makefile.in
+^test/couchdb/fixtures/logo.png
 ^test/etap/.*.beam
 ^test/etap/.*.o
 ^test/etap/.deps/.*

http://git-wip-us.apache.org/repos/asf/couchdb/blob/a36d6305/test/couchdb/Makefile.am
----------------------------------------------------------------------
diff --git a/test/couchdb/Makefile.am b/test/couchdb/Makefile.am
index 563a916..ccfcbcf 100644
--- a/test/couchdb/Makefile.am
+++ b/test/couchdb/Makefile.am
@@ -45,7 +45,8 @@ fixture_files = \
     fixtures/couch_config_tests_1.ini \
     fixtures/couch_config_tests_2.ini \
     fixtures/couch_stats_aggregates.cfg \
-    fixtures/couch_stats_aggregates.ini
+    fixtures/couch_stats_aggregates.ini \
+    fixtures/logo.png
 
 EXTRA_DIST = \
     run.in \

http://git-wip-us.apache.org/repos/asf/couchdb/blob/a36d6305/test/couchdb/couchdb_attachments_tests.erl
----------------------------------------------------------------------
diff --git a/test/couchdb/couchdb_attachments_tests.erl b/test/couchdb/couchdb_attachments_tests.erl
index d5a49ee..4b88a7e 100644
--- a/test/couchdb/couchdb_attachments_tests.erl
+++ b/test/couchdb/couchdb_attachments_tests.erl
@@ -15,6 +15,11 @@
 -include_lib("../../src/couchdb/couch_db.hrl").
 -include_lib("couchdb_tests.hrl").
 
+-define(COMPRESSION_LEVEL, 8).
+-define(ATT_BIN_NAME, <<"logo.png">>).
+-define(ATT_TXT_NAME, <<"file.erl">>).
+-define(FIXTURE_PNG, filename:join([?FIXTURESDIR, "logo.png"])).
+-define(FIXTURE_TXT, ?FILE).
 -define(TIMEWAIT, 100).
 -define(i2l(I), integer_to_list(I)).
 
@@ -23,6 +28,10 @@ start() ->
     couch_server_sup:start_link(?CONFIG_CHAIN),
     % disable logging to reduce noise in stdout
     couch_config:set("log", "level", "none", false),
+    % ensure in default compression settings for attachments_compression_tests
+    couch_config:set("attachments", "compression_level",
+                     ?i2l(?COMPRESSION_LEVEL), false),
+    couch_config:set("attachments", "compressible_types", "text/*", false),
     ok.
 
 stop(_) ->
@@ -38,7 +47,35 @@ setup() ->
     Host = Addr ++ ":" ++ ?i2l(Port),
     {Host, ?b2l(DbName)}.
 
+setup({binary, standalone}) ->
+    {Host, DbName} = setup(),
+        setup_att(fun create_standalone_png_att/2, Host, DbName, ?FIXTURE_PNG);
+setup({text, standalone}) ->
+    {Host, DbName} = setup(),
+    setup_att(fun create_standalone_text_att/2, Host, DbName, ?FIXTURE_TXT);
+setup({binary, inline}) ->
+    {Host, DbName} = setup(),
+    setup_att(fun create_inline_png_att/2, Host, DbName, ?FIXTURE_PNG);
+setup({text, inline}) ->
+    {Host, DbName} = setup(),
+    setup_att(fun create_inline_text_att/2, Host, DbName, ?FIXTURE_TXT);
+setup(compressed) ->
+    {Host, DbName} = setup(),
+    setup_att(fun create_already_compressed_att/2, Host, DbName, ?FIXTURE_TXT).
+setup_att(Fun, Host, DbName, File) ->
+    HttpHost = "http://" ++ Host,
+    AttUrl = Fun(HttpHost, DbName),
+    {ok, Data} = file:read_file(File),
+    DocUrl = string:join([HttpHost, DbName, "doc"], "/"),
+    Helpers = {DbName, DocUrl, AttUrl},
+    {Data, Helpers}.
+
+teardown(_, {_, {DbName, _, _}}) ->
+    teardown(DbName).
+
 teardown({_, DbName}) ->
+    teardown(DbName);
+teardown(DbName) ->
     ok = couch_server:delete(?l2b(DbName), []),
     ok.
 
@@ -50,7 +87,8 @@ attachments_test_() ->
             setup,
             fun start/0, fun stop/1,
             [
-                attachments_md5_tests()
+                attachments_md5_tests(),
+                attachments_compression_tests()
             ]
         }
     }.
@@ -74,9 +112,70 @@ attachments_md5_tests() ->
         }
     }.
 
+attachments_compression_tests() ->
+    Funs = [
+         fun should_get_att_without_accept_gzip_encoding/2,
+         fun should_get_att_with_accept_gzip_encoding/2,
+         fun should_get_att_with_accept_deflate_encoding/2,
+         fun should_return_406_response_on_unsupported_encoding/2,
+         fun should_get_doc_with_att_data/2,
+         fun should_get_doc_with_att_data_stub/2
+    ],
+    {
+        "Attachments compression tests",
+        [
+            {
+                "Created via Attachments API",
+                created_attachments_compression_tests(standalone, Funs)
+            },
+            {
+                "Created inline via Document API",
+                created_attachments_compression_tests(inline, Funs)
+            },
+            {
+                "Created already been compressed via Attachments API",
+                {
+                    foreachx,
+                    fun setup/1, fun teardown/2,
+                    [{compressed, Fun} || Fun <- Funs]
+                }
+            },
+            {
+                foreach,
+                fun setup/0, fun teardown/1,
+                [
+                    fun should_not_create_compressed_att_with_deflate_encoding/1,
+                    fun should_not_create_compressed_att_with_compress_encoding/1,
+                    fun should_create_compressible_att_with_ctype_params/1
+                ]
+            }
+        ]
+    }.
+
+created_attachments_compression_tests(Mod, Funs) ->
+    [
+        {
+            "Compressiable attachments",
+            {
+                foreachx,
+                fun setup/1, fun teardown/2,
+                [{{text, Mod}, Fun} || Fun <- Funs]
+            }
+        },
+        {
+            "Uncompressiable attachments",
+            {
+                foreachx,
+                fun setup/1, fun teardown/2,
+                [{{binary, Mod}, Fun} || Fun <- Funs]
+            }
+        }
+    ].
+
+
 
 should_upload_attachment_without_md5({Host, DbName}) ->
-    ?_assert(
+    ?_test(
         begin
             AttUrl = string:join(["", DbName, ?docid(), "readme.txt"], "/"),
             Body = "We all live in a yellow submarine!",
@@ -87,12 +186,11 @@ should_upload_attachment_without_md5({Host, DbName}) ->
             ],
             {ok, Code, Json} = request("PUT", AttUrl, Headers, Body),
             ?assertEqual(201, Code),
-            ?assertEqual(true, get_json(Json, [<<"ok">>])),
-            true
+            ?assertEqual(true, get_json(Json, [<<"ok">>]))
         end).
 
 should_upload_attachment_by_chunks_without_md5({Host, DbName}) ->
-    ?_assert(
+    ?_test(
         begin
             AttUrl = string:join(["", DbName, ?docid(), "readme.txt"], "/"),
             AttData = <<"We all live in a yellow submarine!">>,
@@ -105,12 +203,11 @@ should_upload_attachment_by_chunks_without_md5({Host, DbName}) ->
             ],
             {ok, Code, Json} = request("PUT", AttUrl, Headers, Body),
             ?assertEqual(201, Code),
-            ?assertEqual(true, get_json(Json, [<<"ok">>])),
-            true
+            ?assertEqual(true, get_json(Json, [<<"ok">>]))
         end).
 
 should_upload_attachment_with_valid_md5_header({Host, DbName}) ->
-    ?_assert(
+    ?_test(
         begin
             AttUrl = string:join(["", DbName, ?docid(), "readme.txt"], "/"),
             Body = "We all live in a yellow submarine!",
@@ -122,12 +219,11 @@ should_upload_attachment_with_valid_md5_header({Host, DbName}) ->
             ],
             {ok, Code, Json} = request("PUT", AttUrl, Headers, Body),
             ?assertEqual(201, Code),
-            ?assertEqual(true, get_json(Json, [<<"ok">>])),
-            true
+            ?assertEqual(true, get_json(Json, [<<"ok">>]))
         end).
 
 should_upload_attachment_by_chunks_with_valid_md5_header({Host, DbName}) ->
-    ?_assert(
+    ?_test(
         begin
             AttUrl = string:join(["", DbName, ?docid(), "readme.txt"], "/"),
             AttData = <<"We all live in a yellow submarine!">>,
@@ -141,12 +237,11 @@ should_upload_attachment_by_chunks_with_valid_md5_header({Host, DbName}) ->
             ],
             {ok, Code, Json} = request("PUT", AttUrl, Headers, Body),
             ?assertEqual(201, Code),
-            ?assertEqual(true, get_json(Json, [<<"ok">>])),
-            true
+            ?assertEqual(true, get_json(Json, [<<"ok">>]))
         end).
 
 should_upload_attachment_by_chunks_with_valid_md5_trailer({Host, DbName}) ->
-    ?_assert(
+    ?_test(
         begin
             AttUrl = string:join(["", DbName, ?docid(), "readme.txt"], "/"),
             AttData = <<"We all live in a yellow submarine!">>,
@@ -162,12 +257,11 @@ should_upload_attachment_by_chunks_with_valid_md5_trailer({Host, DbName}) ->
             ],
             {ok, Code, Json} = request("PUT", AttUrl, Headers, Body),
             ?assertEqual(201, Code),
-            ?assertEqual(true, get_json(Json, [<<"ok">>])),
-            true
+            ?assertEqual(true, get_json(Json, [<<"ok">>]))
         end).
 
 should_reject_attachment_with_invalid_md5({Host, DbName}) ->
-    ?_assert(
+    ?_test(
         begin
             AttUrl = string:join(["", DbName, ?docid(), "readme.txt"], "/"),
             Body = "We all live in a yellow submarine!",
@@ -180,13 +274,12 @@ should_reject_attachment_with_invalid_md5({Host, DbName}) ->
             {ok, Code, Json} = request("PUT", AttUrl, Headers, Body),
             ?assertEqual(400, Code),
             ?assertEqual(<<"content_md5_mismatch">>,
-                         get_json(Json, [<<"error">>])),
-            true
+                         get_json(Json, [<<"error">>]))
         end).
 
 
 should_reject_chunked_attachment_with_invalid_md5({Host, DbName}) ->
-    ?_assert(
+    ?_test(
         begin
             AttUrl = string:join(["", DbName, ?docid(), "readme.txt"], "/"),
             AttData = <<"We all live in a yellow submarine!">>,
@@ -201,12 +294,11 @@ should_reject_chunked_attachment_with_invalid_md5({Host, DbName}) ->
             {ok, Code, Json} = request("PUT", AttUrl, Headers, Body),
             ?assertEqual(400, Code),
             ?assertEqual(<<"content_md5_mismatch">>,
-                         get_json(Json, [<<"error">>])),
-            true
+                         get_json(Json, [<<"error">>]))
         end).
 
 should_reject_chunked_attachment_with_invalid_md5_trailer({Host, DbName}) ->
-    ?_assert(
+    ?_test(
         begin
             AttUrl = string:join(["", DbName, ?docid(), "readme.txt"], "/"),
             AttData = <<"We all live in a yellow submarine!">>,
@@ -223,8 +315,228 @@ should_reject_chunked_attachment_with_invalid_md5_trailer({Host, DbName}) ->
             {ok, Code, Json} = request("PUT", AttUrl, Headers, Body),
             ?assertEqual(400, Code),
             ?assertEqual(<<"content_md5_mismatch">>,
-                         get_json(Json, [<<"error">>])),
-            true
+                         get_json(Json, [<<"error">>]))
+        end).
+
+should_get_att_without_accept_gzip_encoding(_, {Data, {_, _, AttUrl}}) ->
+    ?_test(
+        begin
+            {ok, Code, Headers, Body} = test_request:get(AttUrl),
+            ?assertEqual(200, Code),
+            ?assertNot(lists:member({"Content-Encoding", "gzip"}, Headers)),
+            ?assertEqual(Data, iolist_to_binary(Body))
+        end).
+
+should_get_att_with_accept_gzip_encoding(compressed, {Data, {_, _, AttUrl}}) ->
+    ?_test(
+        begin
+            {ok, Code, Headers, Body} = test_request:get(
+                AttUrl, [{"Accept-Encoding", "gzip"}]),
+            ?assertEqual(200, Code),
+            ?assert(lists:member({"Content-Encoding", "gzip"}, Headers)),
+            ?assertEqual(Data, zlib:gunzip(iolist_to_binary(Body)))
+        end);
+should_get_att_with_accept_gzip_encoding({text, _}, {Data, {_, _, AttUrl}}) ->
+    ?_test(
+        begin
+            {ok, Code, Headers, Body} = test_request:get(
+                AttUrl, [{"Accept-Encoding", "gzip"}]),
+            ?assertEqual(200, Code),
+            ?assert(lists:member({"Content-Encoding", "gzip"}, Headers)),
+            ?assertEqual(Data, zlib:gunzip(iolist_to_binary(Body)))
+        end);
+should_get_att_with_accept_gzip_encoding({binary, _}, {Data, {_, _, AttUrl}}) ->
+    ?_test(
+        begin
+            {ok, Code, Headers, Body} = test_request:get(
+                AttUrl, [{"Accept-Encoding", "gzip"}]),
+            ?assertEqual(200, Code),
+            ?assertEqual(undefined,
+                         couch_util:get_value("Content-Encoding", Headers)),
+            ?assertEqual(Data, iolist_to_binary(Body))
+        end).
+
+should_get_att_with_accept_deflate_encoding(_, {Data, {_, _, AttUrl}}) ->
+    ?_test(
+        begin
+            {ok, Code, Headers, Body} = test_request:get(
+                AttUrl, [{"Accept-Encoding", "deflate"}]),
+            ?assertEqual(200, Code),
+            ?assertEqual(undefined,
+                         couch_util:get_value("Content-Encoding", Headers)),
+            ?assertEqual(Data, iolist_to_binary(Body))
+        end).
+
+should_return_406_response_on_unsupported_encoding(_, {_, {_, _, AttUrl}}) ->
+    ?_assertEqual(406,
+        begin
+            {ok, Code, _, _} = test_request:get(
+                AttUrl, [{"Accept-Encoding", "deflate, *;q=0"}]),
+            Code
+        end).
+
+should_get_doc_with_att_data(compressed, {Data, {_, DocUrl, _}}) ->
+    ?_test(
+        begin
+            Url = DocUrl ++ "?attachments=true",
+            {ok, Code, _, Body} = test_request:get(
+                Url, [{"Accept", "application/json"}]),
+            ?assertEqual(200, Code),
+            Json = ejson:decode(Body),
+            AttJson = couch_util:get_nested_json_value(
+                Json, [<<"_attachments">>, ?ATT_TXT_NAME]),
+            AttData = couch_util:get_nested_json_value(
+                AttJson, [<<"data">>]),
+            ?assertEqual(
+                <<"text/plain">>,
+                couch_util:get_nested_json_value(AttJson,[<<"content_type">>])),
+            ?assertEqual(Data, base64:decode(AttData))
+        end);
+should_get_doc_with_att_data({text, _}, {Data, {_, DocUrl, _}}) ->
+    ?_test(
+        begin
+            Url = DocUrl ++ "?attachments=true",
+            {ok, Code, _, Body} = test_request:get(
+                Url, [{"Accept", "application/json"}]),
+            ?assertEqual(200, Code),
+            Json = ejson:decode(Body),
+            AttJson = couch_util:get_nested_json_value(
+                Json, [<<"_attachments">>, ?ATT_TXT_NAME]),
+            AttData = couch_util:get_nested_json_value(
+                AttJson, [<<"data">>]),
+            ?assertEqual(
+                <<"text/plain">>,
+                couch_util:get_nested_json_value(AttJson,[<<"content_type">>])),
+            ?assertEqual(Data, base64:decode(AttData))
+        end);
+should_get_doc_with_att_data({binary, _}, {Data, {_, DocUrl, _}}) ->
+    ?_test(
+        begin
+            Url = DocUrl ++ "?attachments=true",
+            {ok, Code, _, Body} = test_request:get(
+                Url, [{"Accept", "application/json"}]),
+            ?assertEqual(200, Code),
+            Json = ejson:decode(Body),
+            AttJson = couch_util:get_nested_json_value(
+                Json, [<<"_attachments">>, ?ATT_BIN_NAME]),
+            AttData = couch_util:get_nested_json_value(
+                AttJson, [<<"data">>]),
+            ?assertEqual(
+                <<"image/png">>,
+                couch_util:get_nested_json_value(AttJson,[<<"content_type">>])),
+            ?assertEqual(Data, base64:decode(AttData))
+        end).
+
+should_get_doc_with_att_data_stub(compressed, {Data, {_, DocUrl, _}}) ->
+    ?_test(
+        begin
+            Url = DocUrl ++ "?att_encoding_info=true",
+            {ok, Code, _, Body} = test_request:get(
+                Url, [{"Accept", "application/json"}]),
+            ?assertEqual(200, Code),
+            Json = ejson:decode(Body),
+            {AttJson} = couch_util:get_nested_json_value(
+                Json, [<<"_attachments">>, ?ATT_TXT_NAME]),
+            ?assertEqual(<<"gzip">>,
+                         couch_util:get_value(<<"encoding">>, AttJson)),
+            AttLength = couch_util:get_value(<<"length">>, AttJson),
+            EncLength = couch_util:get_value(<<"encoded_length">>, AttJson),
+            ?assertEqual(AttLength, EncLength),
+            ?assertEqual(iolist_size(zlib:gzip(Data)), AttLength)
+        end);
+should_get_doc_with_att_data_stub({text, _}, {Data, {_, DocUrl, _}}) ->
+    ?_test(
+        begin
+            Url = DocUrl ++ "?att_encoding_info=true",
+            {ok, Code, _, Body} = test_request:get(
+                Url, [{"Accept", "application/json"}]),
+            ?assertEqual(200, Code),
+            Json = ejson:decode(Body),
+            {AttJson} = couch_util:get_nested_json_value(
+                Json, [<<"_attachments">>, ?ATT_TXT_NAME]),
+            ?assertEqual(<<"gzip">>,
+                         couch_util:get_value(<<"encoding">>, AttJson)),
+            AttEncLength = iolist_size(gzip(Data)),
+            ?assertEqual(AttEncLength,
+                         couch_util:get_value(<<"encoded_length">>, AttJson)),
+            ?assertEqual(byte_size(Data),
+                         couch_util:get_value(<<"length">>, AttJson))
+        end);
+should_get_doc_with_att_data_stub({binary, _}, {Data, {_, DocUrl, _}}) ->
+    ?_test(
+        begin
+            Url = DocUrl ++ "?att_encoding_info=true",
+            {ok, Code, _, Body} = test_request:get(
+                Url, [{"Accept", "application/json"}]),
+            ?assertEqual(200, Code),
+            Json = ejson:decode(Body),
+            {AttJson} = couch_util:get_nested_json_value(
+                Json, [<<"_attachments">>, ?ATT_BIN_NAME]),
+            ?assertEqual(undefined,
+                         couch_util:get_value(<<"encoding">>, AttJson)),
+            ?assertEqual(undefined,
+                         couch_util:get_value(<<"encoded_length">>, AttJson)),
+            ?assertEqual(byte_size(Data),
+                         couch_util:get_value(<<"length">>, AttJson))
+        end).
+
+should_not_create_compressed_att_with_deflate_encoding({Host, DbName}) ->
+    ?_assertEqual(415,
+        begin
+            HttpHost = "http://" ++ Host,
+            AttUrl = string:join([HttpHost, DbName, ?docid(), "file.txt"], "/"),
+            {ok, Data} = file:read_file(?FIXTURE_TXT),
+            Body = zlib:compress(Data),
+            Headers = [
+                {"Content-Encoding", "deflate"},
+                {"Content-Type", "text/plain"}
+            ],
+            {ok, Code, _, _} = test_request:put(AttUrl, Headers, Body),
+            Code
+        end).
+
+should_not_create_compressed_att_with_compress_encoding({Host, DbName}) ->
+    % Note: As of OTP R13B04, it seems there's no LZW compression
+    % (i.e. UNIX compress utility implementation) lib in OTP.
+    % However there's a simple working Erlang implementation at:
+    % http://scienceblogs.com/goodmath/2008/01/simple_lempelziv_compression_i.php
+    ?_assertEqual(415,
+        begin
+            HttpHost = "http://" ++ Host,
+            AttUrl = string:join([HttpHost, DbName, ?docid(), "file.txt"], "/"),
+            {ok, Data} = file:read_file(?FIXTURE_TXT),
+            Headers = [
+                {"Content-Encoding", "compress"},
+                {"Content-Type", "text/plain"}
+            ],
+            {ok, Code, _, _} = test_request:put(AttUrl, Headers, Data),
+            Code
+        end).
+
+should_create_compressible_att_with_ctype_params({Host, DbName}) ->
+    ?_test(
+        begin
+            HttpHost = "http://" ++ Host,
+            DocUrl = string:join([HttpHost, DbName, ?docid()], "/"),
+            AttUrl = string:join([DocUrl, ?b2l(?ATT_TXT_NAME)], "/"),
+            {ok, Data} = file:read_file(?FIXTURE_TXT),
+            Headers = [{"Content-Type", "text/plain; charset=UTF-8"}],
+            {ok, Code0, _, _} = test_request:put(AttUrl, Headers, Data),
+            ?assertEqual(201, Code0),
+
+            {ok, Code1, _, Body} = test_request:get(
+                DocUrl ++ "?att_encoding_info=true"),
+            ?assertEqual(200, Code1),
+            Json = ejson:decode(Body),
+            {AttJson} = couch_util:get_nested_json_value(
+                Json, [<<"_attachments">>, ?ATT_TXT_NAME]),
+            ?assertEqual(<<"gzip">>,
+                         couch_util:get_value(<<"encoding">>, AttJson)),
+            AttEncLength = iolist_size(gzip(Data)),
+            ?assertEqual(AttEncLength,
+                         couch_util:get_value(<<"encoded_length">>, AttJson)),
+            ?assertEqual(byte_size(Data),
+                         couch_util:get_value(<<"length">>, AttJson))
         end).
 
 
@@ -274,5 +586,68 @@ request(Method, Url, Headers, Body) ->
     Json = ejson:decode(Body1),
     {ok, Code, Json}.
 
-
-
+create_standalone_text_att(Host, DbName) ->
+    {ok, Data} = file:read_file(?FIXTURE_TXT),
+    Url = string:join([Host, DbName, "doc", ?b2l(?ATT_TXT_NAME)], "/"),
+    {ok, Code, _Headers, _Body} = test_request:put(
+        Url, [{"Content-Type", "text/plain"}], Data),
+    ?assertEqual(201, Code),
+    Url.
+
+create_standalone_png_att(Host, DbName) ->
+    {ok, Data} = file:read_file(?FIXTURE_PNG),
+    Url = string:join([Host, DbName, "doc", ?b2l(?ATT_BIN_NAME)], "/"),
+    {ok, Code, _Headers, _Body} = test_request:put(
+        Url, [{"Content-Type", "image/png"}], Data),
+    ?assertEqual(201, Code),
+    Url.
+
+create_inline_text_att(Host, DbName) ->
+    {ok, Data} = file:read_file(?FIXTURE_TXT),
+    Url = string:join([Host, DbName, "doc"], "/"),
+    Doc = {[
+        {<<"_attachments">>, {[
+            {?ATT_TXT_NAME, {[
+                {<<"content_type">>, <<"text/plain">>},
+                {<<"data">>, base64:encode(Data)}
+            ]}
+        }]}}
+    ]},
+    {ok, Code, _Headers, _Body} = test_request:put(
+        Url, [{"Content-Type", "application/json"}], ejson:encode(Doc)),
+    ?assertEqual(201, Code),
+    string:join([Url, ?b2l(?ATT_TXT_NAME)], "/").
+
+create_inline_png_att(Host, DbName) ->
+    {ok, Data} = file:read_file(?FIXTURE_PNG),
+    Url = string:join([Host, DbName, "doc"], "/"),
+    Doc = {[
+        {<<"_attachments">>, {[
+            {?ATT_BIN_NAME, {[
+                {<<"content_type">>, <<"image/png">>},
+                {<<"data">>, base64:encode(Data)}
+            ]}
+        }]}}
+    ]},
+    {ok, Code, _Headers, _Body} = test_request:put(
+        Url, [{"Content-Type", "application/json"}], ejson:encode(Doc)),
+    ?assertEqual(201, Code),
+    string:join([Url, ?b2l(?ATT_BIN_NAME)], "/").
+
+create_already_compressed_att(Host, DbName) ->
+    {ok, Data} = file:read_file(?FIXTURE_TXT),
+    Url = string:join([Host, DbName, "doc", ?b2l(?ATT_TXT_NAME)], "/"),
+    {ok, Code, _Headers, _Body} = test_request:put(
+        Url, [{"Content-Type", "text/plain"}, {"Content-Encoding", "gzip"}],
+        zlib:gzip(Data)),
+    ?assertEqual(201, Code),
+    Url.
+
+gzip(Data) ->
+    Z = zlib:open(),
+    ok = zlib:deflateInit(Z, ?COMPRESSION_LEVEL, deflated, 16 + 15, 8, default),
+    zlib:deflate(Z, Data),
+    Last = zlib:deflate(Z, [], finish),
+    ok = zlib:deflateEnd(Z),
+    ok = zlib:close(Z),
+    Last.

http://git-wip-us.apache.org/repos/asf/couchdb/blob/a36d6305/test/couchdb/fixtures/logo.png
----------------------------------------------------------------------
diff --git a/test/couchdb/fixtures/logo.png b/test/couchdb/fixtures/logo.png
new file mode 100644
index 0000000..d21ac02
Binary files /dev/null and b/test/couchdb/fixtures/logo.png differ

http://git-wip-us.apache.org/repos/asf/couchdb/blob/a36d6305/test/couchdb/test_request.erl
----------------------------------------------------------------------
diff --git a/test/couchdb/test_request.erl b/test/couchdb/test_request.erl
index cd6e310..10801ae 100644
--- a/test/couchdb/test_request.erl
+++ b/test/couchdb/test_request.erl
@@ -12,7 +12,7 @@
 
 -module(test_request).
 
--export([get/1, get/2]).
+-export([get/1, get/2, put/2, put/3]).
 -export([request/3, request/4]).
 
 get(Url) ->
@@ -20,6 +20,13 @@ get(Url) ->
 get(Url, Headers) ->
     request(get, Url, Headers).
 
+put(Url, Body) ->
+    request(put, Url, [], Body).
+
+put(Url, Headers, Body) ->
+    request(put, Url, Headers, Body).
+
+
 request(Method, Url, Headers) ->
     request(Method, Url, Headers, []).
 

http://git-wip-us.apache.org/repos/asf/couchdb/blob/a36d6305/test/etap/140-attachment-comp.t
----------------------------------------------------------------------
diff --git a/test/etap/140-attachment-comp.t b/test/etap/140-attachment-comp.t
deleted file mode 100755
index 6f075ce..0000000
--- a/test/etap/140-attachment-comp.t
+++ /dev/null
@@ -1,728 +0,0 @@
-#!/usr/bin/env escript
-%% -*- erlang -*-
-
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
-test_db_name() ->
-    <<"couch_test_atts_compression">>.
-
-main(_) ->
-    test_util:init_code_path(),
-
-    etap:plan(85),
-    case (catch test()) of
-        ok ->
-            etap:end_tests();
-        Other ->
-            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
-            etap:bail(Other)
-    end,
-    ok.
-
-test() ->
-    couch_server_sup:start_link(test_util:config_files()),
-    put(addr, couch_config:get("httpd", "bind_address", "127.0.0.1")),
-    put(port, integer_to_list(mochiweb_socket_server:get(couch_httpd, port))),
-    timer:sleep(1000),
-    couch_server:delete(test_db_name(), []),
-    couch_db:create(test_db_name(), []),
-
-    couch_config:set("attachments", "compression_level", "8", false),
-    couch_config:set("attachments", "compressible_types", "text/*", false),
-
-    create_1st_text_att(),
-    create_1st_png_att(),
-    create_2nd_text_att(),
-    create_2nd_png_att(),
-
-    tests_for_1st_text_att(),
-    tests_for_1st_png_att(),
-    tests_for_2nd_text_att(),
-    tests_for_2nd_png_att(),
-
-    create_already_compressed_att(db_url() ++ "/doc_comp_att", "readme.txt"),
-    test_already_compressed_att(db_url() ++ "/doc_comp_att", "readme.txt"),
-
-    test_create_already_compressed_att_with_invalid_content_encoding(
-        db_url() ++ "/doc_att_deflate",
-        "readme.txt",
-        zlib:compress(test_text_data()),
-        "deflate"
-    ),
-
-    % COUCHDB-1711 - avoid weird timng/scheduling/request handling issue
-    timer:sleep(100),
-
-    test_create_already_compressed_att_with_invalid_content_encoding(
-        db_url() ++ "/doc_att_compress",
-        "readme.txt",
-        % Note: As of OTP R13B04, it seems there's no LZW compression
-        % (i.e. UNIX compress utility implementation) lib in OTP.
-        % However there's a simple working Erlang implementation at:
-        % http://scienceblogs.com/goodmath/2008/01/simple_lempelziv_compression_i.php
-        test_text_data(),
-        "compress"
-    ),
-
-    test_compressible_type_with_parameters(),
-
-    timer:sleep(3000), % to avoid mochiweb socket closed exceptions
-    couch_server:delete(test_db_name(), []),
-    couch_server_sup:stop(),
-    ok.
-
-db_url() ->
-    "http://" ++ get(addr) ++ ":" ++ get(port) ++ "/" ++
-    binary_to_list(test_db_name()).
-
-create_1st_text_att() ->
-    {ok, Code, _Headers, _Body} = test_util:request(
-        db_url() ++ "/testdoc1/readme.txt",
-        [{"Content-Type", "text/plain"}],
-        put,
-        test_text_data()),
-    etap:is(Code, 201, "Created text attachment using the standalone api"),
-    ok.
-
-create_1st_png_att() ->
-    {ok, Code, _Headers, _Body} = test_util:request(
-        db_url() ++ "/testdoc2/icon.png",
-        [{"Content-Type", "image/png"}],
-        put,
-        test_png_data()),
-    etap:is(Code, 201, "Created png attachment using the standalone api"),
-    ok.
-
-% create a text attachment using the non-standalone attachment api
-create_2nd_text_att() ->
-    DocJson = {[
-        {<<"_attachments">>, {[
-            {<<"readme.txt">>, {[
-                {<<"content_type">>, <<"text/plain">>},
-                {<<"data">>, base64:encode(test_text_data())}
-            ]}
-        }]}}
-    ]},
-    {ok, Code, _Headers, _Body} = test_util:request(
-        db_url() ++ "/testdoc3",
-        [{"Content-Type", "application/json"}],
-        put,
-        ejson:encode(DocJson)),
-    etap:is(Code, 201, "Created text attachment using the non-standalone api"),
-    ok.
-
-% create a png attachment using the non-standalone attachment api
-create_2nd_png_att() ->
-    DocJson = {[
-        {<<"_attachments">>, {[
-            {<<"icon.png">>, {[
-                {<<"content_type">>, <<"image/png">>},
-                {<<"data">>, base64:encode(test_png_data())}
-            ]}
-        }]}}
-    ]},
-    {ok, Code, _Headers, _Body} = test_util:request(
-        db_url() ++ "/testdoc4",
-        [{"Content-Type", "application/json"}],
-        put,
-        ejson:encode(DocJson)),
-    etap:is(Code, 201, "Created png attachment using the non-standalone api"),
-    ok.
-
-create_already_compressed_att(DocUri, AttName) ->
-    {ok, Code, _Headers, _Body} = test_util:request(
-        DocUri ++ "/" ++ AttName,
-        [{"Content-Type", "text/plain"}, {"Content-Encoding", "gzip"}],
-        put,
-        zlib:gzip(test_text_data())),
-    etap:is(
-        Code,
-        201,
-        "Created already compressed attachment using the standalone api"
-    ),
-    ok.
-
-tests_for_1st_text_att() ->
-    test_get_1st_text_att_with_accept_encoding_gzip(),
-    test_get_1st_text_att_without_accept_encoding_header(),
-    test_get_1st_text_att_with_accept_encoding_deflate(),
-    test_get_1st_text_att_with_accept_encoding_deflate_only(),
-    test_get_doc_with_1st_text_att(),
-    test_1st_text_att_stub().
-
-tests_for_1st_png_att() ->
-    test_get_1st_png_att_without_accept_encoding_header(),
-    test_get_1st_png_att_with_accept_encoding_gzip(),
-    test_get_1st_png_att_with_accept_encoding_deflate(),
-    test_get_doc_with_1st_png_att(),
-    test_1st_png_att_stub().
-
-tests_for_2nd_text_att() ->
-    test_get_2nd_text_att_with_accept_encoding_gzip(),
-    test_get_2nd_text_att_without_accept_encoding_header(),
-    test_get_doc_with_2nd_text_att(),
-    test_2nd_text_att_stub().
-
-tests_for_2nd_png_att() ->
-    test_get_2nd_png_att_without_accept_encoding_header(),
-    test_get_2nd_png_att_with_accept_encoding_gzip(),
-    test_get_doc_with_2nd_png_att(),
-    test_2nd_png_att_stub().
-
-test_get_1st_text_att_with_accept_encoding_gzip() ->
-    {ok, Code, Headers, Body} = test_util:request(
-        db_url() ++ "/testdoc1/readme.txt",
-        [{"Accept-Encoding", "gzip"}],
-        get),
-    etap:is(Code, 200, "HTTP response code is 200"),
-    Gziped = lists:member({"Content-Encoding", "gzip"}, Headers),
-    etap:is(Gziped, true, "received body is gziped"),
-    Uncompressed = zlib:gunzip(iolist_to_binary(Body)),
-    etap:is(
-        Uncompressed,
-        test_text_data(),
-        "received data for the 1st text attachment is ok"
-    ),
-    ok.
-
-test_get_1st_text_att_without_accept_encoding_header() ->
-    {ok, Code, Headers, Body} = test_util:request(
-        db_url() ++ "/testdoc1/readme.txt",
-        [],
-        get),
-    etap:is(Code, 200, "HTTP response code is 200"),
-    Gziped = lists:member({"Content-Encoding", "gzip"}, Headers),
-    etap:is(Gziped, false, "received body is not gziped"),
-    etap:is(
-        iolist_to_binary(Body),
-        test_text_data(),
-        "received data for the 1st text attachment is ok"
-    ),
-    ok.
-
-test_get_1st_text_att_with_accept_encoding_deflate() ->
-    {ok, Code, Headers, Body} = test_util:request(
-        db_url() ++ "/testdoc1/readme.txt",
-        [{"Accept-Encoding", "deflate"}],
-        get),
-    etap:is(Code, 200, "HTTP response code is 200"),
-    Gziped = lists:member({"Content-Encoding", "gzip"}, Headers),
-    etap:is(Gziped, false, "received body is not gziped"),
-    Deflated = lists:member({"Content-Encoding", "deflate"}, Headers),
-    etap:is(Deflated, false, "received body is not deflated"),
-    etap:is(
-        iolist_to_binary(Body),
-        test_text_data(),
-        "received data for the 1st text attachment is ok"
-    ),
-    ok.
-
-test_get_1st_text_att_with_accept_encoding_deflate_only() ->
-    {ok, Code, _Headers, _Body} = test_util:request(
-        db_url() ++ "/testdoc1/readme.txt",
-        [{"Accept-Encoding", "deflate, *;q=0"}],
-        get),
-    etap:is(
-        Code,
-        406,
-        "HTTP response code is 406 for an unsupported content encoding request"
-    ),
-    ok.
-
-test_get_1st_png_att_without_accept_encoding_header() ->
-    {ok, Code, Headers, Body} = test_util:request(
-        db_url() ++ "/testdoc2/icon.png",
-        [],
-        get),
-    etap:is(Code, 200, "HTTP response code is 200"),
-    Encoding = couch_util:get_value("Content-Encoding", Headers),
-    etap:is(Encoding, undefined, "received body is not gziped"),
-    etap:is(
-        iolist_to_binary(Body),
-        test_png_data(),
-        "received data for the 1st png attachment is ok"
-    ),
-    ok.
-
-test_get_1st_png_att_with_accept_encoding_gzip() ->
-    {ok, Code, Headers, Body} = test_util:request(
-        db_url() ++ "/testdoc2/icon.png",
-        [{"Accept-Encoding", "gzip"}],
-        get),
-    etap:is(Code, 200, "HTTP response code is 200"),
-    Encoding = couch_util:get_value("Content-Encoding", Headers),
-    etap:is(Encoding, undefined, "received body is not gziped"),
-    etap:is(
-        iolist_to_binary(Body),
-        test_png_data(),
-        "received data for the 1st png attachment is ok"
-    ),
-    ok.
-
-test_get_1st_png_att_with_accept_encoding_deflate() ->
-    {ok, Code, Headers, Body} = test_util:request(
-        db_url() ++ "/testdoc2/icon.png",
-        [{"Accept-Encoding", "deflate"}],
-        get),
-    etap:is(Code, 200, "HTTP response code is 200"),
-    Encoding = couch_util:get_value("Content-Encoding", Headers),
-    etap:is(Encoding, undefined, "received body is in identity form"),
-    etap:is(
-        iolist_to_binary(Body),
-        test_png_data(),
-        "received data for the 1st png attachment is ok"
-    ),
-    ok.
-
-test_get_doc_with_1st_text_att() ->
-    {ok, Code, _Headers, Body} = test_util:request(
-        db_url() ++ "/testdoc1?attachments=true",
-        [{"Accept", "application/json"}],
-        get),
-    etap:is(Code, 200, "HTTP response code is 200"),
-    Json = ejson:decode(Body),
-    TextAttJson = couch_util:get_nested_json_value(
-        Json,
-        [<<"_attachments">>, <<"readme.txt">>]
-    ),
-    TextAttType = couch_util:get_nested_json_value(
-        TextAttJson,
-        [<<"content_type">>]
-    ),
-    TextAttData = couch_util:get_nested_json_value(
-        TextAttJson,
-        [<<"data">>]
-    ),
-    etap:is(
-        TextAttType,
-        <<"text/plain">>,
-        "1st text attachment has type text/plain"
-    ),
-    %% check the attachment's data is the base64 encoding of the plain text
-    %% and not the base64 encoding of the gziped plain text
-    etap:is(
-        TextAttData,
-        base64:encode(test_text_data()),
-        "1st text attachment data is properly base64 encoded"
-    ),
-    ok.
-
-test_1st_text_att_stub() ->
-    {ok, Code, _Headers, Body} = test_util:request(
-        db_url() ++ "/testdoc1?att_encoding_info=true",
-        [],
-        get),
-    etap:is(Code, 200, "HTTP response code is 200"),
-    Json = ejson:decode(Body),
-    {TextAttJson} = couch_util:get_nested_json_value(
-        Json,
-        [<<"_attachments">>, <<"readme.txt">>]
-    ),
-    TextAttLength = couch_util:get_value(<<"length">>, TextAttJson),
-    etap:is(
-        TextAttLength,
-        byte_size(test_text_data()),
-        "1st text attachment stub length matches the uncompressed length"
-    ),
-    TextAttEncoding = couch_util:get_value(<<"encoding">>, TextAttJson),
-    etap:is(
-        TextAttEncoding,
-        <<"gzip">>,
-        "1st text attachment stub has the encoding field set to gzip"
-    ),
-    TextAttEncLength = couch_util:get_value(<<"encoded_length">>, TextAttJson),
-    etap:is(
-        TextAttEncLength,
-        iolist_size(zlib:gzip(test_text_data())),
-        "1st text attachment stub encoded_length matches the compressed length"
-    ),
-    ok.
-
-test_get_doc_with_1st_png_att() ->
-    {ok, Code, _Headers, Body} = test_util:request(
-        db_url() ++ "/testdoc2?attachments=true",
-        [{"Accept", "application/json"}],
-        get),
-    etap:is(Code, 200, "HTTP response code is 200"),
-    Json = ejson:decode(Body),
-    PngAttJson = couch_util:get_nested_json_value(
-        Json,
-        [<<"_attachments">>, <<"icon.png">>]
-    ),
-    PngAttType = couch_util:get_nested_json_value(
-        PngAttJson,
-        [<<"content_type">>]
-    ),
-    PngAttData = couch_util:get_nested_json_value(
-        PngAttJson,
-        [<<"data">>]
-    ),
-    etap:is(PngAttType, <<"image/png">>, "attachment has type image/png"),
-    etap:is(
-        PngAttData,
-        base64:encode(test_png_data()),
-        "1st png attachment data is properly base64 encoded"
-    ),
-    ok.
-
-test_1st_png_att_stub() ->
-    {ok, Code, _Headers, Body} = test_util:request(
-        db_url() ++ "/testdoc2?att_encoding_info=true",
-        [{"Accept", "application/json"}],
-        get),
-    etap:is(Code, 200, "HTTP response code is 200"),
-    Json = ejson:decode(Body),
-    {PngAttJson} = couch_util:get_nested_json_value(
-        Json,
-        [<<"_attachments">>, <<"icon.png">>]
-    ),
-    PngAttLength = couch_util:get_value(<<"length">>, PngAttJson),
-    etap:is(
-        PngAttLength,
-        byte_size(test_png_data()),
-        "1st png attachment stub length matches the uncompressed length"
-    ),
-    PngEncoding = couch_util:get_value(<<"encoding">>, PngAttJson),
-    etap:is(
-        PngEncoding,
-        undefined,
-        "1st png attachment stub doesn't have an encoding field"
-    ),
-    PngEncLength = couch_util:get_value(<<"encoded_length">>, PngAttJson),
-    etap:is(
-        PngEncLength,
-        undefined,
-        "1st png attachment stub doesn't have an encoded_length field"
-    ),
-    ok.
-
-test_get_2nd_text_att_with_accept_encoding_gzip() ->
-    {ok, Code, Headers, Body} = test_util:request(
-        db_url() ++ "/testdoc3/readme.txt",
-        [{"Accept-Encoding", "gzip"}],
-        get),
-    etap:is(Code, 200, "HTTP response code is 200"),
-    Gziped = lists:member({"Content-Encoding", "gzip"}, Headers),
-    etap:is(Gziped, true, "received body is gziped"),
-    Uncompressed = zlib:gunzip(iolist_to_binary(Body)),
-    etap:is(
-        Uncompressed,
-        test_text_data(),
-        "received data for the 2nd text attachment is ok"
-    ),
-    ok.
-
-test_get_2nd_text_att_without_accept_encoding_header() ->
-    {ok, Code, Headers, Body} = test_util:request(
-        db_url() ++ "/testdoc3/readme.txt",
-        [],
-        get),
-    etap:is(Code, 200, "HTTP response code is 200"),
-    Gziped = lists:member({"Content-Encoding", "gzip"}, Headers),
-    etap:is(Gziped, false, "received body is not gziped"),
-    etap:is(
-        Body,
-        test_text_data(),
-        "received data for the 2nd text attachment is ok"
-    ),
-    ok.
-
-test_get_2nd_png_att_without_accept_encoding_header() ->
-    {ok, Code, Headers, Body} = test_util:request(
-        db_url() ++ "/testdoc4/icon.png",
-        [],
-        get),
-    etap:is(Code, 200, "HTTP response code is 200"),
-    Gziped = lists:member({"Content-Encoding", "gzip"}, Headers),
-    etap:is(Gziped, false, "received body is not gziped"),
-    etap:is(
-        Body,
-        test_png_data(),
-        "received data for the 2nd png attachment is ok"
-    ),
-    ok.
-
-test_get_2nd_png_att_with_accept_encoding_gzip() ->
-    {ok, Code, Headers, Body} = test_util:request(
-        db_url() ++ "/testdoc4/icon.png",
-        [{"Accept-Encoding", "gzip"}],
-        get),
-    etap:is(Code, 200, "HTTP response code is 200"),
-    Gziped = lists:member({"Content-Encoding", "gzip"}, Headers),
-    etap:is(Gziped, false, "received body is not gziped"),
-    etap:is(
-        Body,
-        test_png_data(),
-        "received data for the 2nd png attachment is ok"
-    ),
-    ok.
-
-test_get_doc_with_2nd_text_att() ->
-    {ok, Code, _Headers, Body} = test_util:request(
-        db_url() ++ "/testdoc3?attachments=true",
-        [{"Accept", "application/json"}],
-        get),
-    etap:is(Code, 200, "HTTP response code is 200"),
-    Json = ejson:decode(Body),
-    TextAttJson = couch_util:get_nested_json_value(
-        Json,
-        [<<"_attachments">>, <<"readme.txt">>]
-    ),
-    TextAttType = couch_util:get_nested_json_value(
-        TextAttJson,
-        [<<"content_type">>]
-    ),
-    TextAttData = couch_util:get_nested_json_value(
-        TextAttJson,
-        [<<"data">>]
-    ),
-    etap:is(TextAttType, <<"text/plain">>, "attachment has type text/plain"),
-    %% check the attachment's data is the base64 encoding of the plain text
-    %% and not the base64 encoding of the gziped plain text
-    etap:is(
-        TextAttData,
-        base64:encode(test_text_data()),
-        "2nd text attachment data is properly base64 encoded"
-    ),
-    ok.
-
-test_2nd_text_att_stub() ->
-    {ok, Code, _Headers, Body} = test_util:request(
-        db_url() ++ "/testdoc3?att_encoding_info=true",
-        [],
-        get),
-    etap:is(Code, 200, "HTTP response code is 200"),
-    Json = ejson:decode(Body),
-    {TextAttJson} = couch_util:get_nested_json_value(
-        Json,
-        [<<"_attachments">>, <<"readme.txt">>]
-    ),
-    TextAttLength = couch_util:get_value(<<"length">>, TextAttJson),
-    etap:is(
-        TextAttLength,
-        byte_size(test_text_data()),
-        "2nd text attachment stub length matches the uncompressed length"
-    ),
-    TextAttEncoding = couch_util:get_value(<<"encoding">>, TextAttJson),
-    etap:is(
-        TextAttEncoding,
-        <<"gzip">>,
-        "2nd text attachment stub has the encoding field set to gzip"
-    ),
-    TextAttEncLength = couch_util:get_value(<<"encoded_length">>, TextAttJson),
-    etap:is(
-        TextAttEncLength,
-        iolist_size(zlib:gzip(test_text_data())),
-        "2nd text attachment stub encoded_length matches the compressed length"
-    ),
-    ok.
-
-test_get_doc_with_2nd_png_att() ->
-    {ok, Code, _Headers, Body} = test_util:request(
-        db_url() ++ "/testdoc4?attachments=true",
-        [{"Accept", "application/json"}],
-        get),
-    etap:is(Code, 200, "HTTP response code is 200"),
-    Json = ejson:decode(Body),
-    PngAttJson = couch_util:get_nested_json_value(
-        Json,
-        [<<"_attachments">>, <<"icon.png">>]
-    ),
-    PngAttType = couch_util:get_nested_json_value(
-        PngAttJson,
-        [<<"content_type">>]
-    ),
-    PngAttData = couch_util:get_nested_json_value(
-        PngAttJson,
-        [<<"data">>]
-    ),
-    etap:is(PngAttType, <<"image/png">>, "attachment has type image/png"),
-    etap:is(
-        PngAttData,
-        base64:encode(test_png_data()),
-        "2nd png attachment data is properly base64 encoded"
-    ),
-    ok.
-
-test_2nd_png_att_stub() ->
-    {ok, Code, _Headers, Body} = test_util:request(
-        db_url() ++ "/testdoc4?att_encoding_info=true",
-        [],
-        get),
-    etap:is(Code, 200, "HTTP response code is 200"),
-    Json = ejson:decode(Body),
-    {PngAttJson} = couch_util:get_nested_json_value(
-        Json,
-        [<<"_attachments">>, <<"icon.png">>]
-    ),
-    PngAttLength = couch_util:get_value(<<"length">>, PngAttJson),
-    etap:is(
-        PngAttLength,
-        byte_size(test_png_data()),
-        "2nd png attachment stub length matches the uncompressed length"
-    ),
-    PngEncoding = couch_util:get_value(<<"encoding">>, PngAttJson),
-    etap:is(
-        PngEncoding,
-        undefined,
-        "2nd png attachment stub doesn't have an encoding field"
-    ),
-    PngEncLength = couch_util:get_value(<<"encoded_length">>, PngAttJson),
-    etap:is(
-        PngEncLength,
-        undefined,
-        "2nd png attachment stub doesn't have an encoded_length field"
-    ),
-    ok.
-
-test_already_compressed_att(DocUri, AttName) ->
-    test_get_already_compressed_att_with_accept_gzip(DocUri, AttName),
-    test_get_already_compressed_att_without_accept(DocUri, AttName),
-    test_get_already_compressed_att_stub(DocUri, AttName).
-
-test_get_already_compressed_att_with_accept_gzip(DocUri, AttName) ->
-    {ok, Code, Headers, Body} = test_util:request(
-        DocUri ++ "/" ++ AttName,
-        [{"Accept-Encoding", "gzip"}],
-        get),
-    etap:is(Code, 200, "HTTP response code is 200"),
-    Gziped = lists:member({"Content-Encoding", "gzip"}, Headers),
-    etap:is(Gziped, true, "received body is gziped"),
-    etap:is(
-        Body,
-        zlib:gzip(test_text_data()),
-        "received data for the already compressed attachment is ok"
-    ),
-    ok.
-
-test_get_already_compressed_att_without_accept(DocUri, AttName) ->
-    {ok, Code, Headers, Body} = test_util:request(
-        DocUri ++ "/" ++ AttName,
-        [],
-        get),
-    etap:is(Code, 200, "HTTP response code is 200"),
-    Gziped = lists:member({"Content-Encoding", "gzip"}, Headers),
-    etap:is(Gziped, false, "received body is not gziped"),
-    etap:is(
-        Body,
-        test_text_data(),
-        "received data for the already compressed attachment is ok"
-    ),
-    ok.
-
-test_get_already_compressed_att_stub(DocUri, AttName) ->
-    {ok, Code, _Headers, Body} = test_util:request(
-        DocUri ++ "?att_encoding_info=true",
-        [],
-        get),
-    etap:is(Code, 200, "HTTP response code is 200"),
-    Json = ejson:decode(Body),
-    {AttJson} = couch_util:get_nested_json_value(
-        Json,
-        [<<"_attachments">>, iolist_to_binary(AttName)]
-    ),
-    AttLength = couch_util:get_value(<<"length">>, AttJson),
-    etap:is(
-        AttLength,
-        iolist_size((zlib:gzip(test_text_data()))),
-        "Already compressed attachment stub length matches the "
-        "compressed length"
-    ),
-    Encoding = couch_util:get_value(<<"encoding">>, AttJson),
-    etap:is(
-        Encoding,
-        <<"gzip">>,
-        "Already compressed attachment stub has the encoding field set to gzip"
-    ),
-    EncLength = couch_util:get_value(<<"encoded_length">>, AttJson),
-    etap:is(
-        EncLength,
-        AttLength,
-        "Already compressed attachment stub encoded_length matches the "
-        "length field value"
-    ),
-    ok.
-
-test_create_already_compressed_att_with_invalid_content_encoding(
-    DocUri, AttName, AttData, Encoding) ->
-    {ok, Code, _Headers, _Body} = test_util:request(
-        DocUri ++ "/" ++ AttName,
-        [{"Content-Encoding", Encoding}, {"Content-Type", "text/plain"}],
-        put,
-        AttData),
-    etap:is(
-        Code,
-        415,
-        "Couldn't create an already compressed attachment using the "
-        "unsupported encoding '" ++ Encoding ++ "'"
-    ),
-    ok.
-
-test_compressible_type_with_parameters() ->
-    {ok, Code, _Headers, _Body} = test_util:request(
-        db_url() ++ "/testdoc5/readme.txt",
-        [{"Content-Type", "text/plain; charset=UTF-8"}],
-        put,
-        test_text_data()),
-    etap:is(Code, 201, "Created text attachment with MIME type "
-        "'text/plain; charset=UTF-8' using the standalone api"),
-    {ok, Code2, Headers2, Body} = test_util:request(
-        db_url() ++ "/testdoc5/readme.txt",
-        [{"Accept-Encoding", "gzip"}],
-        get),
-    etap:is(Code2, 200, "HTTP response code is 200"),
-    Gziped = lists:member({"Content-Encoding", "gzip"}, Headers2),
-    etap:is(Gziped, true, "received body is gziped"),
-    Uncompressed = zlib:gunzip(iolist_to_binary(Body)),
-    etap:is(Uncompressed, test_text_data(), "received data is gzipped"),
-    {ok, Code3, _Headers3, Body3} = test_util:request(
-        db_url() ++ "/testdoc5?att_encoding_info=true",
-        [],
-        get),
-    etap:is(Code3, 200, "HTTP response code is 200"),
-    Json = ejson:decode(Body3),
-    {TextAttJson} = couch_util:get_nested_json_value(
-        Json,
-        [<<"_attachments">>, <<"readme.txt">>]
-    ),
-    TextAttLength = couch_util:get_value(<<"length">>, TextAttJson),
-    etap:is(
-        TextAttLength,
-        byte_size(test_text_data()),
-        "text attachment stub length matches the uncompressed length"
-    ),
-    TextAttEncoding = couch_util:get_value(<<"encoding">>, TextAttJson),
-    etap:is(
-        TextAttEncoding,
-        <<"gzip">>,
-        "text attachment stub has the encoding field set to gzip"
-    ),
-    TextAttEncLength = couch_util:get_value(<<"encoded_length">>, TextAttJson),
-    etap:is(
-        TextAttEncLength,
-        iolist_size(zlib:gzip(test_text_data())),
-        "text attachment stub encoded_length matches the compressed length"
-    ),
-    ok.
-
-test_png_data() ->
-    {ok, Data} = file:read_file(
-        test_util:source_file("share/www/image/logo.png")
-    ),
-    Data.
-
-test_text_data() ->
-    {ok, Data} = file:read_file(
-        test_util:source_file("README.rst")
-    ),
-    Data.

http://git-wip-us.apache.org/repos/asf/couchdb/blob/a36d6305/test/etap/Makefile.am
----------------------------------------------------------------------
diff --git a/test/etap/Makefile.am b/test/etap/Makefile.am
index f54e927..94ff6f2 100644
--- a/test/etap/Makefile.am
+++ b/test/etap/Makefile.am
@@ -36,7 +36,6 @@ fixture_files = \
     fixtures/test.couch
 
 tap_files = \
-    140-attachment-comp.t \
     150-invalid-view-seq.t \
     160-vhosts.t \
     170-os-daemons.es \


[20/36] couchdb commit: updated refs/heads/1963-eunit to 85f2750

Posted by kx...@apache.org.
Port 075-auth-cache.t etap test suite to eunit

Timeouts are removed.


Project: http://git-wip-us.apache.org/repos/asf/couchdb/repo
Commit: http://git-wip-us.apache.org/repos/asf/couchdb/commit/c14cf259
Tree: http://git-wip-us.apache.org/repos/asf/couchdb/tree/c14cf259
Diff: http://git-wip-us.apache.org/repos/asf/couchdb/diff/c14cf259

Branch: refs/heads/1963-eunit
Commit: c14cf25946e851d9618a0493373ca7def8b966c2
Parents: 5cdf7a0
Author: Alexander Shorin <kx...@apache.org>
Authored: Thu May 22 13:07:24 2014 +0400
Committer: Alexander Shorin <kx...@apache.org>
Committed: Tue Jun 3 15:14:26 2014 +0400

----------------------------------------------------------------------
 test/couchdb/Makefile.am                |   1 +
 test/couchdb/couch_auth_cache_tests.erl | 239 +++++++++++++++++++++++
 test/etap/075-auth-cache.t              | 276 ---------------------------
 test/etap/Makefile.am                   |   1 -
 4 files changed, 240 insertions(+), 277 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/couchdb/blob/c14cf259/test/couchdb/Makefile.am
----------------------------------------------------------------------
diff --git a/test/couchdb/Makefile.am b/test/couchdb/Makefile.am
index 2c6dd64..20c1309 100644
--- a/test/couchdb/Makefile.am
+++ b/test/couchdb/Makefile.am
@@ -31,6 +31,7 @@ eunit_files = \
     couchdb_views_tests.erl \
     couch_changes_tests.erl \
     couchdb_update_conflicts_tests.erl \
+    couch_auth_cache_tests.erl \
     test_request.erl \
     couchdb_tests.hrl
 

http://git-wip-us.apache.org/repos/asf/couchdb/blob/c14cf259/test/couchdb/couch_auth_cache_tests.erl
----------------------------------------------------------------------
diff --git a/test/couchdb/couch_auth_cache_tests.erl b/test/couchdb/couch_auth_cache_tests.erl
new file mode 100644
index 0000000..66b4c0c
--- /dev/null
+++ b/test/couchdb/couch_auth_cache_tests.erl
@@ -0,0 +1,239 @@
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+%   http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+-module(couch_auth_cache_tests).
+
+-include("../../src/couchdb/couch_db.hrl").
+-include("couchdb_tests.hrl").
+
+-define(ADMIN_USER, {user_ctx, #user_ctx{roles=[<<"_admin">>]}}).
+-define(SALT, <<"SALT">>).
+
+
+start() ->
+    couch_server_sup:start_link(?CONFIG_CHAIN),
+    ok.
+
+stop(_) ->
+    couch_server_sup:stop(),
+    ok.
+
+setup() ->
+    DbName = ?tempdb(),
+    couch_config:set("couch_httpd_auth", "authentication_db",
+                     ?b2l(DbName), false),
+    DbName.
+
+teardown(DbName) ->
+    ok = couch_server:delete(DbName, [?ADMIN_USER]),
+    ok.
+
+
+couch_auth_cache_test_() ->
+    {
+        "CouchDB auth cache tests",
+        {
+            setup,
+            fun start/0, fun stop/1,
+            {
+                foreach,
+                fun setup/0, fun teardown/1,
+                [
+                    fun should_get_nil_on_missed_cache/1,
+                    fun should_get_right_password_hash/1,
+                    fun should_ensure_doc_hash_equals_cached_one/1,
+                    fun should_update_password/1,
+                    fun should_cleanup_cache_after_userdoc_deletion/1,
+                    fun should_restore_cache_after_userdoc_recreation/1,
+                    fun should_drop_cache_on_auth_db_change/1,
+                    fun should_restore_cache_on_auth_db_change/1,
+                    fun should_recover_cache_after_shutdown/1
+                ]
+            }
+        }
+    }.
+
+
+should_get_nil_on_missed_cache(_) ->
+    ?_assertEqual(nil, couch_auth_cache:get_user_creds("joe")).
+
+should_get_right_password_hash(DbName) ->
+    ?_assert(begin
+        PasswordHash = hash_password("pass1"),
+        {ok, _} = update_user_doc(DbName, "joe", "pass1"),
+        Creds = couch_auth_cache:get_user_creds("joe"),
+        ?assertEqual(PasswordHash,
+                      couch_util:get_value(<<"password_sha">>, Creds)),
+        true
+    end).
+
+should_ensure_doc_hash_equals_cached_one(DbName) ->
+    ?_assert(begin
+        {ok, _} = update_user_doc(DbName, "joe", "pass1"),
+        Creds = couch_auth_cache:get_user_creds("joe"),
+
+        CachedHash = couch_util:get_value(<<"password_sha">>, Creds),
+        StoredHash = get_user_doc_password_sha(DbName, "joe"),
+        ?assertEqual(StoredHash, CachedHash),
+        true
+    end).
+
+should_update_password(DbName) ->
+    ?_assert(begin
+        PasswordHash = hash_password("pass2"),
+        {ok, Rev} = update_user_doc(DbName, "joe", "pass1"),
+        {ok, _} = update_user_doc(DbName, "joe", "pass2", Rev),
+        Creds = couch_auth_cache:get_user_creds("joe"),
+        ?assertEqual(PasswordHash,
+                      couch_util:get_value(<<"password_sha">>, Creds)),
+        true
+    end).
+
+should_cleanup_cache_after_userdoc_deletion(DbName) ->
+    ?_assert(begin
+        {ok, _} = update_user_doc(DbName, "joe", "pass1"),
+        delete_user_doc(DbName, "joe"),
+        ?assertEqual(nil, couch_auth_cache:get_user_creds("joe")),
+        true
+    end).
+
+should_restore_cache_after_userdoc_recreation(DbName) ->
+    ?_assert(begin
+        PasswordHash = hash_password("pass5"),
+        {ok, _} = update_user_doc(DbName, "joe", "pass1"),
+        delete_user_doc(DbName, "joe"),
+        ?assertEqual(nil, couch_auth_cache:get_user_creds("joe")),
+
+        {ok, _} = update_user_doc(DbName, "joe", "pass5"),
+        Creds = couch_auth_cache:get_user_creds("joe"),
+
+        ?assertEqual(PasswordHash,
+                      couch_util:get_value(<<"password_sha">>, Creds)),
+        true
+    end).
+
+should_drop_cache_on_auth_db_change(DbName) ->
+    ?_assert(begin
+        {ok, _} = update_user_doc(DbName, "joe", "pass1"),
+        full_commit(DbName),
+        couch_config:set("couch_httpd_auth", "authentication_db",
+                         ?b2l(?tempdb()), false),
+        ?assertEqual(nil, couch_auth_cache:get_user_creds("joe")),
+        true
+    end).
+
+should_restore_cache_on_auth_db_change(DbName) ->
+    ?_assert(begin
+        PasswordHash = hash_password("pass1"),
+        {ok, _} = update_user_doc(DbName, "joe", "pass1"),
+        Creds = couch_auth_cache:get_user_creds("joe"),
+        full_commit(DbName),
+
+        DbName1 = ?tempdb(),
+        couch_config:set("couch_httpd_auth", "authentication_db",
+                         ?b2l(DbName1), false),
+
+        {ok, _} = update_user_doc(DbName1, "joe", "pass5"),
+        full_commit(DbName1),
+
+        couch_config:set("couch_httpd_auth", "authentication_db",
+                         ?b2l(DbName), false),
+
+        Creds = couch_auth_cache:get_user_creds("joe"),
+        ?assertEqual(PasswordHash,
+                      couch_util:get_value(<<"password_sha">>, Creds)),
+        true
+    end).
+
+should_recover_cache_after_shutdown(DbName) ->
+    ?_assert(begin
+        PasswordHash = hash_password("pass2"),
+        {ok, Rev0} = update_user_doc(DbName, "joe", "pass1"),
+        {ok, Rev1} = update_user_doc(DbName, "joe", "pass2", Rev0),
+        full_commit(DbName),
+        shutdown_db(DbName),
+        {ok, Rev1} = get_doc_rev(DbName, "joe"),
+        ?assertEqual(PasswordHash, get_user_doc_password_sha(DbName, "joe")),
+        true
+    end).
+
+
+update_user_doc(DbName, UserName, Password) ->
+    update_user_doc(DbName, UserName, Password, nil).
+
+update_user_doc(DbName, UserName, Password, Rev) ->
+    User = iolist_to_binary(UserName),
+    Doc = couch_doc:from_json_obj({[
+        {<<"_id">>, <<"org.couchdb.user:", User/binary>>},
+        {<<"name">>, User},
+        {<<"type">>, <<"user">>},
+        {<<"salt">>, ?SALT},
+        {<<"password_sha">>, hash_password(Password)},
+        {<<"roles">>, []}
+    ] ++ case Rev of
+            nil -> [];
+            _ ->   [{<<"_rev">>, Rev}]
+         end
+    }),
+    {ok, AuthDb} = couch_db:open_int(DbName, [?ADMIN_USER]),
+    {ok, NewRev} = couch_db:update_doc(AuthDb, Doc, []),
+    ok = couch_db:close(AuthDb),
+    {ok, couch_doc:rev_to_str(NewRev)}.
+
+hash_password(Password) ->
+    ?l2b(couch_util:to_hex(crypto:sha(iolist_to_binary([Password, ?SALT])))).
+
+shutdown_db(DbName) ->
+    {ok, AuthDb} = couch_db:open_int(DbName, [?ADMIN_USER]),
+    ok = couch_db:close(AuthDb),
+    couch_util:shutdown_sync(AuthDb#db.main_pid),
+    ok = timer:sleep(1000).
+
+get_doc_rev(DbName, UserName) ->
+    DocId = iolist_to_binary([<<"org.couchdb.user:">>, UserName]),
+    {ok, AuthDb} = couch_db:open_int(DbName, [?ADMIN_USER]),
+    UpdateRev =
+    case couch_db:open_doc(AuthDb, DocId, []) of
+    {ok, Doc} ->
+        {Props} = couch_doc:to_json_obj(Doc, []),
+        couch_util:get_value(<<"_rev">>, Props);
+    {not_found, missing} ->
+        nil
+    end,
+    ok = couch_db:close(AuthDb),
+    {ok, UpdateRev}.
+
+get_user_doc_password_sha(DbName, UserName) ->
+    DocId = iolist_to_binary([<<"org.couchdb.user:">>, UserName]),
+    {ok, AuthDb} = couch_db:open_int(DbName, [?ADMIN_USER]),
+    {ok, Doc} = couch_db:open_doc(AuthDb, DocId, []),
+    ok = couch_db:close(AuthDb),
+    {Props} = couch_doc:to_json_obj(Doc, []),
+    couch_util:get_value(<<"password_sha">>, Props).
+
+delete_user_doc(DbName, UserName) ->
+    DocId = iolist_to_binary([<<"org.couchdb.user:">>, UserName]),
+    {ok, AuthDb} = couch_db:open_int(DbName, [?ADMIN_USER]),
+    {ok, Doc} = couch_db:open_doc(AuthDb, DocId, []),
+    {Props} = couch_doc:to_json_obj(Doc, []),
+    DeletedDoc = couch_doc:from_json_obj({[
+        {<<"_id">>, DocId},
+        {<<"_rev">>, couch_util:get_value(<<"_rev">>, Props)},
+        {<<"_deleted">>, true}
+    ]}),
+    {ok, _} = couch_db:update_doc(AuthDb, DeletedDoc, []),
+    ok = couch_db:close(AuthDb).
+
+full_commit(DbName) ->
+    {ok, AuthDb} = couch_db:open_int(DbName, [?ADMIN_USER]),
+    {ok, _} = couch_db:ensure_full_commit(AuthDb),
+    ok = couch_db:close(AuthDb).

http://git-wip-us.apache.org/repos/asf/couchdb/blob/c14cf259/test/etap/075-auth-cache.t
----------------------------------------------------------------------
diff --git a/test/etap/075-auth-cache.t b/test/etap/075-auth-cache.t
deleted file mode 100755
index 623884b..0000000
--- a/test/etap/075-auth-cache.t
+++ /dev/null
@@ -1,276 +0,0 @@
-#!/usr/bin/env escript
-%% -*- erlang -*-
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
--record(user_ctx, {
-    name = null,
-    roles = [],
-    handler
-}).
-
--record(db, {
-    main_pid = nil,
-    update_pid = nil,
-    compactor_pid = nil,
-    instance_start_time, % number of microsecs since jan 1 1970 as a binary string
-    fd,
-    updater_fd,
-    fd_ref_counter,
-    header,
-    committed_update_seq,
-    fulldocinfo_by_id_btree,
-    docinfo_by_seq_btree,
-    local_docs_btree,
-    update_seq,
-    name,
-    filepath,
-    validate_doc_funs = [],
-    security = [],
-    security_ptr = nil,
-    user_ctx = #user_ctx{},
-    waiting_delayed_commit = nil,
-    revs_limit = 1000,
-    fsync_options = [],
-    options = [],
-    compression,
-    before_doc_update = nil, % nil | fun(Doc, Db) -> NewDoc
-    after_doc_read = nil     % nil | fun(Doc, Db) -> NewDoc
-}).
-
-auth_db_name() -> <<"couch_test_auth_db">>.
-auth_db_2_name() -> <<"couch_test_auth_db_2">>.
-salt() -> <<"SALT">>.
-
-
-main(_) ->
-    test_util:init_code_path(),
-
-    etap:plan(19),
-    case (catch test()) of
-        ok ->
-            etap:end_tests();
-        Other ->
-            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
-            etap:bail(Other)
-    end,
-    ok.
-
-
-test() ->
-    couch_server_sup:start_link(test_util:config_files()),
-    OrigName = couch_config:get("couch_httpd_auth", "authentication_db"),
-    couch_config:set(
-        "couch_httpd_auth", "authentication_db",
-        binary_to_list(auth_db_name()), false),
-    delete_db(auth_db_name()),
-    delete_db(auth_db_2_name()),
-
-    test_auth_db_crash(),
-
-    couch_config:set("couch_httpd_auth", "authentication_db", OrigName, false),
-    delete_db(auth_db_name()),
-    delete_db(auth_db_2_name()),
-    couch_server_sup:stop(),
-    ok.
-
-
-test_auth_db_crash() ->
-    Creds0 = couch_auth_cache:get_user_creds("joe"),
-    etap:is(Creds0, nil, "Got nil when getting joe's credentials"),
-
-    etap:diag("Adding first version of Joe's user doc"),
-    PasswordHash1 = hash_password("pass1"),
-    {ok, Rev1} = update_user_doc(auth_db_name(), "joe", "pass1"),
-
-    Creds1 = couch_auth_cache:get_user_creds("joe"),
-    etap:is(is_list(Creds1), true, "Got joe's credentials from cache"),
-    etap:is(couch_util:get_value(<<"password_sha">>, Creds1), PasswordHash1,
-            "Cached credentials have the right password"),
-
-    etap:diag("Updating Joe's user doc password"),
-    PasswordHash2 = hash_password("pass2"),
-    {ok, _Rev2} = update_user_doc(auth_db_name(), "joe", "pass2", Rev1),
-
-    Creds2 = couch_auth_cache:get_user_creds("joe"),
-    etap:is(is_list(Creds2), true, "Got joe's credentials from cache"),
-    etap:is(couch_util:get_value(<<"password_sha">>, Creds2), PasswordHash2,
-            "Cached credentials have the new password"),
-
-    etap:diag("Shutting down the auth database process"),
-    shutdown_db(auth_db_name()),
-
-    {ok, UpdateRev} = get_doc_rev(auth_db_name(), "joe"),
-    PasswordHash3 = hash_password("pass3"),
-    {ok, _Rev3} = update_user_doc(auth_db_name(), "joe", "pass3", UpdateRev),
-
-    etap:is(get_user_doc_password_sha(auth_db_name(), "joe"),
-            PasswordHash3,
-            "Latest Joe's doc revision has the new password hash"),
-
-    Creds3 = couch_auth_cache:get_user_creds("joe"),
-    etap:is(is_list(Creds3), true, "Got joe's credentials from cache"),
-    etap:is(couch_util:get_value(<<"password_sha">>, Creds3), PasswordHash3,
-            "Cached credentials have the new password"),
-
-    etap:diag("Deleting Joe's user doc"),
-    delete_user_doc(auth_db_name(), "joe"),
-    Creds4 = couch_auth_cache:get_user_creds("joe"),
-    etap:is(nil, Creds4,
-            "Joe's credentials not found in cache after user doc was deleted"),
-
-    etap:diag("Adding new user doc for Joe"),
-    PasswordHash5 = hash_password("pass5"),
-    {ok, _NewRev1} = update_user_doc(auth_db_name(), "joe", "pass5"),
-
-    Creds5 = couch_auth_cache:get_user_creds("joe"),
-    etap:is(is_list(Creds5), true, "Got joe's credentials from cache"),
-    etap:is(couch_util:get_value(<<"password_sha">>, Creds5), PasswordHash5,
-            "Cached credentials have the right password"),
-
-    full_commit(auth_db_name()),
-
-    etap:diag("Changing the auth database"),
-    couch_config:set(
-        "couch_httpd_auth", "authentication_db",
-        binary_to_list(auth_db_2_name()), false),
-    ok = timer:sleep(500),
-
-    Creds6 = couch_auth_cache:get_user_creds("joe"),
-    etap:is(nil, Creds6,
-            "Joe's credentials not found in cache after auth database changed"),
-
-    etap:diag("Adding first version of Joe's user doc to new auth database"),
-    PasswordHash7 = hash_password("pass7"),
-    {ok, _} = update_user_doc(auth_db_2_name(), "joe", "pass7"),
-
-    Creds7 = couch_auth_cache:get_user_creds("joe"),
-    etap:is(is_list(Creds7), true, "Got joe's credentials from cache"),
-    etap:is(couch_util:get_value(<<"password_sha">>, Creds7), PasswordHash7,
-            "Cached credentials have the right password"),
-
-    etap:diag("Shutting down the auth database process"),
-    shutdown_db(auth_db_2_name()),
-
-    {ok, UpdateRev2} = get_doc_rev(auth_db_2_name(), "joe"),
-    PasswordHash8 = hash_password("pass8"),
-    {ok, _Rev8} = update_user_doc(auth_db_2_name(), "joe", "pass8", UpdateRev2),
-
-    etap:is(get_user_doc_password_sha(auth_db_2_name(), "joe"),
-            PasswordHash8,
-            "Latest Joe's doc revision has the new password hash"),
-
-    Creds8 = couch_auth_cache:get_user_creds("joe"),
-    etap:is(is_list(Creds8), true, "Got joe's credentials from cache"),
-    etap:is(couch_util:get_value(<<"password_sha">>, Creds8), PasswordHash8,
-            "Cached credentials have the new password"),
-
-    etap:diag("Changing the auth database again"),
-    couch_config:set(
-        "couch_httpd_auth", "authentication_db",
-        binary_to_list(auth_db_name()), false),
-    ok = timer:sleep(500),
-
-    Creds9 = couch_auth_cache:get_user_creds("joe"),
-    etap:is(Creds9, Creds5,
-            "Got same credentials as before the firt auth database change"),
-    etap:is(couch_util:get_value(<<"password_sha">>, Creds9), PasswordHash5,
-            "Cached credentials have the right password"),
-    ok.
-
-
-update_user_doc(DbName, UserName, Password) ->
-    update_user_doc(DbName, UserName, Password, nil).
-
-update_user_doc(DbName, UserName, Password, Rev) ->
-    User = iolist_to_binary(UserName),
-    Doc = couch_doc:from_json_obj({[
-        {<<"_id">>, <<"org.couchdb.user:", User/binary>>},
-        {<<"name">>, User},
-        {<<"type">>, <<"user">>},
-        {<<"salt">>, salt()},
-        {<<"password_sha">>, hash_password(Password)},
-        {<<"roles">>, []}
-    ] ++ case Rev of
-        nil -> [];
-        _ ->   [{<<"_rev">>, Rev}]
-    end}),
-    {ok, AuthDb} = open_auth_db(DbName),
-    {ok, NewRev} = couch_db:update_doc(AuthDb, Doc, []),
-    ok = couch_db:close(AuthDb),
-    {ok, couch_doc:rev_to_str(NewRev)}.
-
-
-hash_password(Password) ->
-    list_to_binary(
-        couch_util:to_hex(crypto:sha(iolist_to_binary([Password, salt()])))).
-
-
-shutdown_db(DbName) ->
-    {ok, AuthDb} = open_auth_db(DbName),
-    ok = couch_db:close(AuthDb),
-    couch_util:shutdown_sync(AuthDb#db.main_pid),
-    ok = timer:sleep(1000).
-
-
-get_doc_rev(DbName, UserName) ->
-    DocId = iolist_to_binary([<<"org.couchdb.user:">>, UserName]),
-    {ok, AuthDb} = open_auth_db(DbName),
-    UpdateRev =
-    case couch_db:open_doc(AuthDb, DocId, []) of
-    {ok, Doc} ->
-        {Props} = couch_doc:to_json_obj(Doc, []),
-        couch_util:get_value(<<"_rev">>, Props);
-    {not_found, missing} ->
-        nil
-    end,
-    ok = couch_db:close(AuthDb),
-    {ok, UpdateRev}.
-
-
-get_user_doc_password_sha(DbName, UserName) ->
-    DocId = iolist_to_binary([<<"org.couchdb.user:">>, UserName]),
-    {ok, AuthDb} = open_auth_db(DbName),
-    {ok, Doc} = couch_db:open_doc(AuthDb, DocId, []),
-    ok = couch_db:close(AuthDb),
-    {Props} = couch_doc:to_json_obj(Doc, []),
-    couch_util:get_value(<<"password_sha">>, Props).
-
-
-delete_user_doc(DbName, UserName) ->
-    DocId = iolist_to_binary([<<"org.couchdb.user:">>, UserName]),
-    {ok, AuthDb} = open_auth_db(DbName),
-    {ok, Doc} = couch_db:open_doc(AuthDb, DocId, []),
-    {Props} = couch_doc:to_json_obj(Doc, []),
-    DeletedDoc = couch_doc:from_json_obj({[
-        {<<"_id">>, DocId},
-        {<<"_rev">>, couch_util:get_value(<<"_rev">>, Props)},
-        {<<"_deleted">>, true}
-    ]}),
-    {ok, _} = couch_db:update_doc(AuthDb, DeletedDoc, []),
-    ok = couch_db:close(AuthDb).
-
-
-full_commit(DbName) ->
-    {ok, AuthDb} = open_auth_db(DbName),
-    {ok, _} = couch_db:ensure_full_commit(AuthDb),
-    ok = couch_db:close(AuthDb).
-
-
-open_auth_db(DbName) ->
-    couch_db:open_int(
-        DbName, [{user_ctx, #user_ctx{roles = [<<"_admin">>]}}]).
-
-
-delete_db(Name) ->
-    couch_server:delete(
-        Name, [{user_ctx, #user_ctx{roles = [<<"_admin">>]}}]).

http://git-wip-us.apache.org/repos/asf/couchdb/blob/c14cf259/test/etap/Makefile.am
----------------------------------------------------------------------
diff --git a/test/etap/Makefile.am b/test/etap/Makefile.am
index 446a3f3..3673ef9 100644
--- a/test/etap/Makefile.am
+++ b/test/etap/Makefile.am
@@ -36,7 +36,6 @@ fixture_files = \
     fixtures/test.couch
 
 tap_files = \
-    075-auth-cache.t \
     076-file-compression.t \
     077-couch-db-fast-db-delete-create.t \
     080-config-get-set.t \


[17/36] couchdb commit: updated refs/heads/1963-eunit to 85f2750

Posted by kx...@apache.org.
Port 073-changes.t etap test suite to eunit

For heartbeats test they don't being counted anymore since their
amount is heavy depends from the overall system performance and some
assertions may fail or not because of that. Instead of this, we just
ensure that their amount is going to increase over the time.


Project: http://git-wip-us.apache.org/repos/asf/couchdb/repo
Commit: http://git-wip-us.apache.org/repos/asf/couchdb/commit/b3379023
Tree: http://git-wip-us.apache.org/repos/asf/couchdb/tree/b3379023
Diff: http://git-wip-us.apache.org/repos/asf/couchdb/diff/b3379023

Branch: refs/heads/1963-eunit
Commit: b337902398ef621bfae0137e1661383da1c77c0f
Parents: d36c798
Author: Alexander Shorin <kx...@apache.org>
Authored: Tue May 20 07:19:22 2014 +0400
Committer: Alexander Shorin <kx...@apache.org>
Committed: Tue Jun 3 15:14:25 2014 +0400

----------------------------------------------------------------------
 test/couchdb/Makefile.am             |   1 +
 test/couchdb/couch_changes_tests.erl | 603 ++++++++++++++++++++++++++++++
 test/etap/073-changes.t              | 558 ---------------------------
 test/etap/Makefile.am                |   1 -
 4 files changed, 604 insertions(+), 559 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/couchdb/blob/b3379023/test/couchdb/Makefile.am
----------------------------------------------------------------------
diff --git a/test/couchdb/Makefile.am b/test/couchdb/Makefile.am
index 8d45866..58429e7 100644
--- a/test/couchdb/Makefile.am
+++ b/test/couchdb/Makefile.am
@@ -29,6 +29,7 @@ eunit_files = \
     couch_key_tree_tests.erl \
     couch_db_tests.erl \
     couchdb_views_tests.erl \
+    couch_changes_tests.erl \
     test_request.erl \
     couchdb_tests.hrl
 

http://git-wip-us.apache.org/repos/asf/couchdb/blob/b3379023/test/couchdb/couch_changes_tests.erl
----------------------------------------------------------------------
diff --git a/test/couchdb/couch_changes_tests.erl b/test/couchdb/couch_changes_tests.erl
new file mode 100644
index 0000000..67d0d45
--- /dev/null
+++ b/test/couchdb/couch_changes_tests.erl
@@ -0,0 +1,603 @@
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+%   http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+-module(couch_changes_tests).
+
+-include_lib("../../src/couchdb/couch_db.hrl").
+-include_lib("couchdb_tests.hrl").
+
+-define(ADMIN_USER, {user_ctx, #user_ctx{roles = [<<"_admin">>]}}).
+-define(TIMEOUT, 3000).
+-define(TEST_TIMEOUT, 10000).
+
+
+-record(row, {
+    id,
+    seq,
+    deleted = false
+}).
+
+
+setup_app() ->
+    couch_server_sup:start_link(?CONFIG_CHAIN),
+    ok.
+
+teardown_app(_) ->
+    couch_server_sup:stop(),
+    ok.
+
+setup() ->
+    DbName = ?tempdb(),
+    {ok, Db} = create_db(DbName),
+    Revs = [R || {ok, R} <- [
+        save_doc(Db, {[{<<"_id">>, <<"doc1">>}]}),
+        save_doc(Db, {[{<<"_id">>, <<"doc2">>}]}),
+        save_doc(Db, {[{<<"_id">>, <<"doc3">>}]}),
+        save_doc(Db, {[{<<"_id">>, <<"doc4">>}]}),
+        save_doc(Db, {[{<<"_id">>, <<"doc5">>}]})
+    ]],
+    Rev = lists:nth(3, Revs),
+    {ok, Rev1} = save_doc(Db, {[{<<"_id">>, <<"doc3">>}, {<<"_rev">>, Rev}]}),
+    Revs1 = Revs ++ [Rev1],
+    Revs2 = Revs1 ++ [R || {ok, R} <- [
+        save_doc(Db, {[{<<"_id">>, <<"doc6">>}]}),
+        save_doc(Db, {[{<<"_id">>, <<"_design/foo">>}]}),
+        save_doc(Db, {[{<<"_id">>, <<"doc7">>}]}),
+        save_doc(Db, {[{<<"_id">>, <<"doc8">>}]})
+    ]],
+    {DbName, list_to_tuple(Revs2)}.
+
+teardown({DbName, _}) ->
+    delete_db(DbName),
+    ok.
+
+
+changes_test_() ->
+    {
+        "Changes feeed",
+        {
+            setup,
+            fun setup_app/0, fun teardown_app/1,
+            [
+                filter_by_doc_id(),
+                filter_by_design(),
+                continuous_feed(),
+                filter_by_custom_function()
+            ]
+        }
+    }.
+
+filter_by_doc_id() ->
+    {
+        "Filter _doc_id",
+        {
+            foreach,
+            fun setup/0, fun teardown/1,
+            [
+                fun should_filter_by_specific_doc_ids/1,
+                fun should_filter_by_specific_doc_ids_descending/1,
+                fun should_filter_by_specific_doc_ids_with_since/1,
+                fun should_filter_by_specific_doc_ids_no_result/1,
+                fun should_handle_deleted_docs/1
+            ]
+        }
+    }.
+
+filter_by_design() ->
+    {
+        "Filter _design",
+        {
+            foreach,
+            fun setup/0, fun teardown/1,
+            [
+                fun should_emit_only_design_documents/1
+            ]
+        }
+    }.
+
+filter_by_custom_function() ->
+    {
+        "Filter function",
+        {
+            foreach,
+            fun setup/0, fun teardown/1,
+            [
+                fun should_receive_heartbeats/1
+            ]
+        }
+    }.
+
+continuous_feed() ->
+    {
+        "Continuous Feed",
+        {
+            foreach,
+            fun setup/0, fun teardown/1,
+            [
+                fun should_filter_continuous_feed_by_specific_doc_ids/1
+            ]
+        }
+    }.
+
+
+should_filter_by_specific_doc_ids({DbName, _}) ->
+    ?_test(
+        begin
+            ChangesArgs = #changes_args{
+                filter = "_doc_ids"
+            },
+            DocIds = [<<"doc3">>, <<"doc4">>, <<"doc9999">>],
+            Req = {json_req, {[{<<"doc_ids">>, DocIds}]}},
+            Consumer = spawn_consumer(DbName, ChangesArgs, Req),
+
+            {Rows, LastSeq} = wait_finished(Consumer),
+            {ok, Db} = couch_db:open_int(DbName, []),
+            UpSeq = couch_db:get_update_seq(Db),
+            couch_db:close(Db),
+            stop(Consumer),
+
+            ?assertEqual(2, length(Rows)),
+            [#row{seq = Seq1, id = Id1}, #row{seq = Seq2, id = Id2}] = Rows,
+            ?assertEqual(<<"doc4">>, Id1),
+            ?assertEqual(4, Seq1),
+            ?assertEqual(<<"doc3">>, Id2),
+            ?assertEqual(6, Seq2),
+            ?assertEqual(UpSeq, LastSeq)
+        end).
+
+should_filter_by_specific_doc_ids_descending({DbName, _}) ->
+    ?_test(
+        begin
+            ChangesArgs = #changes_args{
+                filter = "_doc_ids",
+                dir = rev
+            },
+            DocIds = [<<"doc3">>, <<"doc4">>, <<"doc9999">>],
+            Req = {json_req, {[{<<"doc_ids">>, DocIds}]}},
+            Consumer = spawn_consumer(DbName, ChangesArgs, Req),
+
+            {Rows, LastSeq} = wait_finished(Consumer),
+            {ok, Db} = couch_db:open_int(DbName, []),
+            couch_db:close(Db),
+            stop(Consumer),
+
+            ?assertEqual(2, length(Rows)),
+            [#row{seq = Seq1, id = Id1}, #row{seq = Seq2, id = Id2}] = Rows,
+            ?assertEqual(<<"doc3">>, Id1),
+            ?assertEqual(6, Seq1),
+            ?assertEqual(<<"doc4">>, Id2),
+            ?assertEqual(4, Seq2),
+            ?assertEqual(4, LastSeq)
+        end).
+
+should_filter_by_specific_doc_ids_with_since({DbName, _}) ->
+    ?_test(
+        begin
+            ChangesArgs = #changes_args{
+                filter = "_doc_ids",
+                since = 5
+            },
+            DocIds = [<<"doc3">>, <<"doc4">>, <<"doc9999">>],
+            Req = {json_req, {[{<<"doc_ids">>, DocIds}]}},
+            Consumer = spawn_consumer(DbName, ChangesArgs, Req),
+
+            {Rows, LastSeq} = wait_finished(Consumer),
+            {ok, Db} = couch_db:open_int(DbName, []),
+            UpSeq = couch_db:get_update_seq(Db),
+            couch_db:close(Db),
+            stop(Consumer),
+
+            ?assertEqual(1, length(Rows)),
+            [#row{seq = Seq1, id = Id1}] = Rows,
+            ?assertEqual(<<"doc3">>, Id1),
+            ?assertEqual(6, Seq1),
+            ?assertEqual(UpSeq, LastSeq)
+        end).
+
+should_filter_by_specific_doc_ids_no_result({DbName, _}) ->
+    ?_test(
+        begin
+            ChangesArgs = #changes_args{
+                filter = "_doc_ids",
+                since = 6
+            },
+            DocIds = [<<"doc3">>, <<"doc4">>, <<"doc9999">>],
+            Req = {json_req, {[{<<"doc_ids">>, DocIds}]}},
+            Consumer = spawn_consumer(DbName, ChangesArgs, Req),
+
+            {Rows, LastSeq} = wait_finished(Consumer),
+            {ok, Db} = couch_db:open_int(DbName, []),
+            UpSeq = couch_db:get_update_seq(Db),
+            couch_db:close(Db),
+            stop(Consumer),
+
+            ?assertEqual(0, length(Rows)),
+            ?assertEqual(UpSeq, LastSeq)
+        end).
+
+should_handle_deleted_docs({DbName, Revs}) ->
+    ?_test(
+        begin
+            Rev3_2 = element(6, Revs),
+            {ok, Db} = couch_db:open_int(DbName, []),
+            {ok, _} = save_doc(
+                Db,
+                {[{<<"_id">>, <<"doc3">>},
+                  {<<"_deleted">>, true},
+                  {<<"_rev">>, Rev3_2}]}),
+
+            ChangesArgs = #changes_args{
+                filter = "_doc_ids",
+                since = 9
+            },
+            DocIds = [<<"doc3">>, <<"doc4">>, <<"doc9999">>],
+            Req = {json_req, {[{<<"doc_ids">>, DocIds}]}},
+            Consumer = spawn_consumer(DbName, ChangesArgs, Req),
+
+            {Rows, LastSeq} = wait_finished(Consumer),
+            couch_db:close(Db),
+            stop(Consumer),
+
+            ?assertEqual(1, length(Rows)),
+            ?assertMatch(
+                [#row{seq = LastSeq, id = <<"doc3">>, deleted = true}],
+                Rows
+            ),
+            ?assertEqual(11, LastSeq)
+        end).
+
+should_filter_continuous_feed_by_specific_doc_ids({DbName, Revs}) ->
+    ?_test(
+        begin
+            {ok, Db} = couch_db:open_int(DbName, []),
+            ChangesArgs = #changes_args{
+                filter = "_doc_ids",
+                feed = "continuous"
+            },
+            DocIds = [<<"doc3">>, <<"doc4">>, <<"doc9999">>],
+            Req = {json_req, {[{<<"doc_ids">>, DocIds}]}},
+            Consumer = spawn_consumer(DbName, ChangesArgs, Req),
+            pause(Consumer),
+
+            Rows = get_rows(Consumer),
+            ?assertEqual(2, length(Rows)),
+            [#row{seq = Seq1, id = Id1}, #row{seq = Seq2, id = Id2}] = Rows,
+            ?assertEqual(<<"doc4">>, Id1),
+            ?assertEqual(4, Seq1),
+            ?assertEqual(<<"doc3">>, Id2),
+            ?assertEqual(6, Seq2),
+
+            clear_rows(Consumer),
+            {ok, _Rev9} = save_doc(Db, {[{<<"_id">>, <<"doc9">>}]}),
+            {ok, _Rev10} = save_doc(Db, {[{<<"_id">>, <<"doc10">>}]}),
+            unpause(Consumer),
+            pause(Consumer),
+            ?assertEqual([], get_rows(Consumer)),
+
+            Rev4 = element(4, Revs),
+            Rev3_2 = element(6, Revs),
+            {ok, Rev4_2} = save_doc(Db, {[{<<"_id">>, <<"doc4">>},
+                                          {<<"_rev">>, Rev4}]}),
+            {ok, _} = save_doc(Db, {[{<<"_id">>, <<"doc11">>}]}),
+            {ok, _} = save_doc(Db, {[{<<"_id">>, <<"doc4">>},
+                                     {<<"_rev">>, Rev4_2}]}),
+            {ok, _} = save_doc(Db, {[{<<"_id">>, <<"doc12">>}]}),
+            {ok, Rev3_3} = save_doc(Db, {[{<<"_id">>, <<"doc3">>},
+                                          {<<"_rev">>, Rev3_2}]}),
+            unpause(Consumer),
+            pause(Consumer),
+
+            NewRows = get_rows(Consumer),
+            ?assertEqual(2, length(NewRows)),
+            [Row14, Row16] = NewRows,
+            ?assertEqual(<<"doc4">>, Row14#row.id),
+            ?assertEqual(15, Row14#row.seq),
+            ?assertEqual(<<"doc3">>, Row16#row.id),
+            ?assertEqual(17, Row16#row.seq),
+
+            clear_rows(Consumer),
+            {ok, _Rev3_4} = save_doc(Db, {[{<<"_id">>, <<"doc3">>},
+                                           {<<"_rev">>, Rev3_3}]}),
+            unpause(Consumer),
+            pause(Consumer),
+
+            FinalRows = get_rows(Consumer),
+
+            unpause(Consumer),
+            stop(Consumer),
+
+            ?assertMatch([#row{seq = 18, id = <<"doc3">>}], FinalRows)
+        end).
+
+should_emit_only_design_documents({DbName, Revs}) ->
+    ?_test(
+        begin
+            ChangesArgs = #changes_args{
+                filter = "_design"
+            },
+            Consumer = spawn_consumer(DbName, ChangesArgs, {json_req, null}),
+
+            {Rows, LastSeq} = wait_finished(Consumer),
+            {ok, Db} = couch_db:open_int(DbName, []),
+            UpSeq = couch_db:get_update_seq(Db),
+            couch_db:close(Db),
+
+            ?assertEqual(1, length(Rows)),
+            ?assertEqual(UpSeq, LastSeq),
+            ?assertEqual([#row{seq = 8, id = <<"_design/foo">>}], Rows),
+
+            stop(Consumer),
+
+            {ok, Db2} = couch_db:open_int(DbName, [?ADMIN_USER]),
+            {ok, _} = save_doc(Db2, {[{<<"_id">>, <<"_design/foo">>},
+                                      {<<"_rev">>, element(8, Revs)},
+                                      {<<"_deleted">>, true}]}),
+
+            Consumer2 = spawn_consumer(DbName, ChangesArgs, {json_req, null}),
+
+            {Rows2, LastSeq2} = wait_finished(Consumer2),
+            UpSeq2 = UpSeq + 1,
+            couch_db:close(Db2),
+
+            ?assertEqual(1, length(Rows2)),
+            ?assertEqual(UpSeq2, LastSeq2),
+            ?assertEqual([#row{seq = 11,
+                               id = <<"_design/foo">>,
+                               deleted = true}],
+                          Rows2)
+        end).
+
+should_receive_heartbeats(_) ->
+    {timeout, ?TEST_TIMEOUT div 1000,
+     ?_test(
+         begin
+             DbName = ?tempdb(),
+             Timeout = 100,
+             {ok, Db} = create_db(DbName),
+
+             {ok, _} = save_doc(Db, {[
+                 {<<"_id">>, <<"_design/filtered">>},
+                 {<<"language">>, <<"javascript">>},
+                     {<<"filters">>, {[
+                         {<<"foo">>, <<"function(doc) {
+                             return ['doc10', 'doc11', 'doc12'].indexOf(doc._id) != -1;}">>
+                     }]}}
+             ]}),
+
+             ChangesArgs = #changes_args{
+                 filter = "filtered/foo",
+                 feed = "continuous",
+                 timeout = 10000,
+                 heartbeat = 1000
+             },
+             Consumer = spawn_consumer(DbName, ChangesArgs, {json_req, null}),
+
+             {ok, _Rev1} = save_doc(Db, {[{<<"_id">>, <<"doc1">>}]}),
+             timer:sleep(Timeout),
+             {ok, _Rev2} = save_doc(Db, {[{<<"_id">>, <<"doc2">>}]}),
+             timer:sleep(Timeout),
+             {ok, _Rev3} = save_doc(Db, {[{<<"_id">>, <<"doc3">>}]}),
+             timer:sleep(Timeout),
+             {ok, _Rev4} = save_doc(Db, {[{<<"_id">>, <<"doc4">>}]}),
+             timer:sleep(Timeout),
+             {ok, _Rev5} = save_doc(Db, {[{<<"_id">>, <<"doc5">>}]}),
+             timer:sleep(Timeout),
+             {ok, _Rev6} = save_doc(Db, {[{<<"_id">>, <<"doc6">>}]}),
+             timer:sleep(Timeout),
+             {ok, _Rev7} = save_doc(Db, {[{<<"_id">>, <<"doc7">>}]}),
+             timer:sleep(Timeout),
+             {ok, _Rev8} = save_doc(Db, {[{<<"_id">>, <<"doc8">>}]}),
+             timer:sleep(Timeout),
+             {ok, _Rev9} = save_doc(Db, {[{<<"_id">>, <<"doc9">>}]}),
+
+             Heartbeats = get_heartbeats(Consumer),
+             ?assert(Heartbeats > 0),
+
+             {ok, _Rev10} = save_doc(Db, {[{<<"_id">>, <<"doc10">>}]}),
+             timer:sleep(Timeout),
+             {ok, _Rev11} = save_doc(Db, {[{<<"_id">>, <<"doc11">>}]}),
+             timer:sleep(Timeout),
+             {ok, _Rev12} = save_doc(Db, {[{<<"_id">>, <<"doc12">>}]}),
+
+             Heartbeats2 = get_heartbeats(Consumer),
+             ?assert(Heartbeats2 > Heartbeats),
+
+             Rows = get_rows(Consumer),
+             ?assertEqual(3, length(Rows)),
+
+             {ok, _Rev13} = save_doc(Db, {[{<<"_id">>, <<"doc13">>}]}),
+             timer:sleep(Timeout),
+             {ok, _Rev14} = save_doc(Db, {[{<<"_id">>, <<"doc14">>}]}),
+             timer:sleep(Timeout),
+
+             Heartbeats3 = get_heartbeats(Consumer),
+             ?assert(Heartbeats3 > Heartbeats2)
+        end)}.
+
+
+save_doc(Db, Json) ->
+    Doc = couch_doc:from_json_obj(Json),
+    {ok, Rev} = couch_db:update_doc(Db, Doc, []),
+    {ok, couch_doc:rev_to_str(Rev)}.
+
+get_rows(Consumer) ->
+    Ref = make_ref(),
+    Consumer ! {get_rows, Ref},
+    Resp = receive
+        {rows, Ref, Rows} ->
+            Rows
+    after ?TIMEOUT ->
+        timeout
+    end,
+    ?assertNotMatch(timeout, Resp),
+    Resp.
+
+get_heartbeats(Consumer) ->
+    Ref = make_ref(),
+    Consumer ! {get_heartbeats, Ref},
+    Resp = receive
+        {hearthbeats, Ref, HeartBeats} ->
+            HeartBeats
+    after ?TIMEOUT ->
+        timeout
+    end,
+    ?assertNotMatch(timeout, Resp),
+    Resp.
+
+clear_rows(Consumer) ->
+    Ref = make_ref(),
+    Consumer ! {reset, Ref},
+    Resp = receive
+        {ok, Ref} ->
+            ok
+    after ?TIMEOUT ->
+        timeout
+    end,
+    ?assertNotMatch(timeout, Resp),
+    Resp.
+
+stop(Consumer) ->
+    Ref = make_ref(),
+    Consumer ! {stop, Ref},
+    Resp = receive
+        {ok, Ref} ->
+            ok
+    after ?TIMEOUT ->
+        timeout
+    end,
+    ?assertNotMatch(timeout, Resp),
+    Resp.
+
+pause(Consumer) ->
+    Ref = make_ref(),
+    Consumer ! {pause, Ref},
+    Resp = receive
+        {paused, Ref} ->
+            ok
+    after ?TIMEOUT ->
+        timeout
+    end,
+    ?assertNotMatch(timeout, Resp),
+    Resp.
+
+unpause(Consumer) ->
+    Ref = make_ref(),
+    Consumer ! {continue, Ref},
+    Resp = receive
+        {ok, Ref} ->
+            ok
+    after ?TIMEOUT ->
+       timeout
+    end,
+    ?assertNotMatch(timeout, Resp),
+    Resp.
+
+wait_finished(_Consumer) ->
+    Resp = receive
+        {consumer_finished, Rows, LastSeq} ->
+            {Rows, LastSeq}
+    after ?TIMEOUT ->
+        timeout
+    end,
+    ?assertNotMatch(timeout, Resp),
+    Resp.
+
+spawn_consumer(DbName, ChangesArgs0, Req) ->
+    Parent = self(),
+    spawn(fun() ->
+        put(heartbeat_count, 0),
+        Callback = fun
+            ({change, {Change}, _}, _, Acc) ->
+                Id = couch_util:get_value(<<"id">>, Change),
+                Seq = couch_util:get_value(<<"seq">>, Change),
+                Del = couch_util:get_value(<<"deleted">>, Change, false),
+                [#row{id = Id, seq = Seq, deleted = Del} | Acc];
+            ({stop, LastSeq}, _, Acc) ->
+                Parent ! {consumer_finished, lists:reverse(Acc), LastSeq},
+                stop_loop(Parent, Acc);
+            (timeout, _, Acc) ->
+                put(heartbeat_count, get(heartbeat_count) + 1),
+                maybe_pause(Parent, Acc);
+            (_, _, Acc) ->
+                maybe_pause(Parent, Acc)
+        end,
+        {ok, Db} = couch_db:open_int(DbName, []),
+        ChangesArgs = case (ChangesArgs0#changes_args.timeout =:= undefined)
+            andalso (ChangesArgs0#changes_args.heartbeat =:= undefined) of
+            true ->
+                ChangesArgs0#changes_args{timeout = 10, heartbeat = 10};
+            false ->
+                ChangesArgs0
+        end,
+        FeedFun = couch_changes:handle_changes(ChangesArgs, Req, Db),
+        try
+            FeedFun({Callback, []})
+        catch throw:{stop, _} ->
+            ok
+        end,
+        catch couch_db:close(Db)
+    end).
+
+maybe_pause(Parent, Acc) ->
+    receive
+        {get_rows, Ref} ->
+            Parent ! {rows, Ref, lists:reverse(Acc)},
+            maybe_pause(Parent, Acc);
+        {get_heartbeats, Ref} ->
+            Parent ! {hearthbeats, Ref, get(heartbeat_count)},
+            maybe_pause(Parent, Acc);
+        {reset, Ref} ->
+            Parent ! {ok, Ref},
+            maybe_pause(Parent, []);
+        {pause, Ref} ->
+            Parent ! {paused, Ref},
+            pause_loop(Parent, Acc);
+        {stop, Ref} ->
+            Parent ! {ok, Ref},
+            throw({stop, Acc});
+        V ->
+            ?debugFmt("WTF: ~p~n", [V])
+    after 0 ->
+        Acc
+    end.
+
+pause_loop(Parent, Acc) ->
+    receive
+        {stop, Ref} ->
+            Parent ! {ok, Ref},
+            throw({stop, Acc});
+        {reset, Ref} ->
+            Parent ! {ok, Ref},
+            pause_loop(Parent, []);
+        {continue, Ref} ->
+            Parent ! {ok, Ref},
+            Acc;
+        {get_rows, Ref} ->
+            Parent ! {rows, Ref, lists:reverse(Acc)},
+            pause_loop(Parent, Acc)
+    end.
+
+stop_loop(Parent, Acc) ->
+    receive
+        {get_rows, Ref} ->
+            Parent ! {rows, Ref, lists:reverse(Acc)},
+            stop_loop(Parent, Acc);
+        {stop, Ref} ->
+            Parent ! {ok, Ref},
+            Acc
+    end.
+
+create_db(DbName) ->
+    couch_db:create(DbName, [?ADMIN_USER, overwrite]).
+
+delete_db(DbName) ->
+    ok = couch_server:delete(DbName, [?ADMIN_USER]).

http://git-wip-us.apache.org/repos/asf/couchdb/blob/b3379023/test/etap/073-changes.t
----------------------------------------------------------------------
diff --git a/test/etap/073-changes.t b/test/etap/073-changes.t
deleted file mode 100755
index d632c2f..0000000
--- a/test/etap/073-changes.t
+++ /dev/null
@@ -1,558 +0,0 @@
-#!/usr/bin/env escript
-%% -*- erlang -*-
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
-% Verify that compacting databases that are being used as the source or
-% target of a replication doesn't affect the replication and that the
-% replication doesn't hold their reference counters forever.
-
--record(user_ctx, {
-    name = null,
-    roles = [],
-    handler
-}).
-
--record(changes_args, {
-    feed = "normal",
-    dir = fwd,
-    since = 0,
-    limit = 1000000000000000,
-    style = main_only,
-    heartbeat,
-    timeout,
-    filter = "",
-    filter_fun,
-    filter_args = [],
-    include_docs = false,
-    doc_options = [],
-    conflicts = false,
-    db_open_options = []
-}).
-
--record(row, {
-    id,
-    seq,
-    deleted = false
-}).
-
-
-test_db_name() -> <<"couch_test_changes">>.
-
-
-main(_) ->
-    test_util:init_code_path(),
-
-    etap:plan(43),
-    case (catch test()) of
-        ok ->
-            etap:end_tests();
-        Other ->
-            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
-            etap:bail(Other)
-    end,
-    ok.
-
-
-test() ->
-    couch_server_sup:start_link(test_util:config_files()),
-
-    test_by_doc_ids(),
-    test_by_doc_ids_with_since(),
-    test_by_doc_ids_continuous(),
-    test_design_docs_only(),
-    test_heartbeat(),
-
-    couch_server_sup:stop(),
-    ok.
-
-
-test_by_doc_ids() ->
-    {ok, Db} = create_db(test_db_name()),
-
-    {ok, _Rev1} = save_doc(Db, {[{<<"_id">>, <<"doc1">>}]}),
-    {ok, _Rev2} = save_doc(Db, {[{<<"_id">>, <<"doc2">>}]}),
-    {ok, Rev3} = save_doc(Db, {[{<<"_id">>, <<"doc3">>}]}),
-    {ok, _Rev4} = save_doc(Db, {[{<<"_id">>, <<"doc4">>}]}),
-    {ok, _Rev5} = save_doc(Db, {[{<<"_id">>, <<"doc5">>}]}),
-    {ok, _Rev3_2} = save_doc(Db, {[{<<"_id">>, <<"doc3">>}, {<<"_rev">>, Rev3}]}),
-    {ok, _Rev6} = save_doc(Db, {[{<<"_id">>, <<"doc6">>}]}),
-    {ok, _Rev7} = save_doc(Db, {[{<<"_id">>, <<"doc7">>}]}),
-    {ok, _Rev8} = save_doc(Db, {[{<<"_id">>, <<"doc8">>}]}),
-
-    etap:diag("Folding changes in ascending order with _doc_ids filter"),
-    ChangesArgs = #changes_args{
-        filter = "_doc_ids"
-    },
-    DocIds = [<<"doc3">>, <<"doc4">>, <<"doc9999">>],
-    Req = {json_req, {[{<<"doc_ids">>, DocIds}]}},
-    Consumer = spawn_consumer(test_db_name(), ChangesArgs, Req),
-
-    {Rows, LastSeq} = wait_finished(Consumer),
-    {ok, Db2} = couch_db:open_int(test_db_name(), []),
-    UpSeq = couch_db:get_update_seq(Db2),
-    couch_db:close(Db2),
-    etap:is(length(Rows), 2, "Received 2 changes rows"),
-    etap:is(LastSeq, UpSeq, "LastSeq is same as database update seq number"),
-    [#row{seq = Seq1, id = Id1}, #row{seq = Seq2, id = Id2}] = Rows,
-    etap:is(Id1, <<"doc4">>, "First row is for doc doc4"),
-    etap:is(Seq1, 4, "First row has seq 4"),
-    etap:is(Id2, <<"doc3">>, "Second row is for doc doc3"),
-    etap:is(Seq2, 6, "Second row has seq 6"),
-
-    stop(Consumer),
-    etap:diag("Folding changes in descending order with _doc_ids filter"),
-    ChangesArgs2 = #changes_args{
-        filter = "_doc_ids",
-        dir = rev
-    },
-    Consumer2 = spawn_consumer(test_db_name(), ChangesArgs2, Req),
-
-    {Rows2, LastSeq2} = wait_finished(Consumer2),
-    etap:is(length(Rows2), 2, "Received 2 changes rows"),
-    etap:is(LastSeq2, 4, "LastSeq is 4"),
-    [#row{seq = Seq1_2, id = Id1_2}, #row{seq = Seq2_2, id = Id2_2}] = Rows2,
-    etap:is(Id1_2, <<"doc3">>, "First row is for doc doc3"),
-    etap:is(Seq1_2, 6, "First row has seq 4"),
-    etap:is(Id2_2, <<"doc4">>, "Second row is for doc doc4"),
-    etap:is(Seq2_2, 4, "Second row has seq 6"),
-
-    stop(Consumer2),
-    delete_db(Db).
-
-
-test_by_doc_ids_with_since() ->
-    {ok, Db} = create_db(test_db_name()),
-
-    {ok, _Rev1} = save_doc(Db, {[{<<"_id">>, <<"doc1">>}]}),
-    {ok, _Rev2} = save_doc(Db, {[{<<"_id">>, <<"doc2">>}]}),
-    {ok, Rev3} = save_doc(Db, {[{<<"_id">>, <<"doc3">>}]}),
-    {ok, _Rev4} = save_doc(Db, {[{<<"_id">>, <<"doc4">>}]}),
-    {ok, _Rev5} = save_doc(Db, {[{<<"_id">>, <<"doc5">>}]}),
-    {ok, Rev3_2} = save_doc(Db, {[{<<"_id">>, <<"doc3">>}, {<<"_rev">>, Rev3}]}),
-    {ok, _Rev6} = save_doc(Db, {[{<<"_id">>, <<"doc6">>}]}),
-    {ok, _Rev7} = save_doc(Db, {[{<<"_id">>, <<"doc7">>}]}),
-    {ok, _Rev8} = save_doc(Db, {[{<<"_id">>, <<"doc8">>}]}),
-
-    ChangesArgs = #changes_args{
-        filter = "_doc_ids",
-        since = 5
-    },
-    DocIds = [<<"doc3">>, <<"doc4">>, <<"doc9999">>],
-    Req = {json_req, {[{<<"doc_ids">>, DocIds}]}},
-    Consumer = spawn_consumer(test_db_name(), ChangesArgs, Req),
-
-    {Rows, LastSeq} = wait_finished(Consumer),
-    {ok, Db2} = couch_db:open_int(test_db_name(), []),
-    UpSeq = couch_db:get_update_seq(Db2),
-    couch_db:close(Db2),
-    etap:is(LastSeq, UpSeq, "LastSeq is same as database update seq number"),
-    etap:is(length(Rows), 1, "Received 1 changes rows"),
-    [#row{seq = Seq1, id = Id1}] = Rows,
-    etap:is(Id1, <<"doc3">>, "First row is for doc doc3"),
-    etap:is(Seq1, 6, "First row has seq 6"),
-
-    stop(Consumer),
-
-    ChangesArgs2 = #changes_args{
-        filter = "_doc_ids",
-        since = 6
-    },
-    Consumer2 = spawn_consumer(test_db_name(), ChangesArgs2, Req),
-
-    {Rows2, LastSeq2} = wait_finished(Consumer2),
-    {ok, Db3} = couch_db:open_int(test_db_name(), []),
-    UpSeq2 = couch_db:get_update_seq(Db3),
-    couch_db:close(Db3),
-    etap:is(LastSeq2, UpSeq2, "LastSeq is same as database update seq number"),
-    etap:is(length(Rows2), 0, "Received 0 change rows"),
-
-    stop(Consumer2),
-
-    {ok, _Rev3_3} = save_doc(
-        Db,
-        {[{<<"_id">>, <<"doc3">>}, {<<"_deleted">>, true}, {<<"_rev">>, Rev3_2}]}),
-
-    ChangesArgs3 = #changes_args{
-        filter = "_doc_ids",
-        since = 9
-    },
-    Consumer3 = spawn_consumer(test_db_name(), ChangesArgs3, Req),
-
-    {Rows3, LastSeq3} = wait_finished(Consumer3),
-    {ok, Db4} = couch_db:open_int(test_db_name(), []),
-    UpSeq3 = couch_db:get_update_seq(Db4),
-    couch_db:close(Db4),
-    etap:is(LastSeq3, UpSeq3, "LastSeq is same as database update seq number"),
-    etap:is(length(Rows3), 1, "Received 1 changes rows"),
-    etap:is(
-        [#row{seq = LastSeq3, id = <<"doc3">>, deleted = true}],
-        Rows3,
-        "Received row with doc3 deleted"),
-
-    stop(Consumer3),
-
-    delete_db(Db).
-
-
-test_by_doc_ids_continuous() ->
-    {ok, Db} = create_db(test_db_name()),
-
-    {ok, _Rev1} = save_doc(Db, {[{<<"_id">>, <<"doc1">>}]}),
-    {ok, _Rev2} = save_doc(Db, {[{<<"_id">>, <<"doc2">>}]}),
-    {ok, Rev3} = save_doc(Db, {[{<<"_id">>, <<"doc3">>}]}),
-    {ok, Rev4} = save_doc(Db, {[{<<"_id">>, <<"doc4">>}]}),
-    {ok, _Rev5} = save_doc(Db, {[{<<"_id">>, <<"doc5">>}]}),
-    {ok, Rev3_2} = save_doc(Db, {[{<<"_id">>, <<"doc3">>}, {<<"_rev">>, Rev3}]}),
-    {ok, _Rev6} = save_doc(Db, {[{<<"_id">>, <<"doc6">>}]}),
-    {ok, _Rev7} = save_doc(Db, {[{<<"_id">>, <<"doc7">>}]}),
-    {ok, _Rev8} = save_doc(Db, {[{<<"_id">>, <<"doc8">>}]}),
-
-    ChangesArgs = #changes_args{
-        filter = "_doc_ids",
-        feed = "continuous"
-    },
-    DocIds = [<<"doc3">>, <<"doc4">>, <<"doc9999">>],
-    Req = {json_req, {[{<<"doc_ids">>, DocIds}]}},
-    Consumer = spawn_consumer(test_db_name(), ChangesArgs, Req),
-
-    pause(Consumer),
-    Rows = get_rows(Consumer),
-
-    etap:is(length(Rows), 2, "Received 2 changes rows"),
-    [#row{seq = Seq1, id = Id1}, #row{seq = Seq2, id = Id2}] = Rows,
-    etap:is(Id1, <<"doc4">>, "First row is for doc doc4"),
-    etap:is(Seq1, 4, "First row has seq 4"),
-    etap:is(Id2, <<"doc3">>, "Second row is for doc doc3"),
-    etap:is(Seq2, 6, "Second row has seq 6"),
-
-    clear_rows(Consumer),
-    {ok, _Rev9} = save_doc(Db, {[{<<"_id">>, <<"doc9">>}]}),
-    {ok, _Rev10} = save_doc(Db, {[{<<"_id">>, <<"doc10">>}]}),
-    unpause(Consumer),
-    pause(Consumer),
-    etap:is(get_rows(Consumer), [], "No new rows"),
-
-    {ok, Rev4_2} = save_doc(Db, {[{<<"_id">>, <<"doc4">>}, {<<"_rev">>, Rev4}]}),
-    {ok, _Rev11} = save_doc(Db, {[{<<"_id">>, <<"doc11">>}]}),
-    {ok, _Rev4_3} = save_doc(Db, {[{<<"_id">>, <<"doc4">>}, {<<"_rev">>, Rev4_2}]}),
-    {ok, _Rev12} = save_doc(Db, {[{<<"_id">>, <<"doc12">>}]}),
-    {ok, Rev3_3} = save_doc(Db, {[{<<"_id">>, <<"doc3">>}, {<<"_rev">>, Rev3_2}]}),
-    unpause(Consumer),
-    pause(Consumer),
-
-    NewRows = get_rows(Consumer),
-    etap:is(length(NewRows), 2, "Received 2 new rows"),
-    [Row14, Row16] = NewRows,
-    etap:is(Row14#row.seq, 14, "First row has seq 14"),
-    etap:is(Row14#row.id, <<"doc4">>, "First row is for doc doc4"),
-    etap:is(Row16#row.seq, 16, "Second row has seq 16"),
-    etap:is(Row16#row.id, <<"doc3">>, "Second row is for doc doc3"),
-
-    clear_rows(Consumer),
-    {ok, _Rev3_4} = save_doc(Db, {[{<<"_id">>, <<"doc3">>}, {<<"_rev">>, Rev3_3}]}),
-    unpause(Consumer),
-    pause(Consumer),
-    etap:is(get_rows(Consumer), [#row{seq = 17, id = <<"doc3">>}],
-        "Got row for seq 17, doc doc3"),
-
-    unpause(Consumer),
-    stop(Consumer),
-    delete_db(Db).
-
-
-test_design_docs_only() ->
-    {ok, Db} = create_db(test_db_name()),
-
-    {ok, _Rev1} = save_doc(Db, {[{<<"_id">>, <<"doc1">>}]}),
-    {ok, _Rev2} = save_doc(Db, {[{<<"_id">>, <<"doc2">>}]}),
-    {ok, Rev3} = save_doc(Db, {[{<<"_id">>, <<"_design/foo">>}]}),
-
-    ChangesArgs = #changes_args{
-        filter = "_design"
-    },
-    Consumer = spawn_consumer(test_db_name(), ChangesArgs, {json_req, null}),
-
-    {Rows, LastSeq} = wait_finished(Consumer),
-    {ok, Db2} = couch_db:open_int(test_db_name(), []),
-    UpSeq = couch_db:get_update_seq(Db2),
-    couch_db:close(Db2),
-
-    etap:is(LastSeq, UpSeq, "LastSeq is same as database update seq number"),
-    etap:is(length(Rows), 1, "Received 1 changes rows"),
-    etap:is(Rows, [#row{seq = 3, id = <<"_design/foo">>}], "Received row with ddoc"),
-
-    stop(Consumer),
-
-    {ok, Db3} = couch_db:open_int(
-        test_db_name(), [{user_ctx, #user_ctx{roles = [<<"_admin">>]}}]),
-    {ok, _Rev3_2} = save_doc(
-        Db3,
-        {[{<<"_id">>, <<"_design/foo">>}, {<<"_rev">>, Rev3},
-            {<<"_deleted">>, true}]}),
-
-    Consumer2 = spawn_consumer(test_db_name(), ChangesArgs, {json_req, null}),
-
-    {Rows2, LastSeq2} = wait_finished(Consumer2),
-    UpSeq2 = UpSeq + 1,
-    couch_db:close(Db3),
-
-    etap:is(LastSeq2, UpSeq2, "LastSeq is same as database update seq number"),
-    etap:is(length(Rows2), 1, "Received 1 changes rows"),
-    etap:is(
-        Rows2,
-        [#row{seq = 4, id = <<"_design/foo">>, deleted = true}],
-        "Received row with deleted ddoc"),
-
-    stop(Consumer2),
-    delete_db(Db).
-
-test_heartbeat() ->
-    {ok, Db} = create_db(test_db_name()),
-
-    {ok, _} = save_doc(Db, {[
-        {<<"_id">>, <<"_design/foo">>},
-        {<<"language">>, <<"javascript">>},
-            {<<"filters">>, {[
-                {<<"foo">>, <<"function(doc) { if ((doc._id == 'doc10') ||
-                                                  (doc._id == 'doc11') ||
-                                                  (doc._id == 'doc12')) {
-                                                return true;
-                                               } else {
-                                                  return false;
-                                               }}">>
-            }]}}
-    ]}),
-
-    ChangesArgs = #changes_args{
-        filter = "foo/foo",
-        feed = "continuous",
-        timeout = 10000,
-        heartbeat = 1000
-    },
-    Consumer = spawn_consumer(test_db_name(), ChangesArgs, {json_req, null}),
-
-    {ok, _Rev1} = save_doc(Db, {[{<<"_id">>, <<"doc1">>}]}),
-    timer:sleep(200),
-    {ok, _Rev2} = save_doc(Db, {[{<<"_id">>, <<"doc2">>}]}),
-    timer:sleep(200),
-    {ok, _Rev3} = save_doc(Db, {[{<<"_id">>, <<"doc3">>}]}),
-    timer:sleep(200),
-    {ok, _Rev4} = save_doc(Db, {[{<<"_id">>, <<"doc4">>}]}),
-    timer:sleep(200),
-    {ok, _Rev5} = save_doc(Db, {[{<<"_id">>, <<"doc5">>}]}),
-    timer:sleep(200),
-    {ok, _Rev6} = save_doc(Db, {[{<<"_id">>, <<"doc6">>}]}),
-    timer:sleep(200),
-    {ok, _Rev7} = save_doc(Db, {[{<<"_id">>, <<"doc7">>}]}),
-    timer:sleep(200),
-    {ok, _Rev8} = save_doc(Db, {[{<<"_id">>, <<"doc8">>}]}),
-    timer:sleep(200),
-    {ok, _Rev9} = save_doc(Db, {[{<<"_id">>, <<"doc9">>}]}),
-    Heartbeats = get_heartbeats(Consumer),
-    etap:is(Heartbeats, 2, "Received 2 heartbeats now"),
-    {ok, _Rev10} = save_doc(Db, {[{<<"_id">>, <<"doc10">>}]}),
-    timer:sleep(200),
-    {ok, _Rev11} = save_doc(Db, {[{<<"_id">>, <<"doc11">>}]}),
-    timer:sleep(200),
-    {ok, _Rev12} = save_doc(Db, {[{<<"_id">>, <<"doc12">>}]}),
-    Heartbeats2 = get_heartbeats(Consumer),
-    etap:is(Heartbeats2, 3, "Received 3 heartbeats now"),
-    Rows = get_rows(Consumer),
-    etap:is(length(Rows), 3, "Received 3 changes rows"),
-
-    {ok, _Rev13} = save_doc(Db, {[{<<"_id">>, <<"doc13">>}]}),
-    timer:sleep(200),
-    {ok, _Rev14} = save_doc(Db, {[{<<"_id">>, <<"doc14">>}]}),
-    timer:sleep(200),
-    Heartbeats3 = get_heartbeats(Consumer),
-    etap:is(Heartbeats3, 6, "Received 6 heartbeats now"),
-    stop(Consumer),
-    couch_db:close(Db),
-    delete_db(Db).
-
-
-save_doc(Db, Json) ->
-    Doc = couch_doc:from_json_obj(Json),
-    {ok, Rev} = couch_db:update_doc(Db, Doc, []),
-    {ok, couch_doc:rev_to_str(Rev)}.
-
-
-get_rows(Consumer) ->
-    Ref = make_ref(),
-    Consumer ! {get_rows, Ref},
-    receive
-    {rows, Ref, Rows} ->
-        Rows
-    after 3000 ->
-        etap:bail("Timeout getting rows from consumer")
-    end.
-
-get_heartbeats(Consumer) ->
-    Ref = make_ref(),
-    Consumer ! {get_heartbeats, Ref},
-    receive
-    {hearthbeats, Ref, HeartBeats} ->
-        HeartBeats
-    after 3000 ->
-        etap:bail("Timeout getting heartbeats from consumer")
-    end.
-
-
-clear_rows(Consumer) ->
-    Ref = make_ref(),
-    Consumer ! {reset, Ref},
-    receive
-    {ok, Ref} ->
-        ok
-    after 3000 ->
-        etap:bail("Timeout clearing consumer rows")
-    end.
-
-
-stop(Consumer) ->
-    Ref = make_ref(),
-    Consumer ! {stop, Ref},
-    receive
-    {ok, Ref} ->
-        ok
-    after 3000 ->
-        etap:bail("Timeout stopping consumer")
-    end.
-
-
-pause(Consumer) ->
-    Ref = make_ref(),
-    Consumer ! {pause, Ref},
-    receive
-    {paused, Ref} ->
-        ok
-    after 3000 ->
-        etap:bail("Timeout pausing consumer")
-    end.
-
-
-unpause(Consumer) ->
-    Ref = make_ref(),
-    Consumer ! {continue, Ref},
-    receive
-    {ok, Ref} ->
-        ok
-    after 3000 ->
-        etap:bail("Timeout unpausing consumer")
-    end.
-
-
-wait_finished(_Consumer) ->
-    receive
-    {consumer_finished, Rows, LastSeq} ->
-        {Rows, LastSeq}
-    after 30000 ->
-        etap:bail("Timeout waiting for consumer to finish")
-    end.
-
-
-spawn_consumer(DbName, ChangesArgs0, Req) ->
-    Parent = self(),
-    spawn(fun() ->
-        put(heartbeat_count, 0),
-        Callback = fun({change, {Change}, _}, _, Acc) ->
-            Id = couch_util:get_value(<<"id">>, Change),
-            Seq = couch_util:get_value(<<"seq">>, Change),
-            Del = couch_util:get_value(<<"deleted">>, Change, false),
-            [#row{id = Id, seq = Seq, deleted = Del} | Acc];
-        ({stop, LastSeq}, _, Acc) ->
-            Parent ! {consumer_finished, lists:reverse(Acc), LastSeq},
-            stop_loop(Parent, Acc);
-        (timeout, _, Acc) ->
-            put(heartbeat_count, get(heartbeat_count) + 1),
-            maybe_pause(Parent, Acc);
-        (_, _, Acc) ->
-            maybe_pause(Parent, Acc)
-        end,
-        {ok, Db} = couch_db:open_int(DbName, []),
-        ChangesArgs = case (ChangesArgs0#changes_args.timeout =:= undefined)
-            andalso (ChangesArgs0#changes_args.heartbeat =:= undefined) of
-        true ->
-            ChangesArgs0#changes_args{timeout = 10, heartbeat = 10};
-        false ->
-            ChangesArgs0
-        end,
-        FeedFun = couch_changes:handle_changes(ChangesArgs, Req, Db),
-        try
-            FeedFun({Callback, []})
-        catch throw:{stop, _} ->
-            ok
-        end,
-        catch couch_db:close(Db)
-    end).
-
-
-maybe_pause(Parent, Acc) ->
-    receive
-    {get_rows, Ref} ->
-        Parent ! {rows, Ref, lists:reverse(Acc)},
-        maybe_pause(Parent, Acc);
-    {get_heartbeats, Ref} ->
-        Parent ! {hearthbeats, Ref, get(heartbeat_count)},
-        maybe_pause(Parent, Acc);
-    {reset, Ref} ->
-        Parent ! {ok, Ref},
-        maybe_pause(Parent, []);
-    {pause, Ref} ->
-        Parent ! {paused, Ref},
-        pause_loop(Parent, Acc);
-    {stop, Ref} ->
-        Parent ! {ok, Ref},
-        throw({stop, Acc})
-    after 0 ->
-        Acc
-    end.
-
-
-pause_loop(Parent, Acc) ->
-    receive
-    {stop, Ref} ->
-        Parent ! {ok, Ref},
-        throw({stop, Acc});
-    {reset, Ref} ->
-        Parent ! {ok, Ref},
-        pause_loop(Parent, []);
-    {continue, Ref} ->
-        Parent ! {ok, Ref},
-        Acc;
-    {get_rows, Ref} ->
-        Parent ! {rows, Ref, lists:reverse(Acc)},
-        pause_loop(Parent, Acc)
-    end.
-
-
-stop_loop(Parent, Acc) ->
-    receive
-    {get_rows, Ref} ->
-        Parent ! {rows, Ref, lists:reverse(Acc)},
-        stop_loop(Parent, Acc);
-    {stop, Ref} ->
-        Parent ! {ok, Ref},
-        Acc
-    end.
-
-
-create_db(DbName) ->
-    couch_db:create(
-        DbName,
-        [{user_ctx, #user_ctx{roles = [<<"_admin">>]}}, overwrite]).
-
-
-delete_db(Db) ->
-    ok = couch_server:delete(
-        couch_db:name(Db), [{user_ctx, #user_ctx{roles = [<<"_admin">>]}}]).

http://git-wip-us.apache.org/repos/asf/couchdb/blob/b3379023/test/etap/Makefile.am
----------------------------------------------------------------------
diff --git a/test/etap/Makefile.am b/test/etap/Makefile.am
index 4e4997e..517705f 100644
--- a/test/etap/Makefile.am
+++ b/test/etap/Makefile.am
@@ -36,7 +36,6 @@ fixture_files = \
     fixtures/test.couch
 
 tap_files = \
-    073-changes.t \
     074-doc-update-conflicts.t \
     075-auth-cache.t \
     076-file-compression.t \


[32/36] couchdb commit: updated refs/heads/1963-eunit to 85f2750

Posted by kx...@apache.org.
Port 160-vhosts.t etap test suite to eunit

Split Rewrite and OAuth tests.


Project: http://git-wip-us.apache.org/repos/asf/couchdb/repo
Commit: http://git-wip-us.apache.org/repos/asf/couchdb/commit/ad4b940d
Tree: http://git-wip-us.apache.org/repos/asf/couchdb/tree/ad4b940d
Diff: http://git-wip-us.apache.org/repos/asf/couchdb/diff/ad4b940d

Branch: refs/heads/1963-eunit
Commit: ad4b940d7c9a3435e2939f6d7ff3272467ed1a87
Parents: 79c69a9
Author: Alexander Shorin <kx...@apache.org>
Authored: Tue Jun 3 10:58:58 2014 +0400
Committer: Alexander Shorin <kx...@apache.org>
Committed: Tue Jun 3 19:52:59 2014 +0400

----------------------------------------------------------------------
 test/couchdb/Makefile.am              |   1 +
 test/couchdb/couchdb_vhosts_tests.erl | 443 +++++++++++++++++++++++++++++
 test/couchdb/test_request.erl         |  15 +-
 test/etap/160-vhosts.t                | 371 ------------------------
 test/etap/Makefile.am                 |   1 -
 5 files changed, 454 insertions(+), 377 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/couchdb/blob/ad4b940d/test/couchdb/Makefile.am
----------------------------------------------------------------------
diff --git a/test/couchdb/Makefile.am b/test/couchdb/Makefile.am
index ccfcbcf..cfe18db 100644
--- a/test/couchdb/Makefile.am
+++ b/test/couchdb/Makefile.am
@@ -38,6 +38,7 @@ eunit_files = \
     couch_ref_counter_tests.erl \
     couch_stats_tests.erl \
     couchdb_attachments_tests.erl \
+    couchdb_vhosts_tests.erl \
     test_request.erl \
     couchdb_tests.hrl
 

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ad4b940d/test/couchdb/couchdb_vhosts_tests.erl
----------------------------------------------------------------------
diff --git a/test/couchdb/couchdb_vhosts_tests.erl b/test/couchdb/couchdb_vhosts_tests.erl
new file mode 100644
index 0000000..dd05d2e
--- /dev/null
+++ b/test/couchdb/couchdb_vhosts_tests.erl
@@ -0,0 +1,443 @@
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+%   http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+-module(couchdb_vhosts_tests).
+
+-include_lib("../../src/couchdb/couch_db.hrl").
+-include_lib("couchdb_tests.hrl").
+
+-define(ADMIN_USER, {user_ctx, #user_ctx{roles=[<<"_admin">>]}}).
+-define(iofmt(S, A), lists:flatten(io_lib:format(S, A))).
+
+
+start() ->
+    {ok, Pid} = couch_server_sup:start_link(?CONFIG_CHAIN),
+    % disable logging to reduce noise in stdout
+    couch_config:set("log", "level", "none", false),
+    Pid.
+
+stop(Pid) ->
+    couch_server_sup:stop(),
+    erlang:monitor(process, Pid),
+    receive
+        {'DOWN', _, _, Pid, _} ->
+            ok
+    after 1000 ->
+        throw({timeout, server_stop})
+    end.
+
+setup() ->
+    DbName = ?tempdb(),
+    {ok, Db} = couch_db:create(DbName, [?ADMIN_USER]),
+    Doc = couch_doc:from_json_obj({[
+        {<<"_id">>, <<"doc1">>},
+        {<<"value">>, 666}
+    ]}),
+
+    Doc1 = couch_doc:from_json_obj({[
+        {<<"_id">>, <<"_design/doc1">>},
+        {<<"shows">>, {[
+            {<<"test">>, <<"function(doc, req) {
+            return { json: {
+                    requested_path: '/' + req.requested_path.join('/'),
+                    path: '/' + req.path.join('/')}};}">>}
+        ]}},
+        {<<"rewrites">>, [
+            {[
+                {<<"from">>, <<"/">>},
+                {<<"to">>, <<"_show/test">>}
+            ]}
+        ]}
+    ]}),
+    {ok, _} = couch_db:update_docs(Db, [Doc, Doc1]),
+    couch_db:ensure_full_commit(Db),
+    couch_db:close(Db),
+
+    Addr = couch_config:get("httpd", "bind_address", "127.0.0.1"),
+    Port = integer_to_list(mochiweb_socket_server:get(couch_httpd, port)),
+    Url = "http://" ++ Addr ++ ":" ++ Port,
+    {Url, ?b2l(DbName)}.
+
+setup_oauth() ->
+    DbName = ?tempdb(),
+    {ok, Db} = couch_db:create(DbName, [?ADMIN_USER]),
+
+    couch_config:set("couch_httpd_auth", "authentication_db",
+                     ?b2l(?tempdb()), false),
+    couch_config:set("oauth_token_users", "otoksec1", "joe", false),
+    couch_config:set("oauth_consumer_secrets", "consec1", "foo", false),
+    couch_config:set("oauth_token_secrets", "otoksec1", "foobar", false),
+    couch_config:set("couch_httpd_auth", "require_valid_user", "true", false),
+
+    ok = couch_config:set(
+        "vhosts", "oauth-example.com",
+        "/" ++ ?b2l(DbName) ++ "/_design/test/_rewrite/foobar", false),
+
+    DDoc = couch_doc:from_json_obj({[
+        {<<"_id">>, <<"_design/test">>},
+        {<<"language">>, <<"javascript">>},
+        {<<"rewrites">>, [
+            {[
+                {<<"from">>, <<"foobar">>},
+                {<<"to">>, <<"_info">>}
+            ]}
+        ]}
+    ]}),
+    {ok, _} = couch_db:update_doc(Db, DDoc, []),
+
+    couch_db:ensure_full_commit(Db),
+    couch_db:close(Db),
+
+    Addr = couch_config:get("httpd", "bind_address", "127.0.0.1"),
+    Port = integer_to_list(mochiweb_socket_server:get(couch_httpd, port)),
+    Url = "http://" ++ Addr ++ ":" ++ Port,
+    {Url, ?b2l(DbName)}.
+
+teardown({_, DbName}) ->
+    ok = couch_server:delete(?l2b(DbName), []),
+    ok.
+
+
+vhosts_test_() ->
+    {
+        "Virtual Hosts rewrite tests",
+        {
+            setup,
+            fun start/0, fun stop/1,
+            {
+                foreach,
+                fun setup/0, fun teardown/1,
+                [
+                    fun should_return_database_info/1,
+                    fun should_return_revs_info/1,
+                    fun should_serve_utils_for_vhost/1,
+                    fun should_return_virtual_request_path_field_in_request/1,
+                    fun should_return_real_request_path_field_in_request/1,
+                    fun should_match_wildcard_vhost/1,
+                    fun should_return_db_info_for_wildcard_vhost_for_custom_db/1,
+                    fun should_replace_rewrite_variables_for_db_and_doc/1,
+                    fun should_return_db_info_for_vhost_with_resource/1,
+                    fun should_return_revs_info_for_vhost_with_resource/1,
+                    fun should_return_db_info_for_vhost_with_wildcard_resource/1,
+                    fun should_return_path_for_vhost_with_wildcard_host/1
+                ]
+            }
+        }
+    }.
+
+oauth_test_() ->
+    {
+        "Virtual Hosts OAuth tests",
+        {
+            setup,
+            fun start/0, fun stop/1,
+            {
+                foreach,
+                fun setup_oauth/0, fun teardown/1,
+                [
+                    fun should_require_auth/1,
+                    fun should_succeed_oauth/1,
+                    fun should_fail_oauth_with_wrong_credentials/1
+                ]
+            }
+        }
+    }.
+
+
+should_return_database_info({Url, DbName}) ->
+    ?_test(begin
+        ok = couch_config:set("vhosts", "example.com", "/" ++ DbName, false),
+        case test_request:get(Url, [], [{host_header, "example.com"}]) of
+            {ok, _, _, Body} ->
+                {JsonBody} = ejson:decode(Body),
+                ?assert(proplists:is_defined(<<"db_name">>, JsonBody));
+            Else ->
+                erlang:error({assertion_failed,
+                             [{module, ?MODULE},
+                              {line, ?LINE},
+                              {reason, ?iofmt("Request failed: ~p", [Else])}]})
+        end
+    end).
+
+should_return_revs_info({Url, DbName}) ->
+    ?_test(begin
+        ok = couch_config:set("vhosts", "example.com", "/" ++ DbName, false),
+        case test_request:get(Url ++ "/doc1?revs_info=true", [],
+                              [{host_header, "example.com"}]) of
+            {ok, _, _, Body} ->
+                {JsonBody} = ejson:decode(Body),
+                ?assert(proplists:is_defined(<<"_revs_info">>, JsonBody));
+            Else ->
+                erlang:error({assertion_failed,
+                             [{module, ?MODULE},
+                              {line, ?LINE},
+                              {reason, ?iofmt("Request failed: ~p", [Else])}]})
+        end
+    end).
+
+should_serve_utils_for_vhost({Url, DbName}) ->
+    ?_test(begin
+        ok = couch_config:set("vhosts", "example.com", "/" ++ DbName, false),
+        case test_request:get(Url ++ "/_utils/index.html", [],
+                              [{host_header, "example.com"}]) of
+            {ok, _, _, Body} ->
+                ?assertMatch(<<"<!DOCTYPE html>", _/binary>>, Body);
+            Else ->
+                erlang:error({assertion_failed,
+                             [{module, ?MODULE},
+                              {line, ?LINE},
+                              {reason, ?iofmt("Request failed: ~p", [Else])}]})
+        end
+    end).
+
+should_return_virtual_request_path_field_in_request({Url, DbName}) ->
+    ?_test(begin
+        ok = couch_config:set("vhosts", "example1.com",
+                              "/" ++ DbName ++ "/_design/doc1/_rewrite/",
+                              false),
+        case test_request:get(Url, [], [{host_header, "example1.com"}]) of
+            {ok, _, _, Body} ->
+                {Json} = ejson:decode(Body),
+                ?assertEqual(<<"/">>,
+                             proplists:get_value(<<"requested_path">>, Json));
+            Else ->
+                erlang:error({assertion_failed,
+                             [{module, ?MODULE},
+                              {line, ?LINE},
+                              {reason, ?iofmt("Request failed: ~p", [Else])}]})
+        end
+    end).
+
+should_return_real_request_path_field_in_request({Url, DbName}) ->
+    ?_test(begin
+        ok = couch_config:set("vhosts", "example1.com",
+                              "/" ++ DbName ++ "/_design/doc1/_rewrite/",
+                              false),
+        case test_request:get(Url, [], [{host_header, "example1.com"}]) of
+            {ok, _, _, Body} ->
+                {Json} = ejson:decode(Body),
+                Path = ?l2b("/" ++ DbName ++ "/_design/doc1/_show/test"),
+                ?assertEqual(Path, proplists:get_value(<<"path">>, Json));
+            Else ->
+                erlang:error({assertion_failed,
+                             [{module, ?MODULE},
+                              {line, ?LINE},
+                              {reason, ?iofmt("Request failed: ~p", [Else])}]})
+        end
+    end).
+
+should_match_wildcard_vhost({Url, DbName}) ->
+    ?_test(begin
+        ok = couch_config:set("vhosts", "*.example.com",
+                              "/" ++ DbName ++ "/_design/doc1/_rewrite", false),
+        case test_request:get(Url, [], [{host_header, "test.example.com"}]) of
+            {ok, _, _, Body} ->
+                {Json} = ejson:decode(Body),
+                Path = ?l2b("/" ++ DbName ++ "/_design/doc1/_show/test"),
+                ?assertEqual(Path, proplists:get_value(<<"path">>, Json));
+            Else ->
+                erlang:error({assertion_failed,
+                             [{module, ?MODULE},
+                              {line, ?LINE},
+                              {reason, ?iofmt("Request failed: ~p", [Else])}]})
+        end
+    end).
+
+should_return_db_info_for_wildcard_vhost_for_custom_db({Url, DbName}) ->
+    ?_test(begin
+        ok = couch_config:set("vhosts", ":dbname.example1.com",
+                              "/:dbname", false),
+        Host = DbName ++ ".example1.com",
+        case test_request:get(Url, [], [{host_header, Host}]) of
+            {ok, _, _, Body} ->
+                {JsonBody} = ejson:decode(Body),
+                ?assert(proplists:is_defined(<<"db_name">>, JsonBody));
+            Else ->
+                erlang:error({assertion_failed,
+                             [{module, ?MODULE},
+                              {line, ?LINE},
+                              {reason, ?iofmt("Request failed: ~p", [Else])}]})
+        end
+    end).
+
+should_replace_rewrite_variables_for_db_and_doc({Url, DbName}) ->
+    ?_test(begin
+        ok = couch_config:set("vhosts",":appname.:dbname.example1.com",
+                              "/:dbname/_design/:appname/_rewrite/", false),
+        Host = "doc1." ++ DbName ++ ".example1.com",
+        case test_request:get(Url, [], [{host_header, Host}]) of
+            {ok, _, _, Body} ->
+                {Json} = ejson:decode(Body),
+                Path = ?l2b("/" ++ DbName ++ "/_design/doc1/_show/test"),
+                ?assertEqual(Path, proplists:get_value(<<"path">>, Json));
+            Else ->
+                erlang:error({assertion_failed,
+                             [{module, ?MODULE},
+                              {line, ?LINE},
+                              {reason, ?iofmt("Request failed: ~p", [Else])}]})
+        end
+    end).
+
+should_return_db_info_for_vhost_with_resource({Url, DbName}) ->
+    ?_test(begin
+        ok = couch_config:set("vhosts",
+                              "example.com/test", "/" ++ DbName, false),
+        ReqUrl = Url ++ "/test",
+        case test_request:get(ReqUrl, [], [{host_header, "example.com"}]) of
+            {ok, _, _, Body} ->
+                {JsonBody} = ejson:decode(Body),
+                ?assert(proplists:is_defined(<<"db_name">>, JsonBody));
+            Else ->
+                erlang:error({assertion_failed,
+                             [{module, ?MODULE},
+                              {line, ?LINE},
+                              {reason, ?iofmt("Request failed: ~p", [Else])}]})
+        end
+    end).
+
+
+should_return_revs_info_for_vhost_with_resource({Url, DbName}) ->
+    ?_test(begin
+        ok = couch_config:set("vhosts",
+                              "example.com/test", "/" ++ DbName, false),
+        ReqUrl = Url ++ "/test/doc1?revs_info=true",
+        case test_request:get(ReqUrl, [], [{host_header, "example.com"}]) of
+            {ok, _, _, Body} ->
+                {JsonBody} = ejson:decode(Body),
+                ?assert(proplists:is_defined(<<"_revs_info">>, JsonBody));
+            Else ->
+                erlang:error({assertion_failed,
+                             [{module, ?MODULE},
+                              {line, ?LINE},
+                              {reason, ?iofmt("Request failed: ~p", [Else])}]})
+        end
+    end).
+
+should_return_db_info_for_vhost_with_wildcard_resource({Url, DbName}) ->
+    ?_test(begin
+        ok = couch_config:set("vhosts", "*.example2.com/test", "/*", false),
+        ReqUrl = Url ++ "/test",
+        Host = DbName ++ ".example2.com",
+        case test_request:get(ReqUrl, [], [{host_header, Host}]) of
+            {ok, _, _, Body} ->
+                {JsonBody} = ejson:decode(Body),
+                ?assert(proplists:is_defined(<<"db_name">>, JsonBody));
+            Else ->
+                erlang:error({assertion_failed,
+                             [{module, ?MODULE},
+                              {line, ?LINE},
+                              {reason, ?iofmt("Request failed: ~p", [Else])}]})
+        end
+    end).
+
+should_return_path_for_vhost_with_wildcard_host({Url, DbName}) ->
+    ?_test(begin
+        ok = couch_config:set("vhosts", "*/test1",
+                              "/" ++ DbName ++ "/_design/doc1/_show/test",
+                              false),
+        case test_request:get(Url ++ "/test1") of
+            {ok, _, _, Body} ->
+                {Json} = ejson:decode(Body),
+                Path = ?l2b("/" ++ DbName ++ "/_design/doc1/_show/test"),
+                ?assertEqual(Path, proplists:get_value(<<"path">>, Json));
+            Else ->
+                erlang:error({assertion_failed,
+                             [{module, ?MODULE},
+                              {line, ?LINE},
+                              {reason, ?iofmt("Request failed: ~p", [Else])}]})
+        end
+    end).
+
+should_require_auth({Url, _}) ->
+    ?_test(begin
+        case test_request:get(Url, [], [{host_header, "oauth-example.com"}]) of
+            {ok, Code, _, Body} ->
+                ?assertEqual(401, Code),
+                {JsonBody} = ejson:decode(Body),
+                ?assertEqual(<<"unauthorized">>,
+                             couch_util:get_value(<<"error">>, JsonBody));
+            Else ->
+                erlang:error({assertion_failed,
+                             [{module, ?MODULE},
+                              {line, ?LINE},
+                              {reason, ?iofmt("Request failed: ~p", [Else])}]})
+        end
+    end).
+
+should_succeed_oauth({Url, _}) ->
+    ?_test(begin
+        AuthDbName = couch_config:get("couch_httpd_auth", "authentication_db"),
+        JoeDoc = couch_doc:from_json_obj({[
+            {<<"_id">>, <<"org.couchdb.user:joe">>},
+            {<<"type">>, <<"user">>},
+            {<<"name">>, <<"joe">>},
+            {<<"roles">>, []},
+            {<<"password_sha">>, <<"fe95df1ca59a9b567bdca5cbaf8412abd6e06121">>},
+            {<<"salt">>, <<"4e170ffeb6f34daecfd814dfb4001a73">>}
+        ]}),
+        {ok, AuthDb} = couch_db:open_int(?l2b(AuthDbName), [?ADMIN_USER]),
+        {ok, _} = couch_db:update_doc(AuthDb, JoeDoc, [?ADMIN_USER]),
+
+        Host = "oauth-example.com",
+        Consumer = {"consec1", "foo", hmac_sha1},
+        SignedParams = oauth:sign(
+            "GET", "http://" ++ Host ++ "/", [], Consumer, "otoksec1", "foobar"),
+        OAuthUrl = oauth:uri(Url, SignedParams),
+
+        case test_request:get(OAuthUrl, [], [{host_header, Host}]) of
+            {ok, Code, _, Body} ->
+                ?assertEqual(200, Code),
+                {JsonBody} = ejson:decode(Body),
+                ?assertEqual(<<"test">>,
+                             couch_util:get_value(<<"name">>, JsonBody));
+            Else ->
+                erlang:error({assertion_failed,
+                             [{module, ?MODULE},
+                              {line, ?LINE},
+                              {reason, ?iofmt("Request failed: ~p", [Else])}]})
+        end
+    end).
+
+should_fail_oauth_with_wrong_credentials({Url, _}) ->
+    ?_test(begin
+        AuthDbName = couch_config:get("couch_httpd_auth", "authentication_db"),
+        JoeDoc = couch_doc:from_json_obj({[
+            {<<"_id">>, <<"org.couchdb.user:joe">>},
+            {<<"type">>, <<"user">>},
+            {<<"name">>, <<"joe">>},
+            {<<"roles">>, []},
+            {<<"password_sha">>, <<"fe95df1ca59a9b567bdca5cbaf8412abd6e06121">>},
+            {<<"salt">>, <<"4e170ffeb6f34daecfd814dfb4001a73">>}
+        ]}),
+        {ok, AuthDb} = couch_db:open_int(?l2b(AuthDbName), [?ADMIN_USER]),
+        {ok, _} = couch_db:update_doc(AuthDb, JoeDoc, [?ADMIN_USER]),
+
+        Host = "oauth-example.com",
+        Consumer = {"consec1", "bad_secret", hmac_sha1},
+        SignedParams = oauth:sign(
+            "GET", "http://" ++ Host ++ "/", [], Consumer, "otoksec1", "foobar"),
+        OAuthUrl = oauth:uri(Url, SignedParams),
+
+        case test_request:get(OAuthUrl, [], [{host_header, Host}]) of
+            {ok, Code, _, Body} ->
+                ?assertEqual(401, Code),
+                {JsonBody} = ejson:decode(Body),
+                ?assertEqual(<<"unauthorized">>,
+                             couch_util:get_value(<<"error">>, JsonBody));
+            Else ->
+                erlang:error({assertion_failed,
+                             [{module, ?MODULE},
+                              {line, ?LINE},
+                              {reason, ?iofmt("Request failed: ~p", [Else])}]})
+        end
+    end).
+

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ad4b940d/test/couchdb/test_request.erl
----------------------------------------------------------------------
diff --git a/test/couchdb/test_request.erl b/test/couchdb/test_request.erl
index 10801ae..2de045f 100644
--- a/test/couchdb/test_request.erl
+++ b/test/couchdb/test_request.erl
@@ -12,13 +12,15 @@
 
 -module(test_request).
 
--export([get/1, get/2, put/2, put/3]).
+-export([get/1, get/2, get/3, put/2, put/3]).
 -export([request/3, request/4]).
 
 get(Url) ->
     request(get, Url, []).
 get(Url, Headers) ->
     request(get, Url, Headers).
+get(Url, Headers, Opts) ->
+    request(get, Url, Headers, [], Opts).
 
 put(Url, Body) ->
     request(put, Url, [], Body).
@@ -31,18 +33,21 @@ request(Method, Url, Headers) ->
     request(Method, Url, Headers, []).
 
 request(Method, Url, Headers, Body) ->
-    request(Method, Url, Headers, Body, 3).
+    request(Method, Url, Headers, Body, [], 3).
 
-request(_Method, _Url, _Headers, _Body, 0) ->
+request(Method, Url, Headers, Body, Opts) ->
+    request(Method, Url, Headers, Body, Opts, 3).
+
+request(_Method, _Url, _Headers, _Body, _Opts, 0) ->
     {error, request_failed};
-request(Method, Url, Headers, Body, N) ->
+request(Method, Url, Headers, Body, Opts, N) ->
     case code:is_loaded(ibrowse) of
         false ->
             {ok, _} = ibrowse:start();
         _ ->
             ok
     end,
-    case ibrowse:send_req(Url, Headers, Method, Body) of
+    case ibrowse:send_req(Url, Headers, Method, Body, Opts) of
         {ok, Code0, RespHeaders, RespBody0} ->
             Code = list_to_integer(Code0),
             RespBody = iolist_to_binary(RespBody0),

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ad4b940d/test/etap/160-vhosts.t
----------------------------------------------------------------------
diff --git a/test/etap/160-vhosts.t b/test/etap/160-vhosts.t
deleted file mode 100755
index 46fdd73..0000000
--- a/test/etap/160-vhosts.t
+++ /dev/null
@@ -1,371 +0,0 @@
-#!/usr/bin/env escript
-%% -*- erlang -*-
-
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License.  You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
--record(user_ctx, {
-    name = null,
-    roles = [],
-    handler
-}).
-
-server() ->
-    lists:concat([
-        "http://127.0.0.1:", mochiweb_socket_server:get(couch_httpd, port), "/"
-    ]).
-
-dbname() -> "etap-test-db".
-admin_user_ctx() -> {user_ctx, #user_ctx{roles=[<<"_admin">>]}}.
-
-main(_) ->
-    test_util:init_code_path(),
-
-    etap:plan(20),
-    case (catch test()) of
-        ok ->
-            etap:end_tests();
-        Other ->
-            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
-            etap:bail(Other)
-    end,
-    ok.
-
-test() ->
-    couch_server_sup:start_link(test_util:config_files()),
-    ibrowse:start(),
-    crypto:start(),
-
-    timer:sleep(1000),
-    couch_server:delete(list_to_binary(dbname()), [admin_user_ctx()]),
-    {ok, Db} = couch_db:create(list_to_binary(dbname()), [admin_user_ctx()]),
-
-    Doc = couch_doc:from_json_obj({[
-        {<<"_id">>, <<"doc1">>},
-        {<<"value">>, 666}
-    ]}),
-
-    Doc1 = couch_doc:from_json_obj({[
-        {<<"_id">>, <<"_design/doc1">>},
-        {<<"shows">>, {[
-            {<<"test">>, <<"function(doc, req) {
-    return { json: {
-        requested_path: '/' + req.requested_path.join('/'),
-        path: '/' + req.path.join('/')
-    }};
-}">>}
-        ]}},
-        {<<"rewrites">>, [
-            {[
-                {<<"from">>, <<"/">>},
-                {<<"to">>, <<"_show/test">>}
-            ]}
-        ]}
-    ]}),
-
-    {ok, _} = couch_db:update_docs(Db, [Doc, Doc1]),
-
-    couch_db:ensure_full_commit(Db),
-
-    %% end boilerplate, start test
-
-    ok = couch_config:set("vhosts", "example.com", "/etap-test-db", false),
-    ok = couch_config:set("vhosts", "*.example.com",
-            "/etap-test-db/_design/doc1/_rewrite", false),
-    ok = couch_config:set("vhosts", "example.com/test", "/etap-test-db", false),
-    ok = couch_config:set("vhosts", "example1.com",
-            "/etap-test-db/_design/doc1/_rewrite/", false),
-    ok = couch_config:set("vhosts",":appname.:dbname.example1.com",
-            "/:dbname/_design/:appname/_rewrite/", false),
-    ok = couch_config:set("vhosts", ":dbname.example1.com", "/:dbname", false),
-
-    ok = couch_config:set("vhosts", "*.example2.com", "/*", false),
-    ok = couch_config:set("vhosts", "*.example2.com/test", "/*", false),
-    ok = couch_config:set("vhosts", "*/test", "/etap-test-db", false),
-    ok = couch_config:set("vhosts", "*/test1",
-            "/etap-test-db/_design/doc1/_show/test", false),
-    ok = couch_config:set("vhosts", "example3.com", "/", false),
-
-    %% reload rules
-    couch_httpd_vhost:reload(),
-
-    test_regular_request(),
-    test_vhost_request(),
-    test_vhost_request_with_qs(),
-    test_vhost_request_with_global(),
-    test_vhost_requested_path(),
-    test_vhost_requested_path_path(),
-    test_vhost_request_wildcard(),
-    test_vhost_request_replace_var(),
-    test_vhost_request_replace_var1(),
-    test_vhost_request_replace_wildcard(),
-    test_vhost_request_path(),
-    test_vhost_request_path1(),
-    test_vhost_request_path2(),
-    test_vhost_request_path3(),
-    test_vhost_request_to_root(),
-    test_vhost_request_with_oauth(Db),
-
-    %% restart boilerplate
-    couch_db:close(Db),
-    ok = couch_server:delete(couch_db:name(Db), [admin_user_ctx()]),
-    timer:sleep(3000),
-    couch_server_sup:stop(),
-
-    ok.
-
-test_regular_request() ->
-    case ibrowse:send_req(server(), [], get, []) of
-        {ok, _, _, Body} ->
-            {Props} = ejson:decode(Body),
-            Couchdb = couch_util:get_value(<<"couchdb">>, Props),
-            Version = couch_util:get_value(<<"version">>, Props),
-            Vendor = couch_util:get_value(<<"vendor">>, Props),
-            etap:isnt(Couchdb, undefined, "Found couchdb property"),
-            etap:isnt(Version, undefined, "Found version property"),
-            etap:isnt(Vendor, undefined, "Found vendor property");
-        _Else ->
-            etap:bail("http GET / request failed")
-    end.
-
-test_vhost_request() ->
-    case ibrowse:send_req(server(), [], get, [], [{host_header, "example.com"}]) of
-        {ok, _, _, Body} ->
-            {JsonBody} = ejson:decode(Body),
-            HasDbNameInfo = proplists:is_defined(<<"db_name">>, JsonBody),
-            etap:is(HasDbNameInfo, true, "should return database info");
-        _Else ->
-           etap:is(false, true, <<"ibrowse fail">>)
-    end.
-
-test_vhost_request_with_qs() ->
-    Url = server() ++ "doc1?revs_info=true",
-    case ibrowse:send_req(Url, [], get, [], [{host_header, "example.com"}]) of
-        {ok, _, _, Body} ->
-            {JsonProps} = ejson:decode(Body),
-            HasRevsInfo = proplists:is_defined(<<"_revs_info">>, JsonProps),
-            etap:is(HasRevsInfo, true, "should return _revs_info");
-        _Else ->
-            etap:is(false, true, <<"ibrowse fail">>)
-    end.
-
-test_vhost_request_with_global() ->
-    Url2 = server() ++ "_utils/index.html",
-    case ibrowse:send_req(Url2, [], get, [], [{host_header, "example.com"}]) of
-        {ok, _, _, Body2} ->
-            "<!DOCTYPE" ++ _Foo = Body2,
-            etap:is(true, true, "should serve /_utils even inside vhosts");
-        _Else ->
-            etap:is(false, true, <<"ibrowse fail">>)
-    end.
-
-test_vhost_requested_path() ->
-    case ibrowse:send_req(server(), [], get, [], [{host_header, "example1.com"}]) of
-        {ok, _, _, Body} ->
-            {Json} = ejson:decode(Body),
-            etap:is(case proplists:get_value(<<"requested_path">>, Json) of
-                <<"/">> -> true;
-                _ -> false
-            end, true, <<"requested path in req ok">>);
-        _Else ->
-            etap:is(false, true, <<"ibrowse fail">>)
-    end.
-
-test_vhost_requested_path_path() ->
-    case ibrowse:send_req(server(), [], get, [], [{host_header, "example1.com"}]) of
-        {ok, _, _, Body} ->
-            {Json} = ejson:decode(Body),
-            etap:is(case proplists:get_value(<<"path">>, Json) of
-                <<"/etap-test-db/_design/doc1/_show/test">> -> true;
-                _ -> false
-            end, true, <<"path in req ok">>);
-        _Else ->
-            etap:is(false, true, <<"ibrowse fail">>)
-    end.
-
-test_vhost_request_wildcard()->
-    case ibrowse:send_req(server(), [], get, [], [{host_header, "test.example.com"}]) of
-        {ok, _, _, Body} ->
-            {Json} = ejson:decode(Body),
-            etap:is(case proplists:get_value(<<"path">>, Json) of
-                <<"/etap-test-db/_design/doc1/_show/test">> -> true;
-                _ -> false
-            end, true, <<"wildcard  ok">>);
-        _Else -> etap:is(false, true, <<"ibrowse fail">>)
-    end.
-
-
-test_vhost_request_replace_var() ->
-    case ibrowse:send_req(server(), [], get, [], [{host_header,"etap-test-db.example1.com"}]) of
-        {ok, _, _, Body} ->
-            {JsonBody} = ejson:decode(Body),
-            HasDbNameInfo = proplists:is_defined(<<"db_name">>, JsonBody),
-            etap:is(HasDbNameInfo, true, "should return database info");
-        _Else -> etap:is(false, true, <<"ibrowse fail">>)
-    end.
-
-test_vhost_request_replace_var1() ->
-    case ibrowse:send_req(server(), [], get, [], [{host_header, "doc1.etap-test-db.example1.com"}]) of
-        {ok, _, _, Body} ->
-            {Json} = ejson:decode(Body),
-            etap:is(case proplists:get_value(<<"path">>, Json) of
-                <<"/etap-test-db/_design/doc1/_show/test">> -> true;
-                _ -> false
-            end, true, <<"wildcard  ok">>);
-        _Else -> etap:is(false, true, <<"ibrowse fail">>)
-    end.
-
-test_vhost_request_replace_wildcard() ->
-    case ibrowse:send_req(server(), [], get, [], [{host_header,"etap-test-db.example2.com"}]) of
-        {ok, _, _, Body} ->
-            {JsonBody} = ejson:decode(Body),
-            HasDbNameInfo = proplists:is_defined(<<"db_name">>, JsonBody),
-            etap:is(HasDbNameInfo, true, "should return database info");
-        _Else -> etap:is(false, true, <<"ibrowse fail">>)
-    end.
-
-test_vhost_request_path() ->
-    Uri = server() ++ "test",
-    case ibrowse:send_req(Uri, [], get, [], [{host_header, "example.com"}]) of
-        {ok, _, _, Body} ->
-            {JsonBody} = ejson:decode(Body),
-            HasDbNameInfo = proplists:is_defined(<<"db_name">>, JsonBody),
-            etap:is(HasDbNameInfo, true, "should return database info");
-        _Else -> etap:is(false, true, <<"ibrowse fail">>)
-    end.
-
-test_vhost_request_path1() ->
-    Url = server() ++ "test/doc1?revs_info=true",
-    case ibrowse:send_req(Url, [], get, [], []) of
-        {ok, _, _, Body} ->
-            {JsonProps} = ejson:decode(Body),
-            HasRevsInfo = proplists:is_defined(<<"_revs_info">>, JsonProps),
-            etap:is(HasRevsInfo, true, "should return _revs_info");
-        _Else -> etap:is(false, true, <<"ibrowse fail">>)
-    end.
-
-test_vhost_request_path2() ->
-    Uri = server() ++ "test",
-    case ibrowse:send_req(Uri, [], get, [], [{host_header,"etap-test-db.example2.com"}]) of
-        {ok, _, _, Body} ->
-            {JsonBody} = ejson:decode(Body),
-            HasDbNameInfo = proplists:is_defined(<<"db_name">>, JsonBody),
-            etap:is(HasDbNameInfo, true, "should return database info");
-        _Else -> etap:is(false, true, <<"ibrowse fail">>)
-    end.
-
-test_vhost_request_path3() ->
-    Uri = server() ++ "test1",
-    case ibrowse:send_req(Uri, [], get, [], []) of
-        {ok, _, _, Body} ->
-            {Json} = ejson:decode(Body),
-            etap:is(case proplists:get_value(<<"path">>, Json) of
-                <<"/etap-test-db/_design/doc1/_show/test">> -> true;
-                _ -> false
-            end, true, <<"path in req ok">>);
-        _Else -> etap:is(false, true, <<"ibrowse fail">>)
-    end.
-
-test_vhost_request_to_root() ->
-    Uri = server(),
-    case ibrowse:send_req(Uri, [], get, [], []) of
-        {ok, _, _, Body} ->
-            {JsonBody} = ejson:decode(Body),
-            HasCouchDBWelcome = proplists:is_defined(<<"couchdb">>, JsonBody),
-            etap:is(HasCouchDBWelcome, true, "should allow redirect to /");
-        _Else -> etap:is(false, true, <<"ibrowse fail">>)
-    end.
-
-test_vhost_request_with_oauth(Db) ->
-    {ok, AuthDb} = couch_db:create(
-        <<"tap_test_sec_db">>, [admin_user_ctx(), overwrite]),
-    PrevAuthDbName = couch_config:get("couch_httpd_auth", "authentication_db"),
-    couch_config:set("couch_httpd_auth", "authentication_db", "tap_test_sec_db", false),
-    couch_config:set("oauth_token_users", "otoksec1", "joe", false),
-    couch_config:set("oauth_consumer_secrets", "consec1", "foo", false),
-    couch_config:set("oauth_token_secrets", "otoksec1", "foobar", false),
-    couch_config:set("couch_httpd_auth", "require_valid_user", "true", false),
-
-    DDoc = couch_doc:from_json_obj({[
-        {<<"_id">>, <<"_design/test">>},
-        {<<"language">>, <<"javascript">>},
-        {<<"rewrites">>, [
-            {[
-                {<<"from">>, <<"foobar">>},
-                {<<"to">>, <<"_info">>}
-            ]}
-        ]}
-    ]}),
-    {ok, _} = couch_db:update_doc(Db, DDoc, []),
-
-    RewritePath = "/etap-test-db/_design/test/_rewrite/foobar",
-    ok = couch_config:set("vhosts", "oauth-example.com", RewritePath, false),
-    couch_httpd_vhost:reload(),
-
-    case ibrowse:send_req(server(), [], get, [], [{host_header, "oauth-example.com"}]) of
-        {ok, "401", _, Body} ->
-            {JsonBody} = ejson:decode(Body),
-            etap:is(
-                couch_util:get_value(<<"error">>, JsonBody),
-                <<"unauthorized">>,
-                "Request without OAuth credentials failed");
-        Error ->
-           etap:bail("Request without OAuth credentials did not fail: " ++
-               couch_util:to_list(Error))
-    end,
-
-    JoeDoc = couch_doc:from_json_obj({[
-        {<<"_id">>, <<"org.couchdb.user:joe">>},
-        {<<"type">>, <<"user">>},
-        {<<"name">>, <<"joe">>},
-        {<<"roles">>, []},
-        {<<"password_sha">>, <<"fe95df1ca59a9b567bdca5cbaf8412abd6e06121">>},
-        {<<"salt">>, <<"4e170ffeb6f34daecfd814dfb4001a73">>}
-    ]}),
-    {ok, _} = couch_db:update_doc(AuthDb, JoeDoc, []),
-
-    Url = "http://oauth-example.com/",
-    Consumer = {"consec1", "foo", hmac_sha1},
-    SignedParams = oauth:sign(
-        "GET", Url, [], Consumer, "otoksec1", "foobar"),
-    OAuthUrl = oauth:uri(server(), SignedParams),
-
-    case ibrowse:send_req(OAuthUrl, [], get, [], [{host_header, "oauth-example.com"}]) of
-        {ok, "200", _, Body2} ->
-            {JsonBody2} = ejson:decode(Body2),
-            etap:is(couch_util:get_value(<<"name">>, JsonBody2), <<"test">>,
-                "should return ddoc info with OAuth credentials");
-        Error2 ->
-           etap:bail("Failed to access vhost with OAuth credentials: " ++
-               couch_util:to_list(Error2))
-    end,
-
-    Consumer2 = {"consec1", "bad_secret", hmac_sha1},
-    SignedParams2 = oauth:sign(
-        "GET", Url, [], Consumer2, "otoksec1", "foobar"),
-    OAuthUrl2 = oauth:uri(server(), SignedParams2),
-
-    case ibrowse:send_req(OAuthUrl2, [], get, [], [{host_header, "oauth-example.com"}]) of
-        {ok, "401", _, Body3} ->
-            {JsonBody3} = ejson:decode(Body3),
-            etap:is(
-                couch_util:get_value(<<"error">>, JsonBody3),
-                <<"unauthorized">>,
-                "Request with bad OAuth credentials failed");
-        Error3 ->
-           etap:bail("Failed to access vhost with bad OAuth credentials: " ++
-               couch_util:to_list(Error3))
-    end,
-
-    couch_config:set("couch_httpd_auth", "authentication_db", PrevAuthDbName, false),
-    couch_config:set("couch_httpd_auth", "require_valid_user", "false", false),
-    ok = couch_server:delete(couch_db:name(AuthDb), [admin_user_ctx()]).

http://git-wip-us.apache.org/repos/asf/couchdb/blob/ad4b940d/test/etap/Makefile.am
----------------------------------------------------------------------
diff --git a/test/etap/Makefile.am b/test/etap/Makefile.am
index bf9b3f2..a19cab9 100644
--- a/test/etap/Makefile.am
+++ b/test/etap/Makefile.am
@@ -36,7 +36,6 @@ fixture_files = \
     fixtures/test.couch
 
 tap_files = \
-    160-vhosts.t \
     170-os-daemons.es \
     170-os-daemons.t \
     171-os-daemons-config.es \


[18/36] couchdb commit: updated refs/heads/1963-eunit to 85f2750

Posted by kx...@apache.org.
Port 081-config-override.t etap test suite to eunit

Merged into couch_config_tests suite.
Setup fixtures.


Project: http://git-wip-us.apache.org/repos/asf/couchdb/repo
Commit: http://git-wip-us.apache.org/repos/asf/couchdb/commit/3e666270
Tree: http://git-wip-us.apache.org/repos/asf/couchdb/tree/3e666270
Diff: http://git-wip-us.apache.org/repos/asf/couchdb/diff/3e666270

Branch: refs/heads/1963-eunit
Commit: 3e666270fcd1d9320f88cf49b118b8f2ec1710ea
Parents: f90ddf5
Author: Alexander Shorin <kx...@apache.org>
Authored: Sun May 25 22:02:19 2014 +0400
Committer: Alexander Shorin <kx...@apache.org>
Committed: Tue Jun 3 15:14:26 2014 +0400

----------------------------------------------------------------------
 test/couchdb/Makefile.am                       |   7 +-
 test/couchdb/couch_config_tests.erl            | 142 ++++++++++++-
 test/couchdb/couchdb_tests.hrl.in              |   2 +
 test/couchdb/fixtures/couch_config_tests_1.ini |  22 ++
 test/couchdb/fixtures/couch_config_tests_2.ini |  22 ++
 test/etap/081-config-override.1.ini            |  22 --
 test/etap/081-config-override.2.ini            |  22 --
 test/etap/081-config-override.t                | 212 --------------------
 test/etap/Makefile.am                          |   3 -
 9 files changed, 192 insertions(+), 262 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/couchdb/blob/3e666270/test/couchdb/Makefile.am
----------------------------------------------------------------------
diff --git a/test/couchdb/Makefile.am b/test/couchdb/Makefile.am
index 8668b96..3cd7060 100644
--- a/test/couchdb/Makefile.am
+++ b/test/couchdb/Makefile.am
@@ -37,9 +37,14 @@ eunit_files = \
     test_request.erl \
     couchdb_tests.hrl
 
+fixture_files = \
+    fixtures/couch_config_tests_1.ini \
+    fixtures/couch_config_tests_2.ini
+
 EXTRA_DIST = \
     run.in \
-    $(eunit_files)
+    $(eunit_files) \
+    $(fixture_files)
 
 clean-local:
 	rm -rf ebin

http://git-wip-us.apache.org/repos/asf/couchdb/blob/3e666270/test/couchdb/couch_config_tests.erl
----------------------------------------------------------------------
diff --git a/test/couchdb/couch_config_tests.erl b/test/couchdb/couch_config_tests.erl
index 9d09640..401da58 100644
--- a/test/couchdb/couch_config_tests.erl
+++ b/test/couchdb/couch_config_tests.erl
@@ -15,9 +15,30 @@
 -include("../../src/couchdb/couch_db.hrl").
 -include("couchdb_tests.hrl").
 
+-define(CONFIG_DEFAULT,
+        filename:join([?BUILDDIR, "etc", "couchdb", "default_dev.ini"])).
+-define(CONFIG_FIXTURE_1,
+        filename:join([?FIXTURESDIR, "couch_config_tests_1.ini"])).
+-define(CONFIG_FIXTURE_2,
+        filename:join([?FIXTURESDIR, "couch_config_tests_2.ini"])).
+-define(CONFIG_FIXTURE_TEMP,
+    begin
+        FileName = filename:join([?TEMPDIR, "couch_config_temp.ini"]),
+        {ok, Fd} = file:open(FileName, write),
+        ok = file:truncate(Fd),
+        ok = file:close(Fd),
+        FileName
+    end).
+
 
 setup() ->
-    {ok, Pid} = couch_config:start_link(?CONFIG_CHAIN),
+    setup(?CONFIG_CHAIN).
+setup({temporary, Chain}) ->
+    setup(Chain);
+setup({persistent, Chain}) ->
+    setup(lists:append(Chain, [?CONFIG_FIXTURE_TEMP]));
+setup(Chain) ->
+    {ok, Pid} = couch_config:start_link(Chain),
     Pid.
 
 teardown(Pid) ->
@@ -29,6 +50,8 @@ teardown(Pid) ->
     after 1000 ->
         throw({timeout_error, config_stop})
     end.
+teardown(_, Pid) ->
+    teardown(Pid).
 
 
 couch_config_test_() ->
@@ -37,7 +60,9 @@ couch_config_test_() ->
         [
             couch_config_get_tests(),
             couch_config_set_tests(),
-            couch_config_del_tests()
+            couch_config_del_tests(),
+            config_override_tests(),
+            config_persistent_changes_tests()
         ]
     }.
 
@@ -88,6 +113,43 @@ couch_config_del_tests() ->
         }
     }.
 
+config_override_tests() ->
+    {
+        "Configs overide tests",
+        {
+            foreachx,
+            fun setup/1, fun teardown/2,
+            [
+                {{temporary, [?CONFIG_DEFAULT]},
+                 fun should_ensure_in_defaults/2},
+                {{temporary, [?CONFIG_DEFAULT, ?CONFIG_FIXTURE_1]},
+                 fun should_override_options/2},
+                {{temporary, [?CONFIG_DEFAULT, ?CONFIG_FIXTURE_2]},
+                 fun should_create_new_sections_on_override/2},
+                {{temporary, [?CONFIG_DEFAULT, ?CONFIG_FIXTURE_1,
+                              ?CONFIG_FIXTURE_2]},
+                 fun should_win_last_in_chain/2}
+            ]
+        }
+    }.
+
+config_persistent_changes_tests() ->
+    {
+        "Config persistent changes",
+        {
+            foreachx,
+            fun setup/1, fun teardown/2,
+            [
+                {{persistent, [?CONFIG_DEFAULT]},
+                 fun should_write_changes/2},
+                {{temporary, [?CONFIG_DEFAULT]},
+                 fun should_ensure_that_default_wasnt_modified/2},
+                {{temporary, [?CONFIG_FIXTURE_TEMP]},
+                 fun should_ensure_that_written_to_last_config_in_chain/2}
+            ]
+        }
+    }.
+
 
 should_load_all_configs() ->
     ?_assert(length(couch_config:all()) > 0).
@@ -161,3 +223,79 @@ should_delete_binary_option() ->
             ok = couch_config:delete(<<"foo">>, <<"bar">>, false),
             couch_config:get(<<"foo">>, <<"bar">>)
         end).
+
+should_ensure_in_defaults(_, _) ->
+    ?_assert(
+        begin
+            ?assertEqual("100",
+                         couch_config:get("couchdb", "max_dbs_open")),
+            ?assertEqual("5984",
+                         couch_config:get("httpd", "port")),
+            ?assertEqual(undefined,
+                         couch_config:get("fizbang", "unicode")),
+            true
+        end).
+
+should_override_options(_, _) ->
+    ?_assert(
+        begin
+            ?assertEqual("10",
+                         couch_config:get("couchdb", "max_dbs_open")),
+            ?assertEqual("4895",
+                         couch_config:get("httpd", "port")),
+            true
+        end).
+
+should_create_new_sections_on_override(_, _) ->
+    ?_assert(
+        begin
+            ?assertEqual("80",
+                         couch_config:get("httpd", "port")),
+            ?assertEqual("normalized",
+                         couch_config:get("fizbang", "unicode")),
+            true
+        end).
+
+should_win_last_in_chain(_, _) ->
+    ?_assert(
+        begin
+            ?assertEqual("80",
+                         couch_config:get("httpd", "port")),
+            true
+        end).
+
+should_write_changes(_, _) ->
+    ?_assert(
+        begin
+            ?assertEqual("5984",
+                         couch_config:get("httpd", "port")),
+            ?assertEqual(ok,
+                         couch_config:set("httpd", "port", "8080")),
+            ?assertEqual("8080",
+                         couch_config:get("httpd", "port")),
+            ?assertEqual(ok,
+                         couch_config:delete("httpd", "bind_address", "8080")),
+            ?assertEqual(undefined,
+                         couch_config:get("httpd", "bind_address")),
+            true
+        end).
+
+should_ensure_that_default_wasnt_modified(_, _) ->
+    ?_assert(
+        begin
+            ?assertEqual("5984",
+                         couch_config:get("httpd", "port")),
+            ?assertEqual("127.0.0.1",
+                         couch_config:get("httpd", "bind_address")),
+            true
+        end).
+
+should_ensure_that_written_to_last_config_in_chain(_, _) ->
+    ?_assert(
+        begin
+            ?assertEqual("8080",
+                         couch_config:get("httpd", "port")),
+            ?assertEqual(undefined,
+                         couch_config:get("httpd", "bind_address")),
+            true
+        end).

http://git-wip-us.apache.org/repos/asf/couchdb/blob/3e666270/test/couchdb/couchdb_tests.hrl.in
----------------------------------------------------------------------
diff --git a/test/couchdb/couchdb_tests.hrl.in b/test/couchdb/couchdb_tests.hrl.in
index 1014c78..78d537f 100644
--- a/test/couchdb/couchdb_tests.hrl.in
+++ b/test/couchdb/couchdb_tests.hrl.in
@@ -18,6 +18,8 @@
     filename:join([?BUILDDIR, "etc", "couchdb", "default_dev.ini"]),
     filename:join([?SOURCEDIR, "test", "random_port.ini"]),
     filename:join([?BUILDDIR, "etc", "couchdb", "local_dev.ini"])]).
+-define(FIXTURESDIR,
+    filename:join([?SOURCEDIR, "test", "couchdb", "fixtures"])).
 -define(TEMPDIR,
     filename:join([?SOURCEDIR, "test", "couchdb", "temp"])).
 

http://git-wip-us.apache.org/repos/asf/couchdb/blob/3e666270/test/couchdb/fixtures/couch_config_tests_1.ini
----------------------------------------------------------------------
diff --git a/test/couchdb/fixtures/couch_config_tests_1.ini b/test/couchdb/fixtures/couch_config_tests_1.ini
new file mode 100644
index 0000000..55451da
--- /dev/null
+++ b/test/couchdb/fixtures/couch_config_tests_1.ini
@@ -0,0 +1,22 @@
+; Licensed to the Apache Software Foundation (ASF) under one
+; or more contributor license agreements.  See the NOTICE file
+; distributed with this work for additional information
+; regarding copyright ownership.  The ASF licenses this file
+; to you under the Apache License, Version 2.0 (the
+; "License"); you may not use this file except in compliance
+; with the License.  You may obtain a copy of the License at
+; 
+;   http://www.apache.org/licenses/LICENSE-2.0
+;
+; Unless required by applicable law or agreed to in writing,
+; software distributed under the License is distributed on an
+; "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+; KIND, either express or implied.  See the License for the
+; specific language governing permissions and limitations
+; under the License.
+
+[couchdb]
+max_dbs_open=10
+
+[httpd]
+port=4895

http://git-wip-us.apache.org/repos/asf/couchdb/blob/3e666270/test/couchdb/fixtures/couch_config_tests_2.ini
----------------------------------------------------------------------
diff --git a/test/couchdb/fixtures/couch_config_tests_2.ini b/test/couchdb/fixtures/couch_config_tests_2.ini
new file mode 100644
index 0000000..5f46357
--- /dev/null
+++ b/test/couchdb/fixtures/couch_config_tests_2.ini
@@ -0,0 +1,22 @@
+; Licensed to the Apache Software Foundation (ASF) under one
+; or more contributor license agreements.  See the NOTICE file
+; distributed with this work for additional information
+; regarding copyright ownership.  The ASF licenses this file
+; to you under the Apache License, Version 2.0 (the
+; "License"); you may not use this file except in compliance
+; with the License.  You may obtain a copy of the License at
+; 
+;   http://www.apache.org/licenses/LICENSE-2.0
+;
+; Unless required by applicable law or agreed to in writing,
+; software distributed under the License is distributed on an
+; "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+; KIND, either express or implied.  See the License for the
+; specific language governing permissions and limitations
+; under the License.
+
+[httpd]
+port = 80
+
+[fizbang]
+unicode = normalized

http://git-wip-us.apache.org/repos/asf/couchdb/blob/3e666270/test/etap/081-config-override.1.ini
----------------------------------------------------------------------
diff --git a/test/etap/081-config-override.1.ini b/test/etap/081-config-override.1.ini
deleted file mode 100644
index 55451da..0000000
--- a/test/etap/081-config-override.1.ini
+++ /dev/null
@@ -1,22 +0,0 @@
-; Licensed to the Apache Software Foundation (ASF) under one
-; or more contributor license agreements.  See the NOTICE file
-; distributed with this work for additional information
-; regarding copyright ownership.  The ASF licenses this file
-; to you under the Apache License, Version 2.0 (the
-; "License"); you may not use this file except in compliance
-; with the License.  You may obtain a copy of the License at
-; 
-;   http://www.apache.org/licenses/LICENSE-2.0
-;
-; Unless required by applicable law or agreed to in writing,
-; software distributed under the License is distributed on an
-; "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-; KIND, either express or implied.  See the License for the
-; specific language governing permissions and limitations
-; under the License.
-
-[couchdb]
-max_dbs_open=10
-
-[httpd]
-port=4895

http://git-wip-us.apache.org/repos/asf/couchdb/blob/3e666270/test/etap/081-config-override.2.ini
----------------------------------------------------------------------
diff --git a/test/etap/081-config-override.2.ini b/test/etap/081-config-override.2.ini
deleted file mode 100644
index 5f46357..0000000
--- a/test/etap/081-config-override.2.ini
+++ /dev/null
@@ -1,22 +0,0 @@
-; Licensed to the Apache Software Foundation (ASF) under one
-; or more contributor license agreements.  See the NOTICE file
-; distributed with this work for additional information
-; regarding copyright ownership.  The ASF licenses this file
-; to you under the Apache License, Version 2.0 (the
-; "License"); you may not use this file except in compliance
-; with the License.  You may obtain a copy of the License at
-; 
-;   http://www.apache.org/licenses/LICENSE-2.0
-;
-; Unless required by applicable law or agreed to in writing,
-; software distributed under the License is distributed on an
-; "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-; KIND, either express or implied.  See the License for the
-; specific language governing permissions and limitations
-; under the License.
-
-[httpd]
-port = 80
-
-[fizbang]
-unicode = normalized

http://git-wip-us.apache.org/repos/asf/couchdb/blob/3e666270/test/etap/081-config-override.t
----------------------------------------------------------------------
diff --git a/test/etap/081-config-override.t b/test/etap/081-config-override.t
deleted file mode 100755
index 01f8b4c..0000000
--- a/test/etap/081-config-override.t
+++ /dev/null
@@ -1,212 +0,0 @@
-#!/usr/bin/env escript
-%% -*- erlang -*-
-
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
-default_config() ->
-    test_util:build_file("etc/couchdb/default_dev.ini").
-
-local_config_1() ->
-    test_util:source_file("test/etap/081-config-override.1.ini").
-
-local_config_2() ->
-    test_util:source_file("test/etap/081-config-override.2.ini").
-
-local_config_write() ->
-    test_util:build_file("test/etap/temp.081").
-
-% Run tests and wait for the config gen_server to shutdown.
-run_tests(IniFiles, Tests) ->
-    {ok, Pid} = couch_config:start_link(IniFiles),
-    erlang:monitor(process, Pid),
-    Tests(),
-    couch_config:stop(),
-    receive
-        {'DOWN', _, _, Pid, _} -> ok;
-        _Other -> etap:diag("OTHER: ~p~n", [_Other])
-    after
-        1000 -> throw({timeout_error, config_stop})
-    end.
-
-main(_) ->
-    test_util:init_code_path(),
-    etap:plan(17),
-
-    case (catch test()) of
-        ok ->
-            etap:end_tests();
-        Other ->
-            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
-            etap:bail(Other)
-    end,
-    ok.
-
-test() ->
-
-    CheckStartStop = fun() -> ok end,
-    run_tests([default_config()], CheckStartStop),
-
-    CheckDefaults = fun() ->
-        etap:is(
-            couch_config:get("couchdb", "max_dbs_open"),
-            "100",
-            "{couchdb, max_dbs_open} is 100 by defualt."
-        ),
-
-        etap:is(
-            couch_config:get("httpd","port"),
-            "5984",
-            "{httpd, port} is 5984 by default"
-        ),
-
-        etap:is(
-            couch_config:get("fizbang", "unicode"),
-            undefined,
-            "{fizbang, unicode} is undefined by default"
-        )
-    end,
-
-    run_tests([default_config()], CheckDefaults),
-
-
-    % Check that subsequent files override values appropriately
-
-    CheckOverride = fun() ->
-        etap:is(
-            couch_config:get("couchdb", "max_dbs_open"),
-            "10",
-            "{couchdb, max_dbs_open} was overriden with the value 10"
-        ),
-
-        etap:is(
-            couch_config:get("httpd", "port"),
-            "4895",
-            "{httpd, port} was overriden with the value 4895"
-        )
-    end,
-
-    run_tests([default_config(), local_config_1()], CheckOverride),
-
-
-    % Check that overrides can create new sections
-
-    CheckOverride2 = fun() ->
-        etap:is(
-            couch_config:get("httpd", "port"),
-            "80",
-            "{httpd, port} is overriden with the value 80"
-        ),
-
-        etap:is(
-            couch_config:get("fizbang", "unicode"),
-            "normalized",
-            "{fizbang, unicode} was created by override INI file"
-        )
-    end,
-
-    run_tests([default_config(), local_config_2()], CheckOverride2),
-
-
-    % Check that values can be overriden multiple times
-
-    CheckOverride3 = fun() ->
-        etap:is(
-            couch_config:get("httpd", "port"),
-            "80",
-            "{httpd, port} value was taken from the last specified INI file."
-        )
-    end,
-
-    run_tests(
-        [default_config(), local_config_1(), local_config_2()],
-        CheckOverride3
-    ),
-
-    % Check persistence to last file.
-
-    % Empty the file in case it exists.
-    {ok, Fd} = file:open(local_config_write(), write),
-    ok = file:truncate(Fd),
-    ok = file:close(Fd),
-
-    % Open and write a value
-    CheckCanWrite = fun() ->
-        etap:is(
-            couch_config:get("httpd", "port"),
-            "5984",
-            "{httpd, port} is still 5984 by default"
-        ),
-
-        etap:is(
-            couch_config:set("httpd", "port", "8080"),
-            ok,
-            "Writing {httpd, port} is kosher."
-        ),
-
-        etap:is(
-            couch_config:get("httpd", "port"),
-            "8080",
-            "{httpd, port} was updated to 8080 successfully."
-        ),
-
-        etap:is(
-            couch_config:delete("httpd", "bind_address"),
-            ok,
-            "Deleting {httpd, bind_address} succeeds"
-        ),
-
-        etap:is(
-            couch_config:get("httpd", "bind_address"),
-            undefined,
-            "{httpd, bind_address} was actually deleted."
-        )
-    end,
-
-    run_tests([default_config(), local_config_write()], CheckCanWrite),
-
-    % Open and check where we don't expect persistence.
-
-    CheckDidntWrite = fun() ->
-        etap:is(
-            couch_config:get("httpd", "port"),
-            "5984",
-            "{httpd, port} was not persisted to the primary INI file."
-        ),
-
-        etap:is(
-            couch_config:get("httpd", "bind_address"),
-            "127.0.0.1",
-            "{httpd, bind_address} was not deleted form the primary INI file."
-        )
-    end,
-
-    run_tests([default_config()], CheckDidntWrite),
-
-    % Open and check we have only the persistence we expect.
-    CheckDidWrite = fun() ->
-        etap:is(
-            couch_config:get("httpd", "port"),
-            "8080",
-            "{httpd, port} is still 8080 after reopening the config."
-        ),
-
-        etap:is(
-            couch_config:get("httpd", "bind_address"),
-            undefined,
-            "{httpd, bind_address} is still \"\" after reopening."
-        )
-    end,
-
-    run_tests([local_config_write()], CheckDidWrite),
-
-    ok.

http://git-wip-us.apache.org/repos/asf/couchdb/blob/3e666270/test/etap/Makefile.am
----------------------------------------------------------------------
diff --git a/test/etap/Makefile.am b/test/etap/Makefile.am
index 436a27b..07583ac 100644
--- a/test/etap/Makefile.am
+++ b/test/etap/Makefile.am
@@ -36,9 +36,6 @@ fixture_files = \
     fixtures/test.couch
 
 tap_files = \
-    081-config-override.1.ini \
-    081-config-override.2.ini \
-    081-config-override.t \
     082-config-register.t \
     083-config-no-files.t \
     090-task-status.t \


[08/36] couchdb commit: updated refs/heads/1963-eunit to 85f2750

Posted by kx...@apache.org.
Port 040-util.t etap test suite to eunit


Project: http://git-wip-us.apache.org/repos/asf/couchdb/repo
Commit: http://git-wip-us.apache.org/repos/asf/couchdb/commit/e2974721
Tree: http://git-wip-us.apache.org/repos/asf/couchdb/tree/e2974721
Diff: http://git-wip-us.apache.org/repos/asf/couchdb/diff/e2974721

Branch: refs/heads/1963-eunit
Commit: e297472175a3fdac5f3a0d946b3b29d2d4a67347
Parents: 0967755
Author: Alexander Shorin <kx...@apache.org>
Authored: Sat May 17 04:16:38 2014 +0400
Committer: Alexander Shorin <kx...@apache.org>
Committed: Tue Jun 3 15:14:18 2014 +0400

----------------------------------------------------------------------
 test/couchdb/couch_util_tests.erl | 44 +++++++++++++++++++
 test/etap/040-util.t              | 80 ----------------------------------
 test/etap/Makefile.am             |  1 -
 3 files changed, 44 insertions(+), 81 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/couchdb/blob/e2974721/test/couchdb/couch_util_tests.erl
----------------------------------------------------------------------
diff --git a/test/couchdb/couch_util_tests.erl b/test/couchdb/couch_util_tests.erl
index 49688e2..d952f81 100644
--- a/test/couchdb/couch_util_tests.erl
+++ b/test/couchdb/couch_util_tests.erl
@@ -56,3 +56,47 @@ should_collate_ascii() ->
 
 should_collate_non_ascii() ->
     ?_assertEqual(-1, couch_util:collate(<<"A">>, <<"aa">>)).
+
+to_existed_atom_test() ->
+    ?assert(couch_util:to_existing_atom(true)),
+    ?assertMatch(foo, couch_util:to_existing_atom(<<"foo">>)),
+    ?assertMatch(foobarbaz, couch_util:to_existing_atom("foobarbaz")).
+
+implode_test() ->
+    ?assertEqual([1, 38, 2, 38, 3], couch_util:implode([1, 2, 3], "&")).
+
+trim_test() ->
+    lists:map(fun(S) -> ?assertEqual("foo", couch_util:trim(S)) end,
+              [" foo", "foo ", "\tfoo", " foo ", "foo\t", "foo\n", "\nfoo"]).
+
+abs_pathname_test() ->
+    {ok, Cwd} = file:get_cwd(),
+    ?assertEqual(Cwd ++ "/foo", couch_util:abs_pathname("./foo")).
+
+flush_test() ->
+    ?assertNot(couch_util:should_flush()),
+    AcquireMem = fun() ->
+        _IntsToAGazillion = lists:seq(1, 200000),
+        _LotsOfData = lists:map(fun(_) -> <<"foobar">> end,
+                                lists:seq(1, 500000)),
+        _BigBin = list_to_binary(_LotsOfData),
+
+        %% Allocation 200K tuples puts us above the memory threshold
+        %% Originally, there should be:
+        %%      ?assertNot(should_flush())
+        %% however, unlike for etap test, GC collects all allocated bits
+        %% making this conditions fail. So we have to invert the condition
+        %% since GC works, cleans the memory and everything is fine.
+        ?assertNot(couch_util:should_flush())
+    end,
+    AcquireMem(),
+
+    %% Checking to flush invokes GC
+    ?assertNot(couch_util:should_flush()).
+
+verify_test() ->
+    ?assert(couch_util:verify("It4Vooya", "It4Vooya")),
+    ?assertNot(couch_util:verify("It4VooyaX", "It4Vooya")),
+    ?assert(couch_util:verify(<<"ahBase3r">>, <<"ahBase3r">>)),
+    ?assertNot(couch_util:verify(<<"ahBase3rX">>, <<"ahBase3r">>)),
+    ?assertNot(couch_util:verify(nil, <<"ahBase3r">>)).

http://git-wip-us.apache.org/repos/asf/couchdb/blob/e2974721/test/etap/040-util.t
----------------------------------------------------------------------
diff --git a/test/etap/040-util.t b/test/etap/040-util.t
deleted file mode 100755
index d57a32e..0000000
--- a/test/etap/040-util.t
+++ /dev/null
@@ -1,80 +0,0 @@
-#!/usr/bin/env escript
-%% -*- erlang -*-
-
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
-main(_) ->
-    test_util:init_code_path(),
-    application:start(crypto),
-
-    etap:plan(14),
-    case (catch test()) of
-        ok ->
-            etap:end_tests();
-        Other ->
-            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
-            etap:bail(Other)
-    end,
-    ok.
-
-test() ->
-    % to_existing_atom
-    etap:is(true, couch_util:to_existing_atom(true), "An atom is an atom."),
-    etap:is(foo, couch_util:to_existing_atom(<<"foo">>),
-        "A binary foo is the atom foo."),
-    etap:is(foobarbaz, couch_util:to_existing_atom("foobarbaz"),
-        "A list of atoms is one munged atom."),
-
-    % implode
-    etap:is([1, 38, 2, 38, 3], couch_util:implode([1,2,3],"&"),
-        "use & as separator in list."),
-
-    % trim
-    Strings = [" foo", "foo ", "\tfoo", " foo ", "foo\t", "foo\n", "\nfoo"],
-    etap:ok(lists:all(fun(S) -> couch_util:trim(S) == "foo" end, Strings),
-        "everything here trimmed should be foo."),
-
-    % abs_pathname
-    {ok, Cwd} = file:get_cwd(),
-    etap:is(Cwd ++ "/foo", couch_util:abs_pathname("./foo"),
-        "foo is in this directory."),
-
-    % should_flush
-    etap:ok(not couch_util:should_flush(),
-        "Not using enough memory to flush."),
-    AcquireMem = fun() ->
-        _IntsToAGazillion = lists:seq(1, 200000),
-        _LotsOfData = lists:map(
-            fun(Int) -> {Int, <<"foobar">>} end,
-        lists:seq(1, 500000)),
-        etap:ok(couch_util:should_flush(),
-            "Allocation 200K tuples puts us above the memory threshold.")
-    end,
-    AcquireMem(),
-
-    etap:ok(not couch_util:should_flush(),
-        "Checking to flush invokes GC."),
-
-    % verify
-    etap:is(true, couch_util:verify("It4Vooya", "It4Vooya"),
-         "String comparison."),
-    etap:is(false, couch_util:verify("It4VooyaX", "It4Vooya"),
-         "String comparison (unequal lengths)."),
-    etap:is(true, couch_util:verify(<<"ahBase3r">>, <<"ahBase3r">>),
-        "Binary comparison."),
-    etap:is(false, couch_util:verify(<<"ahBase3rX">>, <<"ahBase3r">>),
-        "Binary comparison (unequal lengths)."),
-    etap:is(false, couch_util:verify(nil, <<"ahBase3r">>),
-        "Binary comparison with atom."),
-
-    ok.

http://git-wip-us.apache.org/repos/asf/couchdb/blob/e2974721/test/etap/Makefile.am
----------------------------------------------------------------------
diff --git a/test/etap/Makefile.am b/test/etap/Makefile.am
index b40b095..64570a0 100644
--- a/test/etap/Makefile.am
+++ b/test/etap/Makefile.am
@@ -36,7 +36,6 @@ fixture_files = \
     fixtures/test.couch
 
 tap_files = \
-    040-util.t \
     041-uuid-gen-id.ini \
     041-uuid-gen-seq.ini \
     041-uuid-gen-utc.ini \


[15/36] couchdb commit: updated refs/heads/1963-eunit to 85f2750

Posted by kx...@apache.org.
Port 072-cleanup.t etap test suite to eunit

requests functions from test_util were moved to test_request module
with nicer and simpler API.


Project: http://git-wip-us.apache.org/repos/asf/couchdb/repo
Commit: http://git-wip-us.apache.org/repos/asf/couchdb/commit/d36c7980
Tree: http://git-wip-us.apache.org/repos/asf/couchdb/tree/d36c7980
Diff: http://git-wip-us.apache.org/repos/asf/couchdb/diff/d36c7980

Branch: refs/heads/1963-eunit
Commit: d36c7980f14566b3af914dc0d7e99826df3af30b
Parents: e722195
Author: Alexander Shorin <kx...@apache.org>
Authored: Tue May 20 07:16:41 2014 +0400
Committer: Alexander Shorin <kx...@apache.org>
Committed: Tue Jun 3 15:14:25 2014 +0400

----------------------------------------------------------------------
 test/couchdb/Makefile.am             |   3 +
 test/couchdb/couchdb_views_tests.erl | 125 +++++++++++++++++++++++++++++
 test/couchdb/test_request.erl        |  49 ++++++++++++
 test/etap/072-cleanup.t              | 126 ------------------------------
 test/etap/Makefile.am                |   1 -
 5 files changed, 177 insertions(+), 127 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/couchdb/blob/d36c7980/test/couchdb/Makefile.am
----------------------------------------------------------------------
diff --git a/test/couchdb/Makefile.am b/test/couchdb/Makefile.am
index b8ad5ed..8d45866 100644
--- a/test/couchdb/Makefile.am
+++ b/test/couchdb/Makefile.am
@@ -14,6 +14,7 @@ noinst_SCRIPTS = run
 
 all:
 	mkdir -p {ebin,temp}
+	${ERLC} -oebin test_request.erl
 	chmod +x run
 
 eunit_files = \
@@ -27,6 +28,8 @@ eunit_files = \
     couch_stream_tests.erl \
     couch_key_tree_tests.erl \
     couch_db_tests.erl \
+    couchdb_views_tests.erl \
+    test_request.erl \
     couchdb_tests.hrl
 
 EXTRA_DIST = \

http://git-wip-us.apache.org/repos/asf/couchdb/blob/d36c7980/test/couchdb/couchdb_views_tests.erl
----------------------------------------------------------------------
diff --git a/test/couchdb/couchdb_views_tests.erl b/test/couchdb/couchdb_views_tests.erl
new file mode 100644
index 0000000..be361c5
--- /dev/null
+++ b/test/couchdb/couchdb_views_tests.erl
@@ -0,0 +1,125 @@
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+%   http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+-module(couchdb_views_tests).
+
+-include_lib("../../src/couchdb/couch_db.hrl").
+-include_lib("couchdb_tests.hrl").
+
+-define(ADMIN_USER, #user_ctx{roles=[<<"_admin">>]}).
+
+
+start() ->
+    couch_server_sup:start_link(?CONFIG_CHAIN),
+    % disable logging to reduce noise in stdout
+    couch_config:set("log", "level", "none", false),
+    ok.
+
+stop(_) ->
+    couch_server_sup:stop(),
+    ok.
+
+setup() ->
+    DbName = ?tempdb(),
+    {ok, _} = couch_db:create(DbName, [{user_ctx, ?ADMIN_USER}]),
+    FooRev = create_design_doc(DbName, <<"_design/foo">>, <<"bar">>),
+    ok = query_view(DbName, "foo", "bar"),
+    BooRev = create_design_doc(DbName, <<"_design/boo">>, <<"baz">>),
+    ok = query_view(DbName, "boo", "baz"),
+    {DbName, {FooRev, BooRev}}.
+
+teardown({DbName, _}) ->
+    ok = couch_server:delete(DbName, []),
+    ok.
+
+
+view_indexes_cleanup_test_() ->
+    {
+        "View indexes cleanup",
+        {
+            setup,
+            fun start/0, fun stop/1,
+            {
+                foreach,
+                fun setup/0, fun teardown/1,
+                [
+                    fun should_have_two_indexes_alive_before_deletion/1,
+                    fun should_cleanup_index_file_after_ddoc_deletion/1,
+                    fun should_cleanup_all_index_files/1
+                ]
+            }
+        }
+    }.
+
+should_have_two_indexes_alive_before_deletion({DbName, _}) ->
+    view_cleanup(DbName),
+    ?_assertEqual(2, count_index_files(DbName)).
+
+should_cleanup_index_file_after_ddoc_deletion({DbName, {FooRev, _}}) ->
+    delete_design_doc(DbName, <<"_design/foo">>, FooRev),
+    view_cleanup(DbName),
+    ?_assertEqual(1, count_index_files(DbName)).
+
+should_cleanup_all_index_files({DbName, {FooRev, BooRev}})->
+    delete_design_doc(DbName, <<"_design/foo">>, FooRev),
+    delete_design_doc(DbName, <<"_design/boo">>, BooRev),
+    view_cleanup(DbName),
+    ?_assertEqual(0, count_index_files(DbName)).
+
+
+create_design_doc(DbName, DDName, ViewName) ->
+    {ok, Db} = couch_db:open(DbName, [{user_ctx, ?ADMIN_USER}]),
+    DDoc = couch_doc:from_json_obj({[
+        {<<"_id">>, DDName},
+        {<<"language">>, <<"javascript">>},
+        {<<"views">>, {[
+            {ViewName, {[
+                {<<"map">>, <<"function(doc) { emit(doc.value, 1); }">>}
+            ]}}
+        ]}}
+    ]}),
+    {ok, Rev} = couch_db:update_doc(Db, DDoc, []),
+    couch_db:ensure_full_commit(Db),
+    couch_db:close(Db),
+    Rev.
+
+delete_design_doc(DbName, DDName, Rev) ->
+    {ok, Db} = couch_db:open(DbName, [{user_ctx, ?ADMIN_USER}]),
+    DDoc = couch_doc:from_json_obj({[
+        {<<"_id">>, DDName},
+        {<<"_rev">>, couch_doc:rev_to_str(Rev)},
+        {<<"_deleted">>, true}
+    ]}),
+    {ok, _} = couch_db:update_doc(Db, DDoc, [Rev]),
+    couch_db:close(Db).
+
+db_url(DbName) ->
+    Addr = couch_config:get("httpd", "bind_address", "127.0.0.1"),
+    Port = integer_to_list(mochiweb_socket_server:get(couch_httpd, port)),
+    "http://" ++ Addr ++ ":" ++ Port ++ "/" ++ binary_to_list(DbName).
+
+query_view(DbName, DDoc, View) ->
+    {ok, Code, _Headers, _Body} = test_request:get(
+        db_url(DbName) ++ "/_design/" ++ DDoc ++ "/_view/" ++ View),
+    ?assertEqual(200, Code),
+    ok.
+
+view_cleanup(DbName) ->
+    {ok, Db} = couch_db:open(DbName, [{user_ctx, ?ADMIN_USER}]),
+    couch_mrview:cleanup(Db),
+    couch_db:close(Db).
+
+count_index_files(DbName) ->
+    % call server to fetch the index files
+    RootDir = couch_config:get("couchdb", "view_index_dir"),
+    length(filelib:wildcard(RootDir ++ "/." ++
+        binary_to_list(DbName) ++ "_design"++"/mrview/*")).

http://git-wip-us.apache.org/repos/asf/couchdb/blob/d36c7980/test/couchdb/test_request.erl
----------------------------------------------------------------------
diff --git a/test/couchdb/test_request.erl b/test/couchdb/test_request.erl
new file mode 100644
index 0000000..cd6e310
--- /dev/null
+++ b/test/couchdb/test_request.erl
@@ -0,0 +1,49 @@
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+%   http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+-module(test_request).
+
+-export([get/1, get/2]).
+-export([request/3, request/4]).
+
+get(Url) ->
+    request(get, Url, []).
+get(Url, Headers) ->
+    request(get, Url, Headers).
+
+request(Method, Url, Headers) ->
+    request(Method, Url, Headers, []).
+
+request(Method, Url, Headers, Body) ->
+    request(Method, Url, Headers, Body, 3).
+
+request(_Method, _Url, _Headers, _Body, 0) ->
+    {error, request_failed};
+request(Method, Url, Headers, Body, N) ->
+    case code:is_loaded(ibrowse) of
+        false ->
+            {ok, _} = ibrowse:start();
+        _ ->
+            ok
+    end,
+    case ibrowse:send_req(Url, Headers, Method, Body) of
+        {ok, Code0, RespHeaders, RespBody0} ->
+            Code = list_to_integer(Code0),
+            RespBody = iolist_to_binary(RespBody0),
+            {ok, Code, RespHeaders, RespBody};
+        {error, {'EXIT', {normal, _}}} ->
+            % Connection closed right after a successful request that
+            % used the same connection.
+            request(Method, Url, Headers, Body, N - 1);
+        Error ->
+            Error
+    end.

http://git-wip-us.apache.org/repos/asf/couchdb/blob/d36c7980/test/etap/072-cleanup.t
----------------------------------------------------------------------
diff --git a/test/etap/072-cleanup.t b/test/etap/072-cleanup.t
deleted file mode 100755
index 9cbcdfa..0000000
--- a/test/etap/072-cleanup.t
+++ /dev/null
@@ -1,126 +0,0 @@
-#!/usr/bin/env escript
-%% -*- erlang -*-
-
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
--define(TEST_DB, <<"etap-test-db">>).
-
--record(user_ctx, {
-    name = null,
-    roles = [],
-    handler
-}).
-
--define(ADMIN_USER, #user_ctx{roles=[<<"_admin">>]}).
-
-main(_) ->
-    test_util:init_code_path(),
-
-    etap:plan(7),
-    try test() of
-        ok ->
-            etap:end_tests()
-    catch
-        Other ->
-            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
-            timer:sleep(1000),
-            etap:bail(Other)
-    end,
-    ok.
-
-test() ->
-
-    {ok, _} = couch_server_sup:start_link(test_util:config_files()),
-    couch_server:delete(?TEST_DB, []),
-    timer:sleep(1000),
-
-    couch_db:create(?TEST_DB, []),
-
-    {ok, AllDbs} = couch_server:all_databases(),
-    etap:ok(lists:member(?TEST_DB, AllDbs), "Database was created."),
-
-    FooRev = create_design_doc(<<"_design/foo">>, <<"bar">>),
-    query_view("foo", "bar"),
-
-    BoozRev = create_design_doc(<<"_design/booz">>, <<"baz">>),
-    query_view("booz", "baz"),
-
-    {ok, _Db} = couch_db:open(?TEST_DB, [{user_ctx, ?ADMIN_USER}]),
-    view_cleanup(),
-    etap:is(count_index_files(), 2,
-        "Two index files before any deletions."),
-
-    delete_design_doc(<<"_design/foo">>, FooRev),
-    view_cleanup(),
-    etap:is(count_index_files(), 1,
-        "One index file after first deletion and cleanup."),
-
-    delete_design_doc(<<"_design/booz">>, BoozRev),
-    view_cleanup(),
-    etap:is(count_index_files(), 0,
-        "No index files after second deletion and cleanup."),
-
-    couch_server:delete(?TEST_DB, []),
-    {ok, AllDbs2} = couch_server:all_databases(),
-    etap:ok(not lists:member(?TEST_DB, AllDbs2),
-        "Database was deleted."),
-    ok.
-
-create_design_doc(DDName, ViewName) ->
-    {ok, Db} = couch_db:open(?TEST_DB, [{user_ctx, ?ADMIN_USER}]),
-    DDoc = couch_doc:from_json_obj({[
-        {<<"_id">>, DDName},
-        {<<"language">>, <<"javascript">>},
-        {<<"views">>, {[
-            {ViewName, {[
-                {<<"map">>, <<"function(doc) { emit(doc.value, 1); }">>}
-            ]}}
-        ]}}
-    ]}),
-    {ok, Rev} = couch_db:update_doc(Db, DDoc, []),
-    couch_db:ensure_full_commit(Db),
-    couch_db:close(Db),
-    Rev.
-
-delete_design_doc(DDName, Rev) ->
-    {ok, Db} = couch_db:open(?TEST_DB, [{user_ctx, ?ADMIN_USER}]),
-    DDoc = couch_doc:from_json_obj({[
-        {<<"_id">>, DDName},
-        {<<"_rev">>, couch_doc:rev_to_str(Rev)},
-        {<<"_deleted">>, true}
-    ]}),
-    {ok, _} = couch_db:update_doc(Db, DDoc, [Rev]),
-    couch_db:close(Db).
-
-db_url() ->
-    Addr = couch_config:get("httpd", "bind_address", "127.0.0.1"),
-    Port = integer_to_list(mochiweb_socket_server:get(couch_httpd, port)),
-    "http://" ++ Addr ++ ":" ++ Port ++ "/" ++
-        binary_to_list(?TEST_DB).
-
-query_view(DDoc, View) ->
-    {ok, Code, _Headers, _Body} = test_util:request(
-        db_url() ++ "/_design/" ++ DDoc ++ "/_view/" ++ View, [], get),
-    etap:is(Code, 200, "Built view index for " ++ DDoc ++ "."),
-    ok.
-
-view_cleanup() ->
-    {ok, Db} = couch_db:open(?TEST_DB, [{user_ctx, ?ADMIN_USER}]),
-    couch_mrview:cleanup(Db),
-    couch_db:close(Db).
-
-count_index_files() ->
-    % call server to fetch the index files
-    RootDir = couch_config:get("couchdb", "view_index_dir"),
-    length(filelib:wildcard(RootDir ++ "/." ++
-        binary_to_list(?TEST_DB) ++ "_design"++"/mrview/*")).

http://git-wip-us.apache.org/repos/asf/couchdb/blob/d36c7980/test/etap/Makefile.am
----------------------------------------------------------------------
diff --git a/test/etap/Makefile.am b/test/etap/Makefile.am
index 408ca40..4e4997e 100644
--- a/test/etap/Makefile.am
+++ b/test/etap/Makefile.am
@@ -36,7 +36,6 @@ fixture_files = \
     fixtures/test.couch
 
 tap_files = \
-    072-cleanup.t \
     073-changes.t \
     074-doc-update-conflicts.t \
     075-auth-cache.t \


[33/36] couchdb commit: updated refs/heads/1963-eunit to 85f2750

Posted by kx...@apache.org.
Port 170-os-daemons.t etap test suite to eunit


Project: http://git-wip-us.apache.org/repos/asf/couchdb/repo
Commit: http://git-wip-us.apache.org/repos/asf/couchdb/commit/f55d2c2a
Tree: http://git-wip-us.apache.org/repos/asf/couchdb/tree/f55d2c2a
Diff: http://git-wip-us.apache.org/repos/asf/couchdb/diff/f55d2c2a

Branch: refs/heads/1963-eunit
Commit: f55d2c2afb0e81fc8ba4c992a82e029cf6071106
Parents: ad4b940
Author: Alexander Shorin <kx...@apache.org>
Authored: Tue Jun 3 12:32:02 2014 +0400
Committer: Alexander Shorin <kx...@apache.org>
Committed: Tue Jun 3 19:53:06 2014 +0400

----------------------------------------------------------------------
 test/couchdb/Makefile.am                       |   2 +
 test/couchdb/couchdb_os_daemons_tests.erl      | 144 ++++++++++++++++++++
 test/couchdb/fixtures/os_daemon_looper.escript |  26 ++++
 test/etap/170-os-daemons.es                    |  26 ----
 test/etap/170-os-daemons.t                     | 114 ----------------
 test/etap/Makefile.am                          |   2 -
 6 files changed, 172 insertions(+), 142 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/couchdb/blob/f55d2c2a/test/couchdb/Makefile.am
----------------------------------------------------------------------
diff --git a/test/couchdb/Makefile.am b/test/couchdb/Makefile.am
index cfe18db..7dd0011 100644
--- a/test/couchdb/Makefile.am
+++ b/test/couchdb/Makefile.am
@@ -39,6 +39,7 @@ eunit_files = \
     couch_stats_tests.erl \
     couchdb_attachments_tests.erl \
     couchdb_vhosts_tests.erl \
+    couchdb_os_daemons_tests.erl \
     test_request.erl \
     couchdb_tests.hrl
 
@@ -47,6 +48,7 @@ fixture_files = \
     fixtures/couch_config_tests_2.ini \
     fixtures/couch_stats_aggregates.cfg \
     fixtures/couch_stats_aggregates.ini \
+    fixtures/os_daemon_looper.escript \
     fixtures/logo.png
 
 EXTRA_DIST = \

http://git-wip-us.apache.org/repos/asf/couchdb/blob/f55d2c2a/test/couchdb/couchdb_os_daemons_tests.erl
----------------------------------------------------------------------
diff --git a/test/couchdb/couchdb_os_daemons_tests.erl b/test/couchdb/couchdb_os_daemons_tests.erl
new file mode 100644
index 0000000..f3c75df
--- /dev/null
+++ b/test/couchdb/couchdb_os_daemons_tests.erl
@@ -0,0 +1,144 @@
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+%   http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+-module(couchdb_os_daemons_tests).
+
+-include_lib("couchdb_tests.hrl").
+
+%% keep in sync with couchdb/couch_os_daemons.erl
+-record(daemon, {
+    port,
+    name,
+    cmd,
+    kill,
+    status=running,
+    cfg_patterns=[],
+    errors=[],
+    buf=[]
+}).
+
+-define(DAEMON_LOOPER, "os_daemon_looper.escript").
+-define(DELAY, 100).
+
+
+setup(DName) ->
+    {ok, CfgPid} = couch_config:start_link(?CONFIG_CHAIN),
+    {ok, OsDPid} = couch_os_daemons:start_link(),
+    couch_config:set("os_daemons", DName,
+                     filename:join([?FIXTURESDIR, DName]), false),
+    timer:sleep(?DELAY),  % short delay to let daemon set kill flag
+    {CfgPid, OsDPid}.
+
+teardown(_, {CfgPid, OsDPid}) ->
+    erlang:monitor(process, CfgPid),
+    couch_config:stop(),
+    receive
+        {'DOWN', _, _, CfgPid, _} ->
+            ok
+    after 1000 ->
+        throw({timeout, config_stop})
+    end,
+
+    erlang:monitor(process, OsDPid),
+    exit(OsDPid, normal),
+    receive
+        {'DOWN', _, _, OsDPid, _} ->
+            ok
+    after 1000 ->
+        throw({timeout, os_daemon_stop})
+    end.
+
+
+os_daemons_test_() ->
+    {
+        "OS Daemons tests",
+        {
+            foreachx,
+            fun setup/1, fun teardown/2,
+            [{?DAEMON_LOOPER, Fun} || Fun <- [
+                fun should_check_daemon/2,
+                fun should_check_daemon_table_form/2,
+                fun should_clean_tables_on_daemon_remove/2,
+                fun should_spawn_multiple_daemons/2,
+                fun should_keep_alive_one_daemon_on_killing_other/2
+            ]]
+        }
+    }.
+
+
+should_check_daemon(DName, _) ->
+    ?_test(begin
+        {ok, [D]} = couch_os_daemons:info([table]),
+        check_daemon(D, DName)
+    end).
+
+should_check_daemon_table_form(DName, _) ->
+    ?_test(begin
+        {ok, Tab} = couch_os_daemons:info(),
+        [D] = ets:tab2list(Tab),
+        check_daemon(D, DName)
+    end).
+
+should_clean_tables_on_daemon_remove(DName, _) ->
+    ?_test(begin
+        couch_config:delete("os_daemons", DName, false),
+        {ok, Tab2} = couch_os_daemons:info(),
+        ?_assertEqual([], ets:tab2list(Tab2))
+    end).
+
+should_spawn_multiple_daemons(DName, _) ->
+    ?_test(begin
+        couch_config:set("os_daemons", "bar",
+                         filename:join([?FIXTURESDIR, DName]), false),
+        couch_config:set("os_daemons", "baz",
+                         filename:join([?FIXTURESDIR, DName]), false),
+        timer:sleep(?DELAY),
+        {ok, Daemons} = couch_os_daemons:info([table]),
+        lists:foreach(fun(D) ->
+            check_daemon(D)
+        end, Daemons),
+        {ok, Tab} = couch_os_daemons:info(),
+        lists:foreach(fun(D) ->
+            check_daemon(D)
+        end, ets:tab2list(Tab))
+    end).
+
+should_keep_alive_one_daemon_on_killing_other(DName, _) ->
+    ?_test(begin
+        couch_config:set("os_daemons", "bar",
+                         filename:join([?FIXTURESDIR, DName]), false),
+        timer:sleep(?DELAY),
+        {ok, Daemons} = couch_os_daemons:info([table]),
+        lists:foreach(fun(D) ->
+            check_daemon(D)
+        end, Daemons),
+
+        couch_config:delete("os_daemons", "bar", false),
+        timer:sleep(?DELAY),
+        {ok, [D2]} = couch_os_daemons:info([table]),
+        check_daemon(D2, DName),
+
+        {ok, Tab} = couch_os_daemons:info(),
+        [T] = ets:tab2list(Tab),
+        check_daemon(T, DName)
+    end).
+
+
+check_daemon(D) ->
+    check_daemon(D, D#daemon.name).
+
+check_daemon(D, Name) ->
+    ?assert(is_port(D#daemon.port)),
+    ?assertEqual(Name, D#daemon.name),
+    ?assertNotEqual(undefined, D#daemon.kill),
+    ?assertEqual([], D#daemon.errors),
+    ?assertEqual([], D#daemon.buf).

http://git-wip-us.apache.org/repos/asf/couchdb/blob/f55d2c2a/test/couchdb/fixtures/os_daemon_looper.escript
----------------------------------------------------------------------
diff --git a/test/couchdb/fixtures/os_daemon_looper.escript b/test/couchdb/fixtures/os_daemon_looper.escript
new file mode 100755
index 0000000..73974e9
--- /dev/null
+++ b/test/couchdb/fixtures/os_daemon_looper.escript
@@ -0,0 +1,26 @@
+#! /usr/bin/env escript
+
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+%   http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+loop() ->
+    loop(io:read("")).
+
+loop({ok, _}) ->
+    loop(io:read(""));
+loop(eof) ->
+    stop;
+loop({error, Reason}) ->
+    throw({error, Reason}).
+
+main([]) ->
+    loop().

http://git-wip-us.apache.org/repos/asf/couchdb/blob/f55d2c2a/test/etap/170-os-daemons.es
----------------------------------------------------------------------
diff --git a/test/etap/170-os-daemons.es b/test/etap/170-os-daemons.es
deleted file mode 100755
index 73974e9..0000000
--- a/test/etap/170-os-daemons.es
+++ /dev/null
@@ -1,26 +0,0 @@
-#! /usr/bin/env escript
-
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
-loop() ->
-    loop(io:read("")).
-
-loop({ok, _}) ->
-    loop(io:read(""));
-loop(eof) ->
-    stop;
-loop({error, Reason}) ->
-    throw({error, Reason}).
-
-main([]) ->
-    loop().

http://git-wip-us.apache.org/repos/asf/couchdb/blob/f55d2c2a/test/etap/170-os-daemons.t
----------------------------------------------------------------------
diff --git a/test/etap/170-os-daemons.t b/test/etap/170-os-daemons.t
deleted file mode 100755
index 6feaa1b..0000000
--- a/test/etap/170-os-daemons.t
+++ /dev/null
@@ -1,114 +0,0 @@
-#!/usr/bin/env escript
-%% -*- erlang -*-
-
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License.  You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
--record(daemon, {
-    port,
-    name,
-    cmd,
-    kill,
-    status=running,
-    cfg_patterns=[],
-    errors=[],
-    buf=[]
-}).
-
-config_files() ->
-    lists:map(fun test_util:build_file/1, [
-        "etc/couchdb/default_dev.ini"
-    ]).
-
-daemon_cmd() ->
-    test_util:source_file("test/etap/170-os-daemons.es").
-
-main(_) ->
-    test_util:init_code_path(),
-
-    etap:plan(49),
-    case (catch test()) of
-        ok ->
-            etap:end_tests();
-        Other ->
-            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
-            etap:bail(Other)
-    end,
-    ok.
-
-test() ->
-    couch_config:start_link(config_files()),
-    couch_os_daemons:start_link(),
-
-    etap:diag("Daemons boot after configuration added."),
-    couch_config:set("os_daemons", "foo", daemon_cmd(), false),
-    timer:sleep(1000),
-    
-    {ok, [D1]} = couch_os_daemons:info([table]),
-    check_daemon(D1, "foo"),
-
-    % Check table form
-    {ok, Tab1} = couch_os_daemons:info(),
-    [T1] = ets:tab2list(Tab1),
-    check_daemon(T1, "foo"),
-
-    etap:diag("Daemons stop after configuration removed."),
-    couch_config:delete("os_daemons", "foo", false),
-    timer:sleep(500),
-    
-    {ok, []} = couch_os_daemons:info([table]),
-    {ok, Tab2} = couch_os_daemons:info(),
-    etap:is(ets:tab2list(Tab2), [], "As table returns empty table."),
-    
-    etap:diag("Adding multiple daemons causes both to boot."),
-    couch_config:set("os_daemons", "bar", daemon_cmd(), false),
-    couch_config:set("os_daemons", "baz", daemon_cmd(), false),
-    timer:sleep(500),
-    {ok, Daemons} = couch_os_daemons:info([table]),
-    lists:foreach(fun(D) ->
-        check_daemon(D)
-    end, Daemons),
-
-    {ok, Tab3} = couch_os_daemons:info(),
-    lists:foreach(fun(D) ->
-        check_daemon(D)
-    end, ets:tab2list(Tab3)),
-    
-    etap:diag("Removing one daemon leaves the other alive."),
-    couch_config:delete("os_daemons", "bar", false),
-    timer:sleep(500),
-    
-    {ok, [D2]} = couch_os_daemons:info([table]),
-    check_daemon(D2, "baz"),
-    
-    % Check table version
-    {ok, Tab4} = couch_os_daemons:info(),
-    [T4] = ets:tab2list(Tab4),
-    check_daemon(T4, "baz"),
-    
-    ok.
-
-check_daemon(D) ->
-    check_daemon(D, D#daemon.name).
-
-check_daemon(D, Name) ->
-    BaseName = "170-os-daemons.es",
-    BaseLen = length(BaseName),
-    CmdLen = length(D#daemon.cmd),
-    CmdName = lists:sublist(D#daemon.cmd, CmdLen-BaseLen+1, BaseLen),
-
-    etap:is(is_port(D#daemon.port), true, "Daemon port is a port."),
-    etap:is(D#daemon.name, Name, "Daemon name was set correctly."),
-    etap:is(CmdName, BaseName, "Command name was set correctly."),
-    etap:isnt(D#daemon.kill, undefined, "Kill command was set."),
-    etap:is(D#daemon.errors, [], "No errors occurred while booting."),
-    etap:is(D#daemon.buf, [], "No extra data left in the buffer.").

http://git-wip-us.apache.org/repos/asf/couchdb/blob/f55d2c2a/test/etap/Makefile.am
----------------------------------------------------------------------
diff --git a/test/etap/Makefile.am b/test/etap/Makefile.am
index a19cab9..13b5b4a 100644
--- a/test/etap/Makefile.am
+++ b/test/etap/Makefile.am
@@ -36,8 +36,6 @@ fixture_files = \
     fixtures/test.couch
 
 tap_files = \
-    170-os-daemons.es \
-    170-os-daemons.t \
     171-os-daemons-config.es \
     171-os-daemons-config.t \
     172-os-daemon-errors.1.sh \


[02/36] couchdb commit: updated refs/heads/1963-eunit to 85f2750

Posted by kx...@apache.org.
Port 001-load.t etap test suite to eunit


Project: http://git-wip-us.apache.org/repos/asf/couchdb/repo
Commit: http://git-wip-us.apache.org/repos/asf/couchdb/commit/a57684c2
Tree: http://git-wip-us.apache.org/repos/asf/couchdb/tree/a57684c2
Diff: http://git-wip-us.apache.org/repos/asf/couchdb/diff/a57684c2

Branch: refs/heads/1963-eunit
Commit: a57684c2eb7d15c0fc72885910a0029f8f212f03
Parents: 9427d69
Author: Alexander Shorin <kx...@apache.org>
Authored: Fri May 16 04:45:38 2014 +0400
Committer: Alexander Shorin <kx...@apache.org>
Committed: Tue Jun 3 02:16:40 2014 +0400

----------------------------------------------------------------------
 test/couchdb/Makefile.am                    |  1 +
 test/couchdb/couchdb_modules_load_tests.erl | 69 ++++++++++++++++++++++++
 test/etap/001-load.t                        | 68 -----------------------
 test/etap/Makefile.am                       |  1 -
 4 files changed, 70 insertions(+), 69 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/couchdb/blob/a57684c2/test/couchdb/Makefile.am
----------------------------------------------------------------------
diff --git a/test/couchdb/Makefile.am b/test/couchdb/Makefile.am
index fac5648..4802082 100644
--- a/test/couchdb/Makefile.am
+++ b/test/couchdb/Makefile.am
@@ -17,6 +17,7 @@ all:
 	chmod +x run
 
 eunit_files = \
+    couchdb_modules_load_tests.erl \
     couchdb_tests.hrl
 
 EXTRA_DIST = \

http://git-wip-us.apache.org/repos/asf/couchdb/blob/a57684c2/test/couchdb/couchdb_modules_load_tests.erl
----------------------------------------------------------------------
diff --git a/test/couchdb/couchdb_modules_load_tests.erl b/test/couchdb/couchdb_modules_load_tests.erl
new file mode 100644
index 0000000..eb1ddcb
--- /dev/null
+++ b/test/couchdb/couchdb_modules_load_tests.erl
@@ -0,0 +1,69 @@
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+%   http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+
+-module(couchdb_modules_load_tests).
+
+-include_lib("couchdb_tests.hrl").
+
+
+modules_load_test_() ->
+    {
+        "Verify that all modules loads",
+        should_load_modules()
+    }.
+
+
+should_load_modules() ->
+    Modules = [
+        couch_auth_cache,
+        couch_btree,
+        couch_changes,
+        couch_compress,
+        couch_config,
+        couch_config_writer,
+        couch_db,
+        couch_db_update_notifier,
+        couch_db_update_notifier_sup,
+        couch_db_updater,
+        couch_doc,
+        % Fails unless couch_config gen_server is started.
+        % couch_ejson_compare,
+        couch_event_sup,
+        couch_external_manager,
+        couch_external_server,
+        couch_file,
+        couch_httpd,
+        couch_httpd_db,
+        couch_httpd_external,
+        couch_httpd_misc_handlers,
+        couch_httpd_rewrite,
+        couch_httpd_stats_handlers,
+        couch_key_tree,
+        couch_log,
+        couch_os_process,
+        couch_query_servers,
+        couch_ref_counter,
+        couch_server,
+        couch_server_sup,
+        couch_stats_aggregator,
+        couch_stats_collector,
+        couch_stream,
+        couch_task_status,
+        couch_util,
+        couch_work_queue,
+        json_stream_parse
+    ],
+    [should_load_module(Mod) || Mod <- Modules].
+
+should_load_module(Mod) ->
+    {atom_to_list(Mod), ?_assertMatch({module, _}, code:load_file(Mod))}.

http://git-wip-us.apache.org/repos/asf/couchdb/blob/a57684c2/test/etap/001-load.t
----------------------------------------------------------------------
diff --git a/test/etap/001-load.t b/test/etap/001-load.t
deleted file mode 100755
index 5ce0d93..0000000
--- a/test/etap/001-load.t
+++ /dev/null
@@ -1,68 +0,0 @@
-#!/usr/bin/env escript
-%% -*- erlang -*-
-
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
-% Test that we can load each module.
-
-main(_) ->
-    test_util:init_code_path(),
-    Modules = [
-        couch_auth_cache,
-        couch_btree,
-        couch_changes,
-        couch_compress,
-        couch_config,
-        couch_config_writer,
-        couch_db,
-        couch_db_update_notifier,
-        couch_db_update_notifier_sup,
-        couch_db_updater,
-        couch_doc,
-        % Fails unless couch_config gen_server is started.
-        % couch_ejson_compare,
-        couch_event_sup,
-        couch_external_manager,
-        couch_external_server,
-        couch_file,
-        couch_httpd,
-        couch_httpd_db,
-        couch_httpd_external,
-        couch_httpd_misc_handlers,
-        couch_httpd_rewrite,
-        couch_httpd_stats_handlers,
-        couch_key_tree,
-        couch_log,
-        couch_os_process,
-        couch_query_servers,
-        couch_ref_counter,
-        couch_server,
-        couch_server_sup,
-        couch_stats_aggregator,
-        couch_stats_collector,
-        couch_stream,
-        couch_task_status,
-        couch_util,
-        couch_work_queue,
-        json_stream_parse
-    ],
-
-    etap:plan(length(Modules)),
-    lists:foreach(
-        fun(Module) ->
-            etap:loaded_ok(
-                Module,
-                lists:concat(["Loaded: ", Module])
-            )
-        end, Modules),
-    etap:end_tests().

http://git-wip-us.apache.org/repos/asf/couchdb/blob/a57684c2/test/etap/Makefile.am
----------------------------------------------------------------------
diff --git a/test/etap/Makefile.am b/test/etap/Makefile.am
index c9778ca..ff7f730 100644
--- a/test/etap/Makefile.am
+++ b/test/etap/Makefile.am
@@ -36,7 +36,6 @@ fixture_files = \
     fixtures/test.couch
 
 tap_files = \
-    001-load.t \
     002-icu-driver.t \
     010-file-basics.t \
     011-file-headers.t \


[22/36] couchdb commit: updated refs/heads/1963-eunit to 85f2750

Posted by kx...@apache.org.
Port 077-couch-db-fast-db-delete-create.t etap test suite to eunit

Merged into couch_db_tests suite.


Project: http://git-wip-us.apache.org/repos/asf/couchdb/repo
Commit: http://git-wip-us.apache.org/repos/asf/couchdb/commit/cfc6f6a1
Tree: http://git-wip-us.apache.org/repos/asf/couchdb/tree/cfc6f6a1
Diff: http://git-wip-us.apache.org/repos/asf/couchdb/diff/cfc6f6a1

Branch: refs/heads/1963-eunit
Commit: cfc6f6a1dd72cf10dc718c70f8e2a6ed0e2535c0
Parents: db5d227
Author: Alexander Shorin <kx...@apache.org>
Authored: Thu May 22 20:31:09 2014 +0400
Committer: Alexander Shorin <kx...@apache.org>
Committed: Tue Jun 3 15:14:26 2014 +0400

----------------------------------------------------------------------
 test/couchdb/couch_db_tests.erl                | 22 +++++++-
 test/etap/077-couch-db-fast-db-delete-create.t | 61 ---------------------
 test/etap/Makefile.am                          |  1 -
 3 files changed, 21 insertions(+), 63 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/couchdb/blob/cfc6f6a1/test/couchdb/couch_db_tests.erl
----------------------------------------------------------------------
diff --git a/test/couchdb/couch_db_tests.erl b/test/couchdb/couch_db_tests.erl
index d651126..5fd40d7 100644
--- a/test/couchdb/couch_db_tests.erl
+++ b/test/couchdb/couch_db_tests.erl
@@ -14,6 +14,7 @@
 
 -include_lib("couchdb_tests.hrl").
 
+-define(TIMETOUT, 1200000).
 
 setup() ->
     {ok, _} = couch_server_sup:start_link(?CONFIG_CHAIN),
@@ -33,7 +34,8 @@ create_delete_db_test_()->
                 [should_create_db(),
                  should_delete_db(),
                  should_create_multiple_dbs(),
-                 should_delete_multiple_dbs()]
+                 should_delete_multiple_dbs(),
+                 should_create_delete_database_continuously()]
             end
         }
     }.
@@ -88,3 +90,21 @@ should_delete_multiple_dbs() ->
     end, 0, DbNames),
 
     ?_assertEqual(NumDeleted, 6).
+
+should_create_delete_database_continuously() ->
+    DbName = ?tempdb(),
+    {ok, _} = couch_db:create(DbName, []),
+    [{timeout, ?TIMETOUT div 1000, {integer_to_list(N) ++ " times",
+                                    ?_assert(loop(DbName, N))}}
+     || N <- [10, 100, 1000]].
+
+loop(_, 0) ->
+    true;
+loop(DbName, N) ->
+    ok = cycle(DbName),
+    loop(DbName, N - 1).
+
+cycle(DbName) ->
+    ok = couch_server:delete(DbName, []),
+    {ok, _Db} = couch_db:create(DbName, []),
+    ok.

http://git-wip-us.apache.org/repos/asf/couchdb/blob/cfc6f6a1/test/etap/077-couch-db-fast-db-delete-create.t
----------------------------------------------------------------------
diff --git a/test/etap/077-couch-db-fast-db-delete-create.t b/test/etap/077-couch-db-fast-db-delete-create.t
deleted file mode 100644
index 2026698..0000000
--- a/test/etap/077-couch-db-fast-db-delete-create.t
+++ /dev/null
@@ -1,61 +0,0 @@
-#!/usr/bin/env escript
-%% -*- erlang -*-
-
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
-main(_) ->
-
-    test_util:init_code_path(),
-
-    etap:plan(unknown),
-    case (catch test()) of
-        ok ->
-            etap:end_tests();
-        Other ->
-            Msg = io_lib:format("Test died abnormally: ~p", [Other]),
-            etap:diag(Msg),
-            etap:bail(Msg)
-        end,
-    ok.
-
-loop(0) ->
-    ok;
-loop(N) ->
-    ok = cycle(),
-    loop(N - 1).
-
-cycle() ->
-    ok = couch_server:delete(<<"etap-test-db">>, []),
-    {ok, _Db} = couch_db:create(<<"etap-test-db">>, []),
-    ok.
-
-test() ->
-    couch_server_sup:start_link(test_util:config_files()),
-
-    {ok, _Db} = couch_db:create(<<"etap-test-db">>, []),
-
-    ok = loop(1),
-    ok = loop(10),
-    ok = loop(100),
-    ok = loop(1000),
-
-    % for more thorough testing:
-    % ok = loop(10000),
-    % ok = loop(100000),
-    % ok = loop(1000000),
-    % ok = loop(10000000),
-
-    ok = couch_server:delete(<<"etap-test-db">>, []),
-
-    etap:is(true, true, "lots of creating and deleting of a database"),
-    ok.

http://git-wip-us.apache.org/repos/asf/couchdb/blob/cfc6f6a1/test/etap/Makefile.am
----------------------------------------------------------------------
diff --git a/test/etap/Makefile.am b/test/etap/Makefile.am
index d950e46..3d0ad08 100644
--- a/test/etap/Makefile.am
+++ b/test/etap/Makefile.am
@@ -36,7 +36,6 @@ fixture_files = \
     fixtures/test.couch
 
 tap_files = \
-    077-couch-db-fast-db-delete-create.t \
     080-config-get-set.t \
     081-config-override.1.ini \
     081-config-override.2.ini \


[09/36] couchdb commit: updated refs/heads/1963-eunit to 85f2750

Posted by kx...@apache.org.
Port 074-doc-update-conflicts.t etap test suite to eunit

Timeout decreased, added 10K clients case


Project: http://git-wip-us.apache.org/repos/asf/couchdb/repo
Commit: http://git-wip-us.apache.org/repos/asf/couchdb/commit/5cdf7a0c
Tree: http://git-wip-us.apache.org/repos/asf/couchdb/tree/5cdf7a0c
Diff: http://git-wip-us.apache.org/repos/asf/couchdb/diff/5cdf7a0c

Branch: refs/heads/1963-eunit
Commit: 5cdf7a0c8d9d28999d86ca36a89519a829eba3a4
Parents: b337902
Author: Alexander Shorin <kx...@apache.org>
Authored: Wed May 21 18:58:12 2014 +0400
Committer: Alexander Shorin <kx...@apache.org>
Committed: Tue Jun 3 15:14:25 2014 +0400

----------------------------------------------------------------------
 test/couchdb/Makefile.am                        |   1 +
 test/couchdb/couchdb_update_conflicts_tests.erl | 237 +++++++++++++++++++
 test/etap/074-doc-update-conflicts.t            | 218 -----------------
 test/etap/Makefile.am                           |   1 -
 4 files changed, 238 insertions(+), 219 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/couchdb/blob/5cdf7a0c/test/couchdb/Makefile.am
----------------------------------------------------------------------
diff --git a/test/couchdb/Makefile.am b/test/couchdb/Makefile.am
index 58429e7..2c6dd64 100644
--- a/test/couchdb/Makefile.am
+++ b/test/couchdb/Makefile.am
@@ -30,6 +30,7 @@ eunit_files = \
     couch_db_tests.erl \
     couchdb_views_tests.erl \
     couch_changes_tests.erl \
+    couchdb_update_conflicts_tests.erl \
     test_request.erl \
     couchdb_tests.hrl
 

http://git-wip-us.apache.org/repos/asf/couchdb/blob/5cdf7a0c/test/couchdb/couchdb_update_conflicts_tests.erl
----------------------------------------------------------------------
diff --git a/test/couchdb/couchdb_update_conflicts_tests.erl b/test/couchdb/couchdb_update_conflicts_tests.erl
new file mode 100644
index 0000000..860b728
--- /dev/null
+++ b/test/couchdb/couchdb_update_conflicts_tests.erl
@@ -0,0 +1,237 @@
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+%   http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+-module(couchdb_update_conflicts_tests).
+
+-include_lib("../../src/couchdb/couch_db.hrl").
+-include_lib("couchdb_tests.hrl").
+
+-define(i2l(I), integer_to_list(I)).
+-define(ADMIN_USER, {#user_ctx{roles=[<<"_admin">>]}}).
+-define(DOC_ID, <<"foobar">>).
+-define(NUM_CLIENTS, [100, 500, 1000, 2000, 5000, 10000]).
+-define(TIMEOUT, 10000).
+
+
+start() ->
+    couch_server_sup:start_link(?CONFIG_CHAIN),
+    couch_config:set("couchdb", "delayed_commits", "true", false),
+    ok.
+
+stop(_) ->
+    couch_server_sup:stop(),
+    ok.
+
+setup() ->
+    DbName = ?tempdb(),
+    {ok, Db} = couch_db:create(DbName, [?ADMIN_USER, overwrite]),
+    Doc = couch_doc:from_json_obj({[{<<"_id">>, ?DOC_ID},
+                                    {<<"value">>, 0}]}),
+    {ok, Rev} = couch_db:update_doc(Db, Doc, []),
+    ok = couch_db:close(Db),
+    RevStr = couch_doc:rev_to_str(Rev),
+    {DbName, RevStr}.
+setup(_) ->
+    setup().
+
+teardown({DbName, _}) ->
+    ok = couch_server:delete(DbName, []),
+    ok.
+teardown(_, {DbName, _RevStr}) ->
+    teardown({DbName, _RevStr}).
+
+
+view_indexes_cleanup_test_() ->
+    {
+        "Update conflicts",
+        {
+            setup,
+            fun start/0, fun stop/1,
+            [
+                concurrent_updates(),
+                couchdb_188()
+            ]
+        }
+    }.
+
+concurrent_updates()->
+    {
+        "Concurrent updates",
+        {
+            foreachx,
+            fun setup/1, fun teardown/2,
+            [{NumClients, fun should_concurrently_update_doc/2}
+             || NumClients <- ?NUM_CLIENTS]
+        }
+    }.
+
+couchdb_188()->
+    {
+        "COUCHDB-188",
+        {
+            foreach,
+            fun setup/0, fun teardown/1,
+            [fun should_bulk_create_delete_doc/1]
+        }
+    }.
+
+
+should_concurrently_update_doc(NumClients, {DbName, InitRev})->
+     {?i2l(NumClients) ++ " clients",
+      {inorder,
+       [{"update doc",
+         {timeout, ?TIMEOUT div 1000,
+          ?_test(concurrent_doc_update(NumClients, DbName, InitRev))}},
+        {"ensure in single leaf",
+         ?_test(ensure_in_single_revision_leaf(DbName))}]}}.
+
+should_bulk_create_delete_doc({DbName, InitRev})->
+    ?_test(bulk_delete_create(DbName, InitRev)).
+
+
+concurrent_doc_update(NumClients, DbName, InitRev) ->
+    Clients = lists:map(
+        fun(Value) ->
+            ClientDoc = couch_doc:from_json_obj({[
+                {<<"_id">>, ?DOC_ID},
+                {<<"_rev">>, InitRev},
+                {<<"value">>, Value}
+            ]}),
+            Pid = spawn_client(DbName, ClientDoc),
+            {Value, Pid, erlang:monitor(process, Pid)}
+        end,
+        lists:seq(1, NumClients)),
+
+    lists:foreach(fun({_, Pid, _}) -> Pid ! go end, Clients),
+
+    {NumConflicts, SavedValue} = lists:foldl(
+        fun({Value, Pid, MonRef}, {AccConflicts, AccValue}) ->
+            receive
+                {'DOWN', MonRef, process, Pid, {ok, _NewRev}} ->
+                    {AccConflicts, Value};
+                {'DOWN', MonRef, process, Pid, conflict} ->
+                    {AccConflicts + 1, AccValue};
+                {'DOWN', MonRef, process, Pid, Error} ->
+                    erlang:error({assertion_failed,
+                         [{module, ?MODULE},
+                          {line, ?LINE},
+                          {reason, "Client " ++ ?i2l(Value)
+                                             ++ " got update error: "
+                                             ++ couch_util:to_list(Error)}]})
+            after ?TIMEOUT div 2 ->
+                 erlang:error({assertion_failed,
+                         [{module, ?MODULE},
+                          {line, ?LINE},
+                          {reason, "Timeout waiting for client "
+                                   ++ ?i2l(Value) ++ " to die"}]})
+            end
+        end, {0, nil}, Clients),
+    ?assertEqual(NumClients - 1, NumConflicts),
+
+    {ok, Db} = couch_db:open_int(DbName, []),
+    {ok, Leaves} = couch_db:open_doc_revs(Db, ?DOC_ID, all, []),
+    ok = couch_db:close(Db),
+    ?assertEqual(1, length(Leaves)),
+
+    [{ok, Doc2}] = Leaves,
+    {JsonDoc} = couch_doc:to_json_obj(Doc2, []),
+    ?assertEqual(SavedValue, couch_util:get_value(<<"value">>, JsonDoc)).
+
+ensure_in_single_revision_leaf(DbName) ->
+    {ok, Db} = couch_db:open_int(DbName, []),
+    {ok, Leaves} = couch_db:open_doc_revs(Db, ?DOC_ID, all, []),
+    ok = couch_db:close(Db),
+    [{ok, Doc}] = Leaves,
+
+    stop(ok),
+    start(),
+    
+    {ok, Db2} = couch_db:open_int(DbName, []),
+    {ok, Leaves2} = couch_db:open_doc_revs(Db2, ?DOC_ID, all, []),
+    ok = couch_db:close(Db2),
+    ?assertEqual(1, length(Leaves2)),
+
+    [{ok, Doc2}] = Leaves,
+    ?assertEqual(Doc, Doc2).
+    
+bulk_delete_create(DbName, InitRev) ->
+    {ok, Db} = couch_db:open_int(DbName, []),
+    
+    DeletedDoc = couch_doc:from_json_obj({[
+        {<<"_id">>, ?DOC_ID},
+        {<<"_rev">>, InitRev},
+        {<<"_deleted">>, true}
+    ]}),
+    NewDoc = couch_doc:from_json_obj({[
+        {<<"_id">>, ?DOC_ID},
+        {<<"value">>, 666}
+    ]}),
+
+    {ok, Results} = couch_db:update_docs(Db, [DeletedDoc, NewDoc], []),
+    ok = couch_db:close(Db),
+
+    ?assertEqual(2, length([ok || {ok, _} <- Results])),
+    [{ok, Rev1}, {ok, Rev2}] = Results,
+    
+    {ok, Db2} = couch_db:open_int(DbName, []),
+    {ok, [{ok, Doc1}]} = couch_db:open_doc_revs(
+        Db2, ?DOC_ID, [Rev1], [conflicts, deleted_conflicts]),
+    {ok, [{ok, Doc2}]} = couch_db:open_doc_revs(
+        Db2, ?DOC_ID, [Rev2], [conflicts, deleted_conflicts]),
+    ok = couch_db:close(Db2),
+
+    {Doc1Props} = couch_doc:to_json_obj(Doc1, []),
+    {Doc2Props} = couch_doc:to_json_obj(Doc2, []),
+
+    %% Document was deleted
+    ?assert(couch_util:get_value(<<"_deleted">>, Doc1Props)),
+    %% New document not flagged as deleted
+    ?assertEqual(undefined, couch_util:get_value(<<"_deleted">>,
+                                                 Doc2Props)),
+    %% New leaf revision has the right value
+    ?assertEqual(666, couch_util:get_value(<<"value">>,
+                                           Doc2Props)),
+    %% Deleted document has no conflicts
+    ?assertEqual(undefined, couch_util:get_value(<<"_conflicts">>,
+                                                 Doc1Props)),
+    %% Deleted document has no deleted conflicts
+    ?assertEqual(undefined, couch_util:get_value(<<"_deleted_conflicts">>,
+                                                 Doc1Props)),
+    %% New leaf revision doesn't have conflicts
+    ?assertEqual(undefined, couch_util:get_value(<<"_conflicts">>,
+                                                 Doc1Props)),
+    %% New leaf revision doesn't have deleted conflicts
+    ?assertEqual(undefined, couch_util:get_value(<<"_deleted_conflicts">>,
+                                                 Doc1Props)),
+
+    %% Deleted revision has position 2
+    ?assertEqual(2, element(1, Rev1)),
+    %% New leaf revision has position 1
+    ?assertEqual(1, element(1, Rev2)).
+
+
+spawn_client(DbName, Doc) ->
+    spawn(fun() ->
+        {ok, Db} = couch_db:open_int(DbName, []),
+        receive
+            go -> ok
+        end,
+        erlang:yield(),
+        Result = try
+            couch_db:update_doc(Db, Doc, [])
+        catch _:Error ->
+            Error
+        end,
+        ok = couch_db:close(Db),
+        exit(Result)
+    end).
+

http://git-wip-us.apache.org/repos/asf/couchdb/blob/5cdf7a0c/test/etap/074-doc-update-conflicts.t
----------------------------------------------------------------------
diff --git a/test/etap/074-doc-update-conflicts.t b/test/etap/074-doc-update-conflicts.t
deleted file mode 100755
index 09d0633..0000000
--- a/test/etap/074-doc-update-conflicts.t
+++ /dev/null
@@ -1,218 +0,0 @@
-#!/usr/bin/env escript
-%% -*- erlang -*-
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
--record(user_ctx, {
-    name = null,
-    roles = [],
-    handler
-}).
-
--define(i2l(I), integer_to_list(I)).
-
-test_db_name() -> <<"couch_test_update_conflicts">>.
-
-
-main(_) ->
-    test_util:init_code_path(),
-
-    etap:plan(35),
-    case (catch test()) of
-        ok ->
-            etap:end_tests();
-        Other ->
-            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
-            etap:bail(Other)
-    end,
-    ok.
-
-
-test() ->
-    couch_server_sup:start_link(test_util:config_files()),
-    couch_config:set("couchdb", "delayed_commits", "true", false),
-
-    lists:foreach(
-        fun(NumClients) -> test_concurrent_doc_update(NumClients) end,
-        [100, 500, 1000, 2000, 5000]),
-
-    test_bulk_delete_create(),
-
-    couch_server_sup:stop(),
-    ok.
-
-
-% Verify that if multiple clients try to update the same document
-% simultaneously, only one of them will get success response and all
-% the other ones will get a conflict error. Also validate that the
-% client which got the success response got its document version
-% persisted into the database.
-test_concurrent_doc_update(NumClients) ->
-    {ok, Db} = create_db(test_db_name()),
-    Doc = couch_doc:from_json_obj({[
-        {<<"_id">>, <<"foobar">>},
-        {<<"value">>, 0}
-    ]}),
-    {ok, Rev} = couch_db:update_doc(Db, Doc, []),
-    ok = couch_db:close(Db),
-    RevStr = couch_doc:rev_to_str(Rev),
-    etap:diag("Created first revision of test document"),
-
-    etap:diag("Spawning " ++ ?i2l(NumClients) ++
-        " clients to update the document"),
-    Clients = lists:map(
-        fun(Value) ->
-            ClientDoc = couch_doc:from_json_obj({[
-                {<<"_id">>, <<"foobar">>},
-                {<<"_rev">>, RevStr},
-                {<<"value">>, Value}
-            ]}),
-            Pid = spawn_client(ClientDoc),
-            {Value, Pid, erlang:monitor(process, Pid)}
-        end,
-        lists:seq(1, NumClients)),
-
-    lists:foreach(fun({_, Pid, _}) -> Pid ! go end, Clients),
-    etap:diag("Waiting for clients to finish"),
-
-    {NumConflicts, SavedValue} = lists:foldl(
-        fun({Value, Pid, MonRef}, {AccConflicts, AccValue}) ->
-            receive
-            {'DOWN', MonRef, process, Pid, {ok, _NewRev}} ->
-                {AccConflicts, Value};
-            {'DOWN', MonRef, process, Pid, conflict} ->
-                {AccConflicts + 1, AccValue};
-            {'DOWN', MonRef, process, Pid, Error} ->
-                etap:bail("Client " ++ ?i2l(Value) ++
-                    " got update error: " ++ couch_util:to_list(Error))
-            after 60000 ->
-                etap:bail("Timeout waiting for client " ++ ?i2l(Value) ++ " to die")
-            end
-        end,
-        {0, nil},
-        Clients),
-
-    etap:diag("Verifying client results"),
-    etap:is(
-        NumConflicts,
-        NumClients - 1,
-        "Got " ++ ?i2l(NumClients - 1) ++ " client conflicts"),
-
-    {ok, Db2} = couch_db:open_int(test_db_name(), []),
-    {ok, Leaves} = couch_db:open_doc_revs(Db2, <<"foobar">>, all, []),
-    ok = couch_db:close(Db2),
-    etap:is(length(Leaves), 1, "Only one document revision was persisted"),
-    [{ok, Doc2}] = Leaves,
-    {JsonDoc} = couch_doc:to_json_obj(Doc2, []),
-    etap:is(
-        couch_util:get_value(<<"value">>, JsonDoc),
-        SavedValue,
-        "Persisted doc has the right value"),
-
-    ok = timer:sleep(1000),
-    etap:diag("Restarting the server"),
-    couch_server_sup:stop(),
-    ok = timer:sleep(1000),
-    couch_server_sup:start_link(test_util:config_files()),
-
-    {ok, Db3} = couch_db:open_int(test_db_name(), []),
-    {ok, Leaves2} = couch_db:open_doc_revs(Db3, <<"foobar">>, all, []),
-    ok = couch_db:close(Db3),
-    etap:is(length(Leaves2), 1, "Only one document revision was persisted"),
-    [{ok, Doc3}] = Leaves,
-    etap:is(Doc3, Doc2, "Got same document after server restart"),
-
-    delete_db(Db3).
-
-
-% COUCHDB-188
-test_bulk_delete_create() ->
-    {ok, Db} = create_db(test_db_name()),
-    Doc = couch_doc:from_json_obj({[
-        {<<"_id">>, <<"foobar">>},
-        {<<"value">>, 0}
-    ]}),
-    {ok, Rev} = couch_db:update_doc(Db, Doc, []),
-
-    DeletedDoc = couch_doc:from_json_obj({[
-        {<<"_id">>, <<"foobar">>},
-        {<<"_rev">>, couch_doc:rev_to_str(Rev)},
-        {<<"_deleted">>, true}
-    ]}),
-    NewDoc = couch_doc:from_json_obj({[
-        {<<"_id">>, <<"foobar">>},
-        {<<"value">>, 666}
-    ]}),
-
-    {ok, Results} = couch_db:update_docs(Db, [DeletedDoc, NewDoc], []),
-    ok = couch_db:close(Db),
-
-    etap:is(length([ok || {ok, _} <- Results]), 2,
-        "Deleted and non-deleted versions got an ok reply"),
-
-    [{ok, Rev1}, {ok, Rev2}] = Results,
-    {ok, Db2} = couch_db:open_int(test_db_name(), []),
-
-    {ok, [{ok, Doc1}]} = couch_db:open_doc_revs(
-        Db2, <<"foobar">>, [Rev1], [conflicts, deleted_conflicts]),
-    {ok, [{ok, Doc2}]} = couch_db:open_doc_revs(
-        Db2, <<"foobar">>, [Rev2], [conflicts, deleted_conflicts]),
-    ok = couch_db:close(Db2),
-
-    {Doc1Props} = couch_doc:to_json_obj(Doc1, []),
-    {Doc2Props} = couch_doc:to_json_obj(Doc2, []),
-
-    etap:is(couch_util:get_value(<<"_deleted">>, Doc1Props), true,
-        "Document was deleted"),
-    etap:is(couch_util:get_value(<<"_deleted">>, Doc2Props), undefined,
-        "New document not flagged as deleted"),
-    etap:is(couch_util:get_value(<<"value">>, Doc2Props), 666,
-        "New leaf revision has the right value"),
-    etap:is(couch_util:get_value(<<"_conflicts">>, Doc1Props), undefined,
-        "Deleted document has no conflicts"),
-    etap:is(couch_util:get_value(<<"_deleted_conflicts">>, Doc1Props), undefined,
-        "Deleted document has no deleted conflicts"),
-    etap:is(couch_util:get_value(<<"_conflicts">>, Doc2Props), undefined,
-        "New leaf revision doesn't have conflicts"),
-    etap:is(couch_util:get_value(<<"_deleted_conflicts">>, Doc2Props), undefined,
-        "New leaf revision doesn't have deleted conflicts"),
-
-    etap:is(element(1, Rev1), 2, "Deleted revision has position 2"),
-    etap:is(element(1, Rev2), 1, "New leaf revision has position 1"),
-
-    delete_db(Db2).
-
-
-spawn_client(Doc) ->
-    spawn(fun() ->
-        {ok, Db} = couch_db:open_int(test_db_name(), []),
-        receive go -> ok end,
-        erlang:yield(),
-        Result = try
-            couch_db:update_doc(Db, Doc, [])
-        catch _:Error ->
-            Error
-        end,
-        ok = couch_db:close(Db),
-        exit(Result)
-    end).
-
-
-create_db(DbName) ->
-    couch_db:create(
-        DbName,
-        [{user_ctx, #user_ctx{roles = [<<"_admin">>]}}, overwrite]).
-
-
-delete_db(Db) ->
-    ok = couch_server:delete(
-        couch_db:name(Db), [{user_ctx, #user_ctx{roles = [<<"_admin">>]}}]).

http://git-wip-us.apache.org/repos/asf/couchdb/blob/5cdf7a0c/test/etap/Makefile.am
----------------------------------------------------------------------
diff --git a/test/etap/Makefile.am b/test/etap/Makefile.am
index 517705f..446a3f3 100644
--- a/test/etap/Makefile.am
+++ b/test/etap/Makefile.am
@@ -36,7 +36,6 @@ fixture_files = \
     fixtures/test.couch
 
 tap_files = \
-    074-doc-update-conflicts.t \
     075-auth-cache.t \
     076-file-compression.t \
     077-couch-db-fast-db-delete-create.t \


[19/36] couchdb commit: updated refs/heads/1963-eunit to 85f2750

Posted by kx...@apache.org.
Port 080-config-get-set.t etap test suite to eunit


Project: http://git-wip-us.apache.org/repos/asf/couchdb/repo
Commit: http://git-wip-us.apache.org/repos/asf/couchdb/commit/f90ddf59
Tree: http://git-wip-us.apache.org/repos/asf/couchdb/tree/f90ddf59
Diff: http://git-wip-us.apache.org/repos/asf/couchdb/diff/f90ddf59

Branch: refs/heads/1963-eunit
Commit: f90ddf5918f5399a591b1fb427440fb48f707e94
Parents: cfc6f6a
Author: Alexander Shorin <kx...@apache.org>
Authored: Fri May 23 09:29:22 2014 +0400
Committer: Alexander Shorin <kx...@apache.org>
Committed: Tue Jun 3 15:14:26 2014 +0400

----------------------------------------------------------------------
 test/couchdb/Makefile.am            |   1 +
 test/couchdb/couch_config_tests.erl | 163 +++++++++++++++++++++++++++++++
 test/etap/080-config-get-set.t      | 128 ------------------------
 test/etap/Makefile.am               |   1 -
 4 files changed, 164 insertions(+), 129 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/couchdb/blob/f90ddf59/test/couchdb/Makefile.am
----------------------------------------------------------------------
diff --git a/test/couchdb/Makefile.am b/test/couchdb/Makefile.am
index b763a0c..8668b96 100644
--- a/test/couchdb/Makefile.am
+++ b/test/couchdb/Makefile.am
@@ -33,6 +33,7 @@ eunit_files = \
     couchdb_update_conflicts_tests.erl \
     couch_auth_cache_tests.erl \
     couchdb_file_compression_tests.erl \
+    couch_config_tests.erl \
     test_request.erl \
     couchdb_tests.hrl
 

http://git-wip-us.apache.org/repos/asf/couchdb/blob/f90ddf59/test/couchdb/couch_config_tests.erl
----------------------------------------------------------------------
diff --git a/test/couchdb/couch_config_tests.erl b/test/couchdb/couch_config_tests.erl
new file mode 100644
index 0000000..9d09640
--- /dev/null
+++ b/test/couchdb/couch_config_tests.erl
@@ -0,0 +1,163 @@
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+%   http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+-module(couch_config_tests).
+
+-include("../../src/couchdb/couch_db.hrl").
+-include("couchdb_tests.hrl").
+
+
+setup() ->
+    {ok, Pid} = couch_config:start_link(?CONFIG_CHAIN),
+    Pid.
+
+teardown(Pid) ->
+    couch_config:stop(),
+    erlang:monitor(process, Pid),
+    receive
+        {'DOWN', _, _, Pid, _} ->
+            ok
+    after 1000 ->
+        throw({timeout_error, config_stop})
+    end.
+
+
+couch_config_test_() ->
+    {
+        "CouchDB config tests",
+        [
+            couch_config_get_tests(),
+            couch_config_set_tests(),
+            couch_config_del_tests()
+        ]
+    }.
+
+couch_config_get_tests() ->
+    {
+        "Config get tests",
+        {
+            foreach,
+            fun setup/0, fun teardown/1,
+            [
+                should_load_all_configs(),
+                should_locate_daemons_section(),
+                should_locate_mrview_handler(),
+                should_return_undefined_atom_on_missed_section(),
+                should_return_undefined_atom_on_missed_option(),
+                should_return_custom_default_value_on_missed_option(),
+                should_only_return_default_on_missed_option(),
+                should_get_binary_option()
+            ]
+        }
+    }.
+
+couch_config_set_tests() ->
+    {
+        "Config set tests",
+        {
+            foreach,
+            fun setup/0, fun teardown/1,
+            [
+                should_update_option(),
+                should_create_new_section(),
+                should_set_binary_option()
+            ]
+        }
+    }.
+
+couch_config_del_tests() ->
+    {
+        "Config deletion tests",
+        {
+            foreach,
+            fun setup/0, fun teardown/1,
+            [
+                should_return_undefined_atom_after_option_deletion(),
+                should_be_ok_on_deleting_unknown_options(),
+                should_delete_binary_option()
+            ]
+        }
+    }.
+
+
+should_load_all_configs() ->
+    ?_assert(length(couch_config:all()) > 0).
+
+should_locate_daemons_section() ->
+    ?_assert(length(couch_config:get("daemons")) > 0).
+
+should_locate_mrview_handler() ->
+    ?_assertEqual("{couch_mrview_http, handle_view_req}",
+                  couch_config:get("httpd_design_handlers", "_view")).
+
+should_return_undefined_atom_on_missed_section() ->
+    ?_assertEqual(undefined,
+                  couch_config:get("foo", "bar")).
+
+should_return_undefined_atom_on_missed_option() ->
+    ?_assertEqual(undefined,
+                  couch_config:get("httpd", "foo")).
+
+should_return_custom_default_value_on_missed_option() ->
+    ?_assertEqual("bar",
+                  couch_config:get("httpd", "foo", "bar")).
+
+should_only_return_default_on_missed_option() ->
+    ?_assertEqual("0",
+                  couch_config:get("httpd", "port", "bar")).
+
+should_get_binary_option() ->
+    ?_assertEqual(<<"baz">>,
+                  couch_config:get(<<"foo">>, <<"bar">>, <<"baz">>)).
+
+should_update_option() ->
+    ?_assertEqual("severe",
+        begin
+            ok = couch_config:set("log", "level", "severe", false),
+            couch_config:get("log", "level")
+        end).
+
+should_create_new_section() ->
+    ?_assertEqual("bang",
+        begin
+            undefined = couch_config:get("new_section", "bizzle"),
+            ok = couch_config:set("new_section", "bizzle", "bang", false),
+            couch_config:get("new_section", "bizzle")
+        end).
+
+should_set_binary_option() ->
+    ?_assertEqual(<<"baz">>,
+        begin
+            ok = couch_config:set(<<"foo">>, <<"bar">>, <<"baz">>, false),
+            couch_config:get(<<"foo">>, <<"bar">>)
+        end).
+
+should_return_undefined_atom_after_option_deletion() ->
+    ?_assertEqual(undefined,
+        begin
+            ok = couch_config:delete("log", "level", false),
+            couch_config:get("log", "level")
+        end).
+
+should_be_ok_on_deleting_unknown_options() ->
+    ?_assertEqual(ok,
+        begin
+            couch_config:delete("zoo", "boo", false)
+        end).
+
+should_delete_binary_option() ->
+    ?_assertEqual(undefined,
+        begin
+            ok = couch_config:set(<<"foo">>, <<"bar">>, <<"baz">>, false),
+            ok = couch_config:delete(<<"foo">>, <<"bar">>, false),
+            couch_config:get(<<"foo">>, <<"bar">>)
+        end).

http://git-wip-us.apache.org/repos/asf/couchdb/blob/f90ddf59/test/etap/080-config-get-set.t
----------------------------------------------------------------------
diff --git a/test/etap/080-config-get-set.t b/test/etap/080-config-get-set.t
deleted file mode 100755
index 94a9cba..0000000
--- a/test/etap/080-config-get-set.t
+++ /dev/null
@@ -1,128 +0,0 @@
-#!/usr/bin/env escript
-%% -*- erlang -*-
-
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
-default_config() ->
-    test_util:build_file("etc/couchdb/default_dev.ini").
-
-main(_) ->
-    test_util:init_code_path(),
-    etap:plan(12),
-    case (catch test()) of
-        ok ->
-            etap:end_tests();
-        Other ->
-            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
-            etap:bail(Other)
-    end,
-    ok.
-
-test() ->
-    % start couch_config with default
-    couch_config:start_link([default_config()]),
-
-
-    % Check that we can get values
-
-
-    etap:fun_is(
-        fun(List) -> length(List) > 0 end,
-        couch_config:all(),
-        "Data was loaded from the INI file."
-    ),
-
-    etap:fun_is(
-        fun(List) -> length(List) > 0 end,
-        couch_config:get("daemons"),
-        "There are settings in the [daemons] section of the INI file."
-    ),
-
-    etap:is(
-        couch_config:get("httpd_design_handlers", "_view"),
-        "{couch_mrview_http, handle_view_req}",
-        "The {httpd_design_handlers, view} is the expected default."
-    ),
-
-    etap:is(
-        couch_config:get("httpd", "foo", "bar"),
-        "bar",
-        "Returns the default when key doesn't exist in config."
-    ),
-
-    etap:is(
-        couch_config:get("httpd", "foo"),
-        undefined,
-        "The default default is the atom 'undefined'."
-    ),
-
-    etap:is(
-        couch_config:get("httpd", "port", "bar"),
-        "5984",
-        "Only returns the default when the config setting does not exist."
-    ),
-
-
-    % Check that setting values works.
-
-
-    ok = couch_config:set("log", "level", "severe", false),
-
-    etap:is(
-        couch_config:get("log", "level"),
-        "severe",
-        "Non persisted changes take effect."
-    ),
-
-    etap:is(
-        couch_config:get("new_section", "bizzle"),
-        undefined,
-        "Section 'new_section' does not exist."
-    ),
-
-    ok = couch_config:set("new_section", "bizzle", "bang", false),
-
-    etap:is(
-        couch_config:get("new_section", "bizzle"),
-        "bang",
-        "New section 'new_section' was created for a new key/value pair."
-    ),
-
-
-    % Check that deleting works
-
-
-    ok = couch_config:delete("new_section", "bizzle", false),
-    etap:is(
-        couch_config:get("new_section", "bizzle"),
-        undefined,
-        "Deleting sets the value to \"\""
-    ),
-
-
-    % Check ge/set/delete binary strings
-
-    ok = couch_config:set(<<"foo">>, <<"bar">>, <<"baz">>, false),
-    etap:is(
-        couch_config:get(<<"foo">>, <<"bar">>),
-        <<"baz">>,
-        "Can get and set with binary section and key values."
-    ),
-    ok = couch_config:delete(<<"foo">>, <<"bar">>, false),
-    etap:is(
-        couch_config:get(<<"foo">>, <<"bar">>),
-        undefined,
-        "Deleting with binary section/key pairs sets the value to \"\""
-    ),
-
-    ok.

http://git-wip-us.apache.org/repos/asf/couchdb/blob/f90ddf59/test/etap/Makefile.am
----------------------------------------------------------------------
diff --git a/test/etap/Makefile.am b/test/etap/Makefile.am
index 3d0ad08..436a27b 100644
--- a/test/etap/Makefile.am
+++ b/test/etap/Makefile.am
@@ -36,7 +36,6 @@ fixture_files = \
     fixtures/test.couch
 
 tap_files = \
-    080-config-get-set.t \
     081-config-override.1.ini \
     081-config-override.2.ini \
     081-config-override.t \


[35/36] couchdb commit: updated refs/heads/1963-eunit to 85f2750

Posted by kx...@apache.org.
Port 172-os-daemons-errors.t etap test suite to eunit

Merged into couchdb_os_daemons_tests suite.
Removed errors redirection to /dev/null to explicitly signal that
permissions are set correctly.


Project: http://git-wip-us.apache.org/repos/asf/couchdb/repo
Commit: http://git-wip-us.apache.org/repos/asf/couchdb/commit/c0ed8cfc
Tree: http://git-wip-us.apache.org/repos/asf/couchdb/tree/c0ed8cfc
Diff: http://git-wip-us.apache.org/repos/asf/couchdb/diff/c0ed8cfc

Branch: refs/heads/1963-eunit
Commit: c0ed8cfc4394355fcbb5f609d35aa0b68cf56a4d
Parents: 21a1c8b
Author: Alexander Shorin <kx...@apache.org>
Authored: Tue Jun 3 15:54:33 2014 +0400
Committer: Alexander Shorin <kx...@apache.org>
Committed: Tue Jun 3 19:53:18 2014 +0400

----------------------------------------------------------------------
 test/couchdb/Makefile.am                       |   6 +
 test/couchdb/couchdb_os_daemons_tests.erl      |  64 +++++++++-
 test/couchdb/fixtures/os_daemon_bad_perm.sh    |  17 +++
 test/couchdb/fixtures/os_daemon_can_reboot.sh  |  15 +++
 test/couchdb/fixtures/os_daemon_die_on_boot.sh |  15 +++
 test/couchdb/fixtures/os_daemon_die_quickly.sh |  15 +++
 test/etap/172-os-daemon-errors.1.sh            |  17 ---
 test/etap/172-os-daemon-errors.2.sh            |  15 ---
 test/etap/172-os-daemon-errors.3.sh            |  15 ---
 test/etap/172-os-daemon-errors.4.sh            |  15 ---
 test/etap/172-os-daemon-errors.t               | 126 --------------------
 test/etap/Makefile.am                          |   5 -
 12 files changed, 131 insertions(+), 194 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/couchdb/blob/c0ed8cfc/test/couchdb/Makefile.am
----------------------------------------------------------------------
diff --git a/test/couchdb/Makefile.am b/test/couchdb/Makefile.am
index cb66c85..bf306c8 100644
--- a/test/couchdb/Makefile.am
+++ b/test/couchdb/Makefile.am
@@ -16,6 +16,8 @@ all:
 	mkdir -p {ebin,temp}
 	${ERLC} -oebin test_request.erl
 	chmod +x run
+	chmod +x fixtures/*.sh
+	chmod -x fixtures/os_daemon_bad_perm.sh
 
 eunit_files = \
     couchdb_modules_load_tests.erl \
@@ -50,6 +52,10 @@ fixture_files = \
     fixtures/couch_stats_aggregates.ini \
     fixtures/os_daemon_looper.escript \
     fixtures/os_daemon_configer.escript \
+    fixtures/os_daemon_bad_perm.sh \
+    fixtures/os_daemon_can_reboot.sh \
+    fixtures/os_daemon_die_on_boot.sh \
+    fixtures/os_daemon_die_quickly.sh \
     fixtures/logo.png
 
 EXTRA_DIST = \

http://git-wip-us.apache.org/repos/asf/couchdb/blob/c0ed8cfc/test/couchdb/couchdb_os_daemons_tests.erl
----------------------------------------------------------------------
diff --git a/test/couchdb/couchdb_os_daemons_tests.erl b/test/couchdb/couchdb_os_daemons_tests.erl
index 277127f..0591176 100644
--- a/test/couchdb/couchdb_os_daemons_tests.erl
+++ b/test/couchdb/couchdb_os_daemons_tests.erl
@@ -28,6 +28,10 @@
 
 -define(DAEMON_CONFIGER, "os_daemon_configer.escript").
 -define(DAEMON_LOOPER, "os_daemon_looper.escript").
+-define(DAEMON_BAD_PERM, "os_daemon_bad_perm.sh").
+-define(DAEMON_CAN_REBOOT, "os_daemon_can_reboot.sh").
+-define(DAEMON_DIE_ON_BOOT, "os_daemon_die_on_boot.sh").
+-define(DAEMON_DIE_QUICKLY, "os_daemon_die_quickly.sh").
 -define(DELAY, 100).
 
 
@@ -83,6 +87,20 @@ configuration_reader_test_() ->
             fun setup/1, fun teardown/2,
             [{?DAEMON_CONFIGER,
               fun should_read_write_config_settings_by_daemon/2}]
+
+        }
+    }.
+
+error_test_() ->
+    {
+        "OS Daemon process error tests",
+        {
+            foreachx,
+            fun setup/1, fun teardown/2,
+            [{?DAEMON_BAD_PERM, fun should_fail_due_to_lack_of_permissions/2},
+             {?DAEMON_DIE_ON_BOOT, fun should_die_on_boot/2},
+             {?DAEMON_DIE_QUICKLY, fun should_die_quickly/2},
+             {?DAEMON_CAN_REBOOT, fun should_not_being_halted/2}]
         }
     }.
 
@@ -153,13 +171,57 @@ should_read_write_config_settings_by_daemon(DName, _) ->
         check_daemon(D, DName)
     end).
 
+should_fail_due_to_lack_of_permissions(DName, _) ->
+    ?_test(should_halts(DName, 1000)).
+
+should_die_on_boot(DName, _) ->
+    ?_test(should_halts(DName, 1000)).
+
+should_die_quickly(DName, _) ->
+    ?_test(should_halts(DName, 4000)).
+
+should_not_being_halted(DName, _) ->
+    ?_test(begin
+        timer:sleep(1000),
+        {ok, [D1]} = couch_os_daemons:info([table]),
+        check_daemon(D1, DName, 0),
+
+        % Should reboot every two seconds. We're at 1s, so wait
+        % until 3s to be in the middle of the next invocation's
+        % life span.
+
+        timer:sleep(2000),
+        {ok, [D2]} = couch_os_daemons:info([table]),
+        check_daemon(D2, DName, 1),
+
+        % If the kill command changed, that means we rebooted the process.
+        ?assertNotEqual(D1#daemon.kill, D2#daemon.kill)
+    end).
+
+should_halts(DName, Time) ->
+    timer:sleep(Time),
+    {ok, [D]} = couch_os_daemons:info([table]),
+    check_dead(D, DName),
+    couch_config:delete("os_daemons", DName, false).
 
 check_daemon(D) ->
     check_daemon(D, D#daemon.name).
 
 check_daemon(D, Name) ->
+    check_daemon(D, Name, 0).
+
+check_daemon(D, Name, Errs) ->
     ?assert(is_port(D#daemon.port)),
     ?assertEqual(Name, D#daemon.name),
     ?assertNotEqual(undefined, D#daemon.kill),
-    ?assertEqual([], D#daemon.errors),
+    ?assertEqual(running, D#daemon.status),
+    ?assertEqual(Errs, length(D#daemon.errors)),
     ?assertEqual([], D#daemon.buf).
+
+check_dead(D, Name) ->
+    ?assert(is_port(D#daemon.port)),
+    ?assertEqual(Name, D#daemon.name),
+    ?assertNotEqual(undefined, D#daemon.kill),
+    ?assertEqual(halted, D#daemon.status),
+    ?assertEqual(nil, D#daemon.errors),
+    ?assertEqual(nil, D#daemon.buf).

http://git-wip-us.apache.org/repos/asf/couchdb/blob/c0ed8cfc/test/couchdb/fixtures/os_daemon_bad_perm.sh
----------------------------------------------------------------------
diff --git a/test/couchdb/fixtures/os_daemon_bad_perm.sh b/test/couchdb/fixtures/os_daemon_bad_perm.sh
new file mode 100644
index 0000000..345c8b4
--- /dev/null
+++ b/test/couchdb/fixtures/os_daemon_bad_perm.sh
@@ -0,0 +1,17 @@
+#!/bin/sh -e
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+# 
+#   http://www.apache.org/licenses/LICENSE-2.0
+# 
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
+# 
+# Please do not make this file executable as that's the error being tested.
+
+sleep 5

http://git-wip-us.apache.org/repos/asf/couchdb/blob/c0ed8cfc/test/couchdb/fixtures/os_daemon_can_reboot.sh
----------------------------------------------------------------------
diff --git a/test/couchdb/fixtures/os_daemon_can_reboot.sh b/test/couchdb/fixtures/os_daemon_can_reboot.sh
new file mode 100755
index 0000000..5bc10e8
--- /dev/null
+++ b/test/couchdb/fixtures/os_daemon_can_reboot.sh
@@ -0,0 +1,15 @@
+#!/bin/sh -e
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+# 
+#   http://www.apache.org/licenses/LICENSE-2.0
+# 
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
+
+sleep 2

http://git-wip-us.apache.org/repos/asf/couchdb/blob/c0ed8cfc/test/couchdb/fixtures/os_daemon_die_on_boot.sh
----------------------------------------------------------------------
diff --git a/test/couchdb/fixtures/os_daemon_die_on_boot.sh b/test/couchdb/fixtures/os_daemon_die_on_boot.sh
new file mode 100755
index 0000000..256ee79
--- /dev/null
+++ b/test/couchdb/fixtures/os_daemon_die_on_boot.sh
@@ -0,0 +1,15 @@
+#!/bin/sh -e
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+# 
+#   http://www.apache.org/licenses/LICENSE-2.0
+# 
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
+
+exit 1

http://git-wip-us.apache.org/repos/asf/couchdb/blob/c0ed8cfc/test/couchdb/fixtures/os_daemon_die_quickly.sh
----------------------------------------------------------------------
diff --git a/test/couchdb/fixtures/os_daemon_die_quickly.sh b/test/couchdb/fixtures/os_daemon_die_quickly.sh
new file mode 100755
index 0000000..f5a1368
--- /dev/null
+++ b/test/couchdb/fixtures/os_daemon_die_quickly.sh
@@ -0,0 +1,15 @@
+#!/bin/sh -e
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+# 
+#   http://www.apache.org/licenses/LICENSE-2.0
+# 
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
+
+sleep 1

http://git-wip-us.apache.org/repos/asf/couchdb/blob/c0ed8cfc/test/etap/172-os-daemon-errors.1.sh
----------------------------------------------------------------------
diff --git a/test/etap/172-os-daemon-errors.1.sh b/test/etap/172-os-daemon-errors.1.sh
deleted file mode 100644
index 345c8b4..0000000
--- a/test/etap/172-os-daemon-errors.1.sh
+++ /dev/null
@@ -1,17 +0,0 @@
-#!/bin/sh -e
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may not
-# use this file except in compliance with the License. You may obtain a copy of
-# the License at
-# 
-#   http://www.apache.org/licenses/LICENSE-2.0
-# 
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations under
-# the License.
-# 
-# Please do not make this file executable as that's the error being tested.
-
-sleep 5

http://git-wip-us.apache.org/repos/asf/couchdb/blob/c0ed8cfc/test/etap/172-os-daemon-errors.2.sh
----------------------------------------------------------------------
diff --git a/test/etap/172-os-daemon-errors.2.sh b/test/etap/172-os-daemon-errors.2.sh
deleted file mode 100755
index 256ee79..0000000
--- a/test/etap/172-os-daemon-errors.2.sh
+++ /dev/null
@@ -1,15 +0,0 @@
-#!/bin/sh -e
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may not
-# use this file except in compliance with the License. You may obtain a copy of
-# the License at
-# 
-#   http://www.apache.org/licenses/LICENSE-2.0
-# 
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations under
-# the License.
-
-exit 1

http://git-wip-us.apache.org/repos/asf/couchdb/blob/c0ed8cfc/test/etap/172-os-daemon-errors.3.sh
----------------------------------------------------------------------
diff --git a/test/etap/172-os-daemon-errors.3.sh b/test/etap/172-os-daemon-errors.3.sh
deleted file mode 100755
index f5a1368..0000000
--- a/test/etap/172-os-daemon-errors.3.sh
+++ /dev/null
@@ -1,15 +0,0 @@
-#!/bin/sh -e
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may not
-# use this file except in compliance with the License. You may obtain a copy of
-# the License at
-# 
-#   http://www.apache.org/licenses/LICENSE-2.0
-# 
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations under
-# the License.
-
-sleep 1

http://git-wip-us.apache.org/repos/asf/couchdb/blob/c0ed8cfc/test/etap/172-os-daemon-errors.4.sh
----------------------------------------------------------------------
diff --git a/test/etap/172-os-daemon-errors.4.sh b/test/etap/172-os-daemon-errors.4.sh
deleted file mode 100755
index 5bc10e8..0000000
--- a/test/etap/172-os-daemon-errors.4.sh
+++ /dev/null
@@ -1,15 +0,0 @@
-#!/bin/sh -e
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may not
-# use this file except in compliance with the License. You may obtain a copy of
-# the License at
-# 
-#   http://www.apache.org/licenses/LICENSE-2.0
-# 
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations under
-# the License.
-
-sleep 2

http://git-wip-us.apache.org/repos/asf/couchdb/blob/c0ed8cfc/test/etap/172-os-daemon-errors.t
----------------------------------------------------------------------
diff --git a/test/etap/172-os-daemon-errors.t b/test/etap/172-os-daemon-errors.t
deleted file mode 100755
index bde5c6f..0000000
--- a/test/etap/172-os-daemon-errors.t
+++ /dev/null
@@ -1,126 +0,0 @@
-#!/usr/bin/env escript
-%% -*- erlang -*-
-
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License.  You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
--record(daemon, {
-    port,
-    name,
-    cmd,
-    kill,
-    status=running,
-    cfg_patterns=[],
-    errors=[],
-    buf=[]
-}).
-
-config_files() ->
-    lists:map(fun test_util:build_file/1, [
-        "etc/couchdb/default_dev.ini"
-    ]).
-
-bad_perms() ->
-    test_util:source_file("test/etap/172-os-daemon-errors.1.sh").
-
-die_on_boot() ->
-    test_util:source_file("test/etap/172-os-daemon-errors.2.sh").
-
-die_quickly() ->
-    test_util:source_file("test/etap/172-os-daemon-errors.3.sh").
-
-can_reboot() ->
-    test_util:source_file("test/etap/172-os-daemon-errors.4.sh").
-
-main(_) ->
-    test_util:init_code_path(),
-
-    etap:plan(36),
-    case (catch test()) of
-        ok ->
-            etap:end_tests();
-        Other ->
-            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
-            etap:bail(Other)
-    end,
-    ok.
-
-test() ->
-    couch_config:start_link(config_files()),
-    couch_os_daemons:start_link(),
-
-    etap:diag("Daemon not executable."),
-    test_halts("foo", bad_perms(), 1000),
-
-    etap:diag("Daemon dies on boot."),
-    test_halts("bar", die_on_boot(), 1000),
-
-    etap:diag("Daemon dies quickly after boot."),
-    test_halts("baz", die_quickly(), 4000),
-    
-    etap:diag("Daemon dies, but not quickly enough to be halted."),
-    test_runs("bam", can_reboot()),
-    
-    ok.
-
-test_halts(Name, Cmd, Time) ->
-    couch_config:set("os_daemons", Name, Cmd ++ " 2> /dev/null", false),
-    timer:sleep(Time),
-    {ok, [D]} = couch_os_daemons:info([table]),
-    check_dead(D, Name, Cmd),
-    couch_config:delete("os_daemons", Name, false).
-
-test_runs(Name, Cmd) ->
-    couch_config:set("os_daemons", Name, Cmd, false),
-
-    timer:sleep(1000),
-    {ok, [D1]} = couch_os_daemons:info([table]),
-    check_daemon(D1, Name, Cmd, 0),
-    
-    % Should reboot every two seconds. We're at 1s, so wait
-    % utnil 3s to be in the middle of the next invocation's
-    % life span.
-    timer:sleep(2000),
-    {ok, [D2]} = couch_os_daemons:info([table]),
-    check_daemon(D2, Name, Cmd, 1),
-    
-    % If the kill command changed, that means we rebooted the process.
-    etap:isnt(D1#daemon.kill, D2#daemon.kill, "Kill command changed.").
-
-check_dead(D, Name, Cmd) ->
-    BaseName = filename:basename(Cmd) ++ " 2> /dev/null",
-    BaseLen = length(BaseName),
-    CmdLen = length(D#daemon.cmd),
-    CmdName = lists:sublist(D#daemon.cmd, CmdLen-BaseLen+1, BaseLen),
-
-    etap:is(is_port(D#daemon.port), true, "Daemon port is a port."),
-    etap:is(D#daemon.name, Name, "Daemon name was set correctly."),
-    etap:is(CmdName, BaseName, "Command name was set correctly."),
-    etap:isnt(D#daemon.kill, undefined, "Kill command was set."),
-    etap:is(D#daemon.status, halted, "Daemon has been halted."),
-    etap:is(D#daemon.errors, nil, "Errors have been disabled."),
-    etap:is(D#daemon.buf, nil, "Buffer has been switched off.").
-
-check_daemon(D, Name, Cmd, Errs) ->
-    BaseName = filename:basename(Cmd),
-    BaseLen = length(BaseName),
-    CmdLen = length(D#daemon.cmd),
-    CmdName = lists:sublist(D#daemon.cmd, CmdLen-BaseLen+1, BaseLen),
-
-    etap:is(is_port(D#daemon.port), true, "Daemon port is a port."),
-    etap:is(D#daemon.name, Name, "Daemon name was set correctly."),
-    etap:is(CmdName, BaseName, "Command name was set correctly."),
-    etap:isnt(D#daemon.kill, undefined, "Kill command was set."),
-    etap:is(D#daemon.status, running, "Daemon still running."),
-    etap:is(length(D#daemon.errors), Errs, "Found expected number of errors."),
-    etap:is(D#daemon.buf, [], "No extra data left in the buffer.").
-

http://git-wip-us.apache.org/repos/asf/couchdb/blob/c0ed8cfc/test/etap/Makefile.am
----------------------------------------------------------------------
diff --git a/test/etap/Makefile.am b/test/etap/Makefile.am
index 1ec6a48..e42d398 100644
--- a/test/etap/Makefile.am
+++ b/test/etap/Makefile.am
@@ -36,11 +36,6 @@ fixture_files = \
     fixtures/test.couch
 
 tap_files = \
-    172-os-daemon-errors.1.sh \
-    172-os-daemon-errors.2.sh \
-    172-os-daemon-errors.3.sh \
-    172-os-daemon-errors.4.sh \
-    172-os-daemon-errors.t \
     173-os-daemon-cfg-register.t \
     180-http-proxy.ini \
     180-http-proxy.t \


[21/36] couchdb commit: updated refs/heads/1963-eunit to 85f2750

Posted by kx...@apache.org.
Port 076-file-compression.t etap test suite to eunit

The original test suite was decoupled into compaction and comparison
cases.


Project: http://git-wip-us.apache.org/repos/asf/couchdb/repo
Commit: http://git-wip-us.apache.org/repos/asf/couchdb/commit/db5d2277
Tree: http://git-wip-us.apache.org/repos/asf/couchdb/tree/db5d2277
Diff: http://git-wip-us.apache.org/repos/asf/couchdb/diff/db5d2277

Branch: refs/heads/1963-eunit
Commit: db5d2277a5e8133fc07c27412797556bda554fdb
Parents: c14cf25
Author: Alexander Shorin <kx...@apache.org>
Authored: Thu May 22 20:17:47 2014 +0400
Committer: Alexander Shorin <kx...@apache.org>
Committed: Tue Jun 3 15:14:26 2014 +0400

----------------------------------------------------------------------
 test/couchdb/Makefile.am                        |   1 +
 test/couchdb/couchdb_file_compression_tests.erl | 236 +++++++++++++++++++
 test/etap/076-file-compression.t                | 186 ---------------
 test/etap/Makefile.am                           |   1 -
 4 files changed, 237 insertions(+), 187 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/couchdb/blob/db5d2277/test/couchdb/Makefile.am
----------------------------------------------------------------------
diff --git a/test/couchdb/Makefile.am b/test/couchdb/Makefile.am
index 20c1309..b763a0c 100644
--- a/test/couchdb/Makefile.am
+++ b/test/couchdb/Makefile.am
@@ -32,6 +32,7 @@ eunit_files = \
     couch_changes_tests.erl \
     couchdb_update_conflicts_tests.erl \
     couch_auth_cache_tests.erl \
+    couchdb_file_compression_tests.erl \
     test_request.erl \
     couchdb_tests.hrl
 

http://git-wip-us.apache.org/repos/asf/couchdb/blob/db5d2277/test/couchdb/couchdb_file_compression_tests.erl
----------------------------------------------------------------------
diff --git a/test/couchdb/couchdb_file_compression_tests.erl b/test/couchdb/couchdb_file_compression_tests.erl
new file mode 100644
index 0000000..a2b89d3
--- /dev/null
+++ b/test/couchdb/couchdb_file_compression_tests.erl
@@ -0,0 +1,236 @@
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+%   http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+-module(couchdb_file_compression_tests).
+
+-include("../../src/couchdb/couch_db.hrl").
+-include("couchdb_tests.hrl").
+
+-define(ADMIN_USER, {user_ctx, #user_ctx{roles=[<<"_admin">>]}}).
+-define(DDOC_ID, <<"_design/test">>).
+-define(DOCS_COUNT, 5000).
+-define(TIMEOUT, 30000).
+
+
+start() ->
+    couch_server_sup:start_link(?CONFIG_CHAIN),
+    % disable logging to reduce noise in stdout
+    couch_config:set("log", "level", "none", false),
+    ok.
+
+stop(_) ->
+    couch_server_sup:stop(),
+    ok.
+
+setup() ->
+    couch_config:set("couchdb", "file_compression", "none", false),
+    DbName = ?tempdb(),
+    {ok, Db} = couch_db:create(DbName, [?ADMIN_USER]),
+    ok = populate_db(Db, ?DOCS_COUNT),
+    DDoc = couch_doc:from_json_obj({[
+        {<<"_id">>, ?DDOC_ID},
+        {<<"language">>, <<"javascript">>},
+        {<<"views">>, {[
+                {<<"by_id">>, {[
+                    {<<"map">>, <<"function(doc){emit(doc._id, doc.string);}">>}
+                ]}}
+            ]}
+        }
+    ]}),
+    {ok, _} = couch_db:update_doc(Db, DDoc, []),
+    refresh_index(DbName),
+    ok = couch_db:close(Db),
+    DbName.
+
+teardown(DbName) ->
+    ok = couch_server:delete(DbName, [?ADMIN_USER]),
+    ok.
+
+
+couch_auth_cache_test_() ->
+    {
+        "CouchDB file compression tests",
+        {
+            setup,
+            fun start/0, fun stop/1,
+            {
+                foreach,
+                fun setup/0, fun teardown/1,
+                [
+                    fun should_use_none/1,
+                    fun should_use_deflate_1/1,
+                    fun should_use_deflate_9/1,
+                    fun should_use_snappy/1,
+                    fun should_compare_compression_methods/1
+                ]
+            }
+        }
+    }.
+
+
+should_use_none(DbName) ->
+    couch_config:set("couchdb", "file_compression", "none", false),
+    {
+        "Use no compression",
+        [
+            {"compact database", ?_test(compact_db(DbName))},
+            {"compact view", ?_test(compact_view(DbName))}
+        ]
+    }.
+
+should_use_deflate_1(DbName) ->
+    couch_config:set("couchdb", "file_compression", "deflate_1", false),
+    {
+        "Use deflate compression at level 1",
+        [
+            {"compact database", ?_test(compact_db(DbName))},
+            {"compact view", ?_test(compact_view(DbName))}
+        ]
+    }.
+
+should_use_deflate_9(DbName) ->
+    couch_config:set("couchdb", "file_compression", "deflate_9", false),
+    {
+        "Use deflate compression at level 9",
+        [
+            {"compact database", ?_test(compact_db(DbName))},
+            {"compact view", ?_test(compact_view(DbName))}
+        ]
+    }.
+
+should_use_snappy(DbName) ->
+    couch_config:set("couchdb", "file_compression", "snappy", false),
+    {
+        "Use snappy compression",
+        [
+            {"compact database", ?_test(compact_db(DbName))},
+            {"compact view", ?_test(compact_view(DbName))}
+        ]
+    }.
+
+should_compare_compression_methods(DbName) ->
+    {"none > snappy > deflate_1 > deflate_9",
+     {timeout, ?TIMEOUT div 1000, ?_test(compare_compression_methods(DbName))}}.
+
+compare_compression_methods(DbName) ->
+    couch_config:set("couchdb", "file_compression", "none", false),
+    compact_db(DbName),
+    compact_view(DbName),
+    DbSizeNone = db_disk_size(DbName),
+    ViewSizeNone = view_disk_size(DbName),
+
+    couch_config:set("couchdb", "file_compression", "snappy", false),
+    compact_db(DbName),
+    compact_view(DbName),
+    DbSizeSnappy = db_disk_size(DbName),
+    ViewSizeSnappy = view_disk_size(DbName),
+
+    ?assert(DbSizeNone > DbSizeSnappy),
+    ?assert(ViewSizeNone > ViewSizeSnappy),
+
+    couch_config:set("couchdb", "file_compression", "deflate_1", false),
+    compact_db(DbName),
+    compact_view(DbName),
+    DbSizeDeflate1 = db_disk_size(DbName),
+    ViewSizeDeflate1 = view_disk_size(DbName),
+
+    ?assert(DbSizeSnappy > DbSizeDeflate1),
+    ?assert(ViewSizeSnappy > ViewSizeDeflate1),
+
+    couch_config:set("couchdb", "file_compression", "deflate_9", false),
+    compact_db(DbName),
+    compact_view(DbName),
+    DbSizeDeflate9 = db_disk_size(DbName),
+    ViewSizeDeflate9 = view_disk_size(DbName),
+
+    ?assert(DbSizeDeflate1 > DbSizeDeflate9),
+    ?assert(ViewSizeDeflate1 > ViewSizeDeflate9).
+
+
+populate_db(_Db, NumDocs) when NumDocs =< 0 ->
+    ok;
+populate_db(Db, NumDocs) ->
+    Docs = lists:map(
+        fun(_) ->
+            couch_doc:from_json_obj({[
+                {<<"_id">>, couch_uuids:random()},
+                {<<"string">>, ?l2b(lists:duplicate(1000, $X))}
+            ]})
+        end,
+        lists:seq(1, 500)),
+    {ok, _} = couch_db:update_docs(Db, Docs, []),
+    populate_db(Db, NumDocs - 500).
+
+refresh_index(DbName) ->
+    {ok, Db} = couch_db:open_int(DbName, []),
+    {ok, DDoc} = couch_db:open_doc(Db, ?DDOC_ID, [ejson_body]),
+    couch_mrview:query_view(Db, DDoc, <<"by_id">>, [{stale, false}]),
+    ok = couch_db:close(Db).
+
+compact_db(DbName) ->
+    DiskSizeBefore = db_disk_size(DbName),
+    {ok, Db} = couch_db:open_int(DbName, []),
+    {ok, CompactPid} = couch_db:start_compact(Db),
+    MonRef = erlang:monitor(process, CompactPid),
+    receive
+        {'DOWN', MonRef, process, CompactPid, normal} ->
+            ok;
+        {'DOWN', MonRef, process, CompactPid, Reason} ->
+            erlang:error({assertion_failed,
+                          [{module, ?MODULE},
+                           {line, ?LINE},
+                           {reason, "Error compacting database: "
+                                    ++ couch_util:to_list(Reason)}]})
+    after ?TIMEOUT ->
+        erlang:error({assertion_failed,
+                      [{module, ?MODULE},
+                       {line, ?LINE},
+                       {reason, "Timeout waiting for database compaction"}]})
+    end,
+    ok = couch_db:close(Db),
+    DiskSizeAfter = db_disk_size(DbName),
+    ?assert(DiskSizeBefore > DiskSizeAfter).
+
+compact_view(DbName) ->
+    DiskSizeBefore = view_disk_size(DbName),
+    {ok, MonRef} = couch_mrview:compact(DbName, ?DDOC_ID, [monitor]),
+    receive
+        {'DOWN', MonRef, process, _CompactPid, normal} ->
+            ok;
+        {'DOWN', MonRef, process, _CompactPid, Reason} ->
+            erlang:error({assertion_failed,
+                          [{module, ?MODULE},
+                           {line, ?LINE},
+                           {reason, "Error compacting view group: "
+                                    ++ couch_util:to_list(Reason)}]})
+    after ?TIMEOUT ->
+        erlang:error({assertion_failed,
+                      [{module, ?MODULE},
+                       {line, ?LINE},
+                       {reason, "Timeout waiting for view group compaction"}]})
+    end,
+    DiskSizeAfter = view_disk_size(DbName),
+    ?assert(DiskSizeBefore > DiskSizeAfter).
+
+db_disk_size(DbName) ->
+    {ok, Db} = couch_db:open_int(DbName, []),
+    {ok, Info} = couch_db:get_db_info(Db),
+    ok = couch_db:close(Db),
+    couch_util:get_value(disk_size, Info).
+
+view_disk_size(DbName) ->
+    {ok, Db} = couch_db:open_int(DbName, []),
+    {ok, DDoc} = couch_db:open_doc(Db, ?DDOC_ID, [ejson_body]),
+    {ok, Info} = couch_mrview:get_info(Db, DDoc),
+    ok = couch_db:close(Db),
+    couch_util:get_value(disk_size, Info).
+

http://git-wip-us.apache.org/repos/asf/couchdb/blob/db5d2277/test/etap/076-file-compression.t
----------------------------------------------------------------------
diff --git a/test/etap/076-file-compression.t b/test/etap/076-file-compression.t
deleted file mode 100755
index 2929230..0000000
--- a/test/etap/076-file-compression.t
+++ /dev/null
@@ -1,186 +0,0 @@
-#!/usr/bin/env escript
-%% -*- erlang -*-
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
--record(user_ctx, {
-    name = null,
-    roles = [],
-    handler
-}).
-
-test_db_name() -> <<"couch_test_file_compression">>.
-ddoc_id() -> <<"_design/test">>.
-num_docs() -> 5000.
-
-
-main(_) ->
-    test_util:init_code_path(),
-
-    etap:plan(10),
-    case (catch test()) of
-        ok ->
-            etap:end_tests();
-        Other ->
-            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
-            etap:bail(Other)
-    end,
-    ok.
-
-
-test() ->
-    couch_server_sup:start_link(test_util:config_files()),
-    couch_config:set("couchdb", "file_compression", "none", false),
-
-    create_database(),
-    compact_db(),
-    compact_view(),
-    DbDiskSize1 = db_disk_size(),
-    ViewDiskSize1 = view_disk_size(),
-
-    couch_config:set("couchdb", "file_compression", "snappy", false),
-    compact_db(),
-    compact_view(),
-    DbDiskSize2 = db_disk_size(),
-    ViewDiskSize2 = view_disk_size(),
-
-    etap:is(DbDiskSize2 < DbDiskSize1, true, "Database disk size decreased"),
-    etap:is(ViewDiskSize2 < ViewDiskSize1, true, "Index disk size decreased"),
-
-    couch_config:set("couchdb", "file_compression", "deflate_9", false),
-    compact_db(),
-    compact_view(),
-    DbDiskSize3 = db_disk_size(),
-    ViewDiskSize3 = view_disk_size(),
-
-    etap:is(DbDiskSize3 < DbDiskSize2, true, "Database disk size decreased again"),
-    etap:is(ViewDiskSize3 < ViewDiskSize2, true, "Index disk size decreased again"),
-
-    couch_config:set("couchdb", "file_compression", "deflate_1", false),
-    compact_db(),
-    compact_view(),
-    DbDiskSize4 = db_disk_size(),
-    ViewDiskSize4 = view_disk_size(),
-
-    etap:is(DbDiskSize4 > DbDiskSize3, true, "Database disk size increased"),
-    etap:is(ViewDiskSize4 > ViewDiskSize3, true, "Index disk size increased"),
-
-    couch_config:set("couchdb", "file_compression", "snappy", false),
-    compact_db(),
-    compact_view(),
-    DbDiskSize5 = db_disk_size(),
-    ViewDiskSize5 = view_disk_size(),
-
-    etap:is(DbDiskSize5 > DbDiskSize4, true, "Database disk size increased again"),
-    etap:is(ViewDiskSize5 > ViewDiskSize4, true, "Index disk size increased again"),
-
-    couch_config:set("couchdb", "file_compression", "none", false),
-    compact_db(),
-    compact_view(),
-    DbDiskSize6 = db_disk_size(),
-    ViewDiskSize6 = view_disk_size(),
-
-    etap:is(DbDiskSize6 > DbDiskSize5, true, "Database disk size increased again"),
-    etap:is(ViewDiskSize6 > ViewDiskSize5, true, "Index disk size increased again"),
-
-    delete_db(),
-    couch_server_sup:stop(),
-    ok.
-
-
-create_database() ->
-    {ok, Db} = couch_db:create(
-        test_db_name(),
-        [{user_ctx, #user_ctx{roles = [<<"_admin">>]}}, overwrite]),
-    ok = populate_db(Db, num_docs()),
-    DDoc = couch_doc:from_json_obj({[
-        {<<"_id">>, ddoc_id()},
-        {<<"language">>, <<"javascript">>},
-        {<<"views">>, {[
-                {<<"view1">>, {[
-                    {<<"map">>, <<"function(doc) { emit(doc._id, doc.string); }">>}
-                ]}}
-            ]}
-        }
-    ]}),
-    {ok, _} = couch_db:update_doc(Db, DDoc, []),
-    refresh_index(),
-    ok = couch_db:close(Db).
-
-
-populate_db(_Db, NumDocs) when NumDocs =< 0 ->
-    ok;
-populate_db(Db, NumDocs) ->
-    Docs = lists:map(
-        fun(_) ->
-            couch_doc:from_json_obj({[
-                {<<"_id">>, couch_uuids:random()},
-                {<<"string">>, list_to_binary(lists:duplicate(1000, $X))}
-            ]})
-        end,
-        lists:seq(1, 500)),
-    {ok, _} = couch_db:update_docs(Db, Docs, []),
-    populate_db(Db, NumDocs - 500).
-
-
-refresh_index() ->
-    {ok, Db} = couch_db:open_int(test_db_name(), []),
-    {ok, DDoc} = couch_db:open_doc(Db, ddoc_id(), [ejson_body]),
-    couch_mrview:query_view(Db, DDoc, <<"view1">>, [{stale, false}]),
-    ok = couch_db:close(Db).
-
-
-compact_db() ->
-    {ok, Db} = couch_db:open_int(test_db_name(), []),
-    {ok, CompactPid} = couch_db:start_compact(Db),
-    MonRef = erlang:monitor(process, CompactPid),
-    receive
-    {'DOWN', MonRef, process, CompactPid, normal} ->
-        ok;
-    {'DOWN', MonRef, process, CompactPid, Reason} ->
-        etap:bail("Error compacting database: " ++ couch_util:to_list(Reason))
-    after 120000 ->
-        etap:bail("Timeout waiting for database compaction")
-    end,
-    ok = couch_db:close(Db).
-
-
-compact_view() ->
-    {ok, MonRef} = couch_mrview:compact(test_db_name(), ddoc_id(), [monitor]),
-    receive
-    {'DOWN', MonRef, process, _CompactPid, normal} ->
-        ok;
-    {'DOWN', MonRef, process, _CompactPid, Reason} ->
-        etap:bail("Error compacting view group: " ++ couch_util:to_list(Reason))
-    after 120000 ->
-        etap:bail("Timeout waiting for view group compaction")
-    end.
-
-
-db_disk_size() ->
-    {ok, Db} = couch_db:open_int(test_db_name(), []),
-    {ok, Info} = couch_db:get_db_info(Db),
-    ok = couch_db:close(Db),
-    couch_util:get_value(disk_size, Info).
-
-
-view_disk_size() ->
-    {ok, Db} = couch_db:open_int(test_db_name(), []),
-    {ok, DDoc} = couch_db:open_doc(Db, ddoc_id(), [ejson_body]),
-    {ok, Info} = couch_mrview:get_info(Db, DDoc),
-    ok = couch_db:close(Db),
-    couch_util:get_value(disk_size, Info).
-
-
-delete_db() ->
-    ok = couch_server:delete(
-        test_db_name(), [{user_ctx, #user_ctx{roles = [<<"_admin">>]}}]).

http://git-wip-us.apache.org/repos/asf/couchdb/blob/db5d2277/test/etap/Makefile.am
----------------------------------------------------------------------
diff --git a/test/etap/Makefile.am b/test/etap/Makefile.am
index 3673ef9..d950e46 100644
--- a/test/etap/Makefile.am
+++ b/test/etap/Makefile.am
@@ -36,7 +36,6 @@ fixture_files = \
     fixtures/test.couch
 
 tap_files = \
-    076-file-compression.t \
     077-couch-db-fast-db-delete-create.t \
     080-config-get-set.t \
     081-config-override.1.ini \


[06/36] couchdb commit: updated refs/heads/1963-eunit to 85f2750

Posted by kx...@apache.org.
Port 030-doc-from-json.t and 031-doc-to-json.t etap suites to eunit

Both merged into single suite since they tests single target and shares
common bits.


Project: http://git-wip-us.apache.org/repos/asf/couchdb/repo
Commit: http://git-wip-us.apache.org/repos/asf/couchdb/commit/09677555
Tree: http://git-wip-us.apache.org/repos/asf/couchdb/tree/09677555
Diff: http://git-wip-us.apache.org/repos/asf/couchdb/diff/09677555

Branch: refs/heads/1963-eunit
Commit: 09677555e5e1dc38a7d6827bd156bc394f25b06f
Parents: 9235410
Author: Alexander Shorin <kx...@apache.org>
Authored: Sat May 17 03:42:01 2014 +0400
Committer: Alexander Shorin <kx...@apache.org>
Committed: Tue Jun 3 02:55:24 2014 +0400

----------------------------------------------------------------------
 test/couchdb/Makefile.am              |   1 +
 test/couchdb/couch_doc_json_tests.erl | 391 +++++++++++++++++++++++++++++
 test/etap/030-doc-from-json.t         | 236 -----------------
 test/etap/031-doc-to-json.t           | 197 ---------------
 test/etap/Makefile.am                 |   2 -
 5 files changed, 392 insertions(+), 435 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/couchdb/blob/09677555/test/couchdb/Makefile.am
----------------------------------------------------------------------
diff --git a/test/couchdb/Makefile.am b/test/couchdb/Makefile.am
index 7ad8ae5..c67fe95 100644
--- a/test/couchdb/Makefile.am
+++ b/test/couchdb/Makefile.am
@@ -21,6 +21,7 @@ eunit_files = \
     couch_util_tests.erl \
     couch_file_tests.erl \
     couch_btree_tests.erl \
+    couch_doc_json_tests.erl \
     couchdb_tests.hrl
 
 EXTRA_DIST = \

http://git-wip-us.apache.org/repos/asf/couchdb/blob/09677555/test/couchdb/couch_doc_json_tests.erl
----------------------------------------------------------------------
diff --git a/test/couchdb/couch_doc_json_tests.erl b/test/couchdb/couch_doc_json_tests.erl
new file mode 100644
index 0000000..18b3056
--- /dev/null
+++ b/test/couchdb/couch_doc_json_tests.erl
@@ -0,0 +1,391 @@
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+%   http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+-module(couch_doc_json_tests).
+
+-include_lib("couchdb_tests.hrl").
+-include("../../src/couchdb/couch_db.hrl").
+
+
+setup() ->
+    couch_config:start_link(?CONFIG_CHAIN),
+    couch_config:set("attachments", "compression_level", "0", false),
+    ok.
+
+teardown(_) ->
+    couch_config:stop().
+
+
+json_doc_test_() ->
+    {
+        setup,
+        fun setup/0, fun teardown/1,
+        [
+            {
+                "Document from JSON",
+                [
+                    from_json_success_cases(),
+                    from_json_error_cases()
+                ]
+            },
+            {
+                "Document to JSON",
+                [
+                    to_json_success_cases()
+                ]
+            }
+        ]
+    }.
+
+from_json_success_cases() ->
+    Cases = [
+        {
+            {[]},
+            #doc{},
+            "Return an empty document for an empty JSON object."
+        },
+        {
+            {[{<<"_id">>, <<"zing!">>}]},
+            #doc{id = <<"zing!">>},
+            "Parses document ids."
+        },
+        {
+            {[{<<"_id">>, <<"_design/foo">>}]},
+            #doc{id = <<"_design/foo">>},
+            "_design/document ids."
+        },
+        {
+            {[{<<"_id">>, <<"_local/bam">>}]},
+            #doc{id = <<"_local/bam">>},
+            "_local/document ids."
+        },
+        {
+            {[{<<"_rev">>, <<"4-230234">>}]},
+            #doc{revs = {4, [<<"230234">>]}},
+            "_rev stored in revs."
+        },
+        {
+            {[{<<"soap">>, 35}]},
+            #doc{body = {[{<<"soap">>, 35}]}},
+            "Non underscore prefixed fields stored in body."
+        },
+        {
+            {[{<<"_attachments">>, {[
+                {<<"my_attachment.fu">>, {[
+                    {<<"stub">>, true},
+                    {<<"content_type">>, <<"application/awesome">>},
+                    {<<"length">>, 45}
+                ]}},
+                {<<"noahs_private_key.gpg">>, {[
+                    {<<"data">>, <<"SSBoYXZlIGEgcGV0IGZpc2gh">>},
+                    {<<"content_type">>, <<"application/pgp-signature">>}
+                ]}}
+            ]}}]},
+            #doc{atts = [
+                #att{
+                    name = <<"my_attachment.fu">>,
+                    data = stub,
+                    type = <<"application/awesome">>,
+                    att_len = 45,
+                    disk_len = 45,
+                    revpos = nil
+                },
+                #att{
+                    name = <<"noahs_private_key.gpg">>,
+                    data = <<"I have a pet fish!">>,
+                    type = <<"application/pgp-signature">>,
+                    att_len = 18,
+                    disk_len = 18,
+                    revpos = 0
+                }
+            ]},
+            "Attachments are parsed correctly."
+        },
+        {
+            {[{<<"_deleted">>, true}]},
+            #doc{deleted = true},
+            "_deleted controls the deleted field."
+        },
+        {
+            {[{<<"_deleted">>, false}]},
+            #doc{},
+            "{\"_deleted\": false} is ok."
+        },
+        {
+            {[
+                 {<<"_revisions">>,
+                  {[{<<"start">>, 4},
+                    {<<"ids">>, [<<"foo1">>, <<"phi3">>, <<"omega">>]}]}},
+                 {<<"_rev">>, <<"6-something">>}
+             ]},
+            #doc{revs = {4, [<<"foo1">>, <<"phi3">>, <<"omega">>]}},
+            "_revisions attribute are preferred to _rev."
+        },
+        {
+            {[{<<"_revs_info">>, dropping}]},
+            #doc{},
+            "Drops _revs_info."
+        },
+        {
+            {[{<<"_local_seq">>, dropping}]},
+            #doc{},
+            "Drops _local_seq."
+        },
+        {
+            {[{<<"_conflicts">>, dropping}]},
+            #doc{},
+            "Drops _conflicts."
+        },
+        {
+            {[{<<"_deleted_conflicts">>, dropping}]},
+            #doc{},
+            "Drops _deleted_conflicts."
+        }
+    ],
+    lists:map(
+        fun({EJson, Expect, Msg}) ->
+            {Msg, ?_assertMatch(Expect, couch_doc:from_json_obj(EJson))}
+        end,
+        Cases).
+
+from_json_error_cases() ->
+    Cases = [
+        {
+            [],
+            {bad_request, "Document must be a JSON object"},
+            "arrays are invalid"
+        },
+        {
+            4,
+            {bad_request, "Document must be a JSON object"},
+            "integers are invalid"
+        },
+        {
+            true,
+            {bad_request, "Document must be a JSON object"},
+            "literals are invalid"
+        },
+        {
+            {[{<<"_id">>, {[{<<"foo">>, 5}]}}]},
+            {bad_request, <<"Document id must be a string">>},
+            "Document id must be a string."
+        },
+        {
+            {[{<<"_id">>, <<"_random">>}]},
+            {bad_request,
+             <<"Only reserved document ids may start with underscore.">>},
+            "Disallow arbitrary underscore prefixed docids."
+        },
+        {
+            {[{<<"_rev">>, 5}]},
+            {bad_request, <<"Invalid rev format">>},
+            "_rev must be a string"
+        },
+        {
+            {[{<<"_rev">>, "foobar"}]},
+            {bad_request, <<"Invalid rev format">>},
+            "_rev must be %d-%s"
+        },
+        {
+            {[{<<"_rev">>, "foo-bar"}]},
+            "Error if _rev's integer expection is broken."
+        },
+        {
+            {[{<<"_revisions">>, {[{<<"start">>, true}]}}]},
+            {doc_validation, "_revisions.start isn't an integer."},
+            "_revisions.start must be an integer."
+        },
+        {
+            {[{<<"_revisions">>, {[{<<"start">>, 0}, {<<"ids">>, 5}]}}]},
+            {doc_validation, "_revisions.ids isn't a array."},
+            "_revions.ids must be a list."
+        },
+        {
+            {[{<<"_revisions">>, {[{<<"start">>, 0}, {<<"ids">>, [5]}]}}]},
+            {doc_validation, "RevId isn't a string"},
+            "Revision ids must be strings."
+        },
+        {
+            {[{<<"_something">>, 5}]},
+            {doc_validation, <<"Bad special document member: _something">>},
+            "Underscore prefix fields are reserved."
+        }
+    ],
+
+    lists:map(fun
+        ({EJson, Expect, Msg}) ->
+            Error = (catch couch_doc:from_json_obj(EJson)),
+            {Msg, ?_assertMatch(Expect, Error)};
+        ({EJson, Msg}) ->
+            try
+                couch_doc:from_json_obj(EJson),
+                {"Conversion failed to raise an exception", ?_assert(false)}
+            catch
+                _:_ -> {Msg, ?_assert(true)}
+            end
+    end, Cases).
+
+to_json_success_cases() ->
+    Cases = [
+        {
+            #doc{},
+            {[{<<"_id">>, <<"">>}]},
+            "Empty docs are {\"_id\": \"\"}"
+        },
+        {
+            #doc{id = <<"foo">>},
+            {[{<<"_id">>, <<"foo">>}]},
+            "_id is added."
+        },
+        {
+            #doc{revs = {5, ["foo"]}},
+            {[{<<"_id">>, <<>>}, {<<"_rev">>, <<"5-foo">>}]},
+            "_rev is added."
+        },
+        {
+            [revs],
+            #doc{revs = {5, [<<"first">>, <<"second">>]}},
+            {[
+                 {<<"_id">>, <<>>},
+                 {<<"_rev">>, <<"5-first">>},
+                 {<<"_revisions">>, {[
+                     {<<"start">>, 5},
+                     {<<"ids">>, [<<"first">>, <<"second">>]}
+                 ]}}
+             ]},
+            "_revisions include with revs option"
+        },
+        {
+            #doc{body = {[{<<"foo">>, <<"bar">>}]}},
+            {[{<<"_id">>, <<>>}, {<<"foo">>, <<"bar">>}]},
+            "Arbitrary fields are added."
+        },
+        {
+            #doc{deleted = true, body = {[{<<"foo">>, <<"bar">>}]}},
+            {[{<<"_id">>, <<>>}, {<<"foo">>, <<"bar">>}, {<<"_deleted">>, true}]},
+            "Deleted docs no longer drop body members."
+        },
+        {
+            #doc{meta = [
+                {revs_info, 4, [{<<"fin">>, deleted}, {<<"zim">>, missing}]}
+            ]},
+            {[
+                 {<<"_id">>, <<>>},
+                 {<<"_revs_info">>, [
+                     {[{<<"rev">>, <<"4-fin">>}, {<<"status">>, <<"deleted">>}]},
+                     {[{<<"rev">>, <<"3-zim">>}, {<<"status">>, <<"missing">>}]}
+                 ]}
+             ]},
+            "_revs_info field is added correctly."
+        },
+        {
+            #doc{meta = [{local_seq, 5}]},
+            {[{<<"_id">>, <<>>}, {<<"_local_seq">>, 5}]},
+            "_local_seq is added as an integer."
+        },
+        {
+            #doc{meta = [{conflicts, [{3, <<"yep">>}, {1, <<"snow">>}]}]},
+            {[
+                {<<"_id">>, <<>>},
+                {<<"_conflicts">>, [<<"3-yep">>, <<"1-snow">>]}
+            ]},
+            "_conflicts is added as an array of strings."
+        },
+        {
+            #doc{meta = [{deleted_conflicts, [{10923, <<"big_cowboy_hat">>}]}]},
+            {[
+                 {<<"_id">>, <<>>},
+                 {<<"_deleted_conflicts">>, [<<"10923-big_cowboy_hat">>]}
+             ]},
+            "_deleted_conflicsts is added as an array of strings."
+        },
+        {
+            #doc{atts = [
+                #att{
+                    name = <<"big.xml">>,
+                    type = <<"xml/sucks">>,
+                    data = fun() -> ok end,
+                    revpos = 1,
+                    att_len = 400,
+                    disk_len = 400
+                },
+                #att{
+                    name = <<"fast.json">>,
+                    type = <<"json/ftw">>,
+                    data = <<"{\"so\": \"there!\"}">>,
+                    revpos = 1,
+                    att_len = 16,
+                    disk_len = 16
+                }
+            ]},
+            {[
+                 {<<"_id">>, <<>>},
+                 {<<"_attachments">>, {[
+                       {<<"big.xml">>, {[
+                           {<<"content_type">>, <<"xml/sucks">>},
+                           {<<"revpos">>, 1},
+                           {<<"length">>, 400},
+                           {<<"stub">>, true}
+                       ]}},
+                       {<<"fast.json">>, {[
+                           {<<"content_type">>, <<"json/ftw">>},
+                           {<<"revpos">>, 1},
+                           {<<"length">>, 16},
+                           {<<"stub">>, true}
+                       ]}}
+                ]}}
+            ]},
+            "Attachments attached as stubs only include a length."
+        },
+        {
+            [attachments],
+            #doc{atts = [
+                #att{
+                    name = <<"stuff.txt">>,
+                    type = <<"text/plain">>,
+                    data = fun() -> <<"diet pepsi">> end,
+                    revpos = 1,
+                    att_len = 10,
+                    disk_len = 10
+                },
+                #att{
+                    name = <<"food.now">>,
+                    type = <<"application/food">>,
+                    revpos = 1,
+                    data = <<"sammich">>
+                }
+            ]},
+            {[
+                {<<"_id">>, <<>>},
+                {<<"_attachments">>, {[
+                   {<<"stuff.txt">>, {[
+                       {<<"content_type">>, <<"text/plain">>},
+                       {<<"revpos">>, 1},
+                       {<<"data">>, <<"ZGlldCBwZXBzaQ==">>}
+                   ]}},
+                   {<<"food.now">>, {[
+                       {<<"content_type">>, <<"application/food">>},
+                       {<<"revpos">>, 1},
+                       {<<"data">>, <<"c2FtbWljaA==">>}
+                   ]}}
+                ]}}
+            ]},
+            "Attachments included inline with attachments option."
+        }
+    ],
+
+    lists:map(fun
+        ({Doc, EJson, Msg}) ->
+            {Msg, ?_assertMatch(EJson, couch_doc:to_json_obj(Doc, []))};
+        ({Options, Doc, EJson, Msg}) ->
+            {Msg, ?_assertMatch(EJson, couch_doc:to_json_obj(Doc, Options))}
+    end, Cases).

http://git-wip-us.apache.org/repos/asf/couchdb/blob/09677555/test/etap/030-doc-from-json.t
----------------------------------------------------------------------
diff --git a/test/etap/030-doc-from-json.t b/test/etap/030-doc-from-json.t
deleted file mode 100755
index b0c393e..0000000
--- a/test/etap/030-doc-from-json.t
+++ /dev/null
@@ -1,236 +0,0 @@
-#!/usr/bin/env escript
-%% -*- erlang -*-
-%%! -pa ./src/couchdb -pa ./src/mochiweb -sasl errlog_type false -noshell
-
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
-%% XXX: Figure out how to -include("couch_db.hrl")
--record(doc, {id= <<"">>, revs={0, []}, body={[]},
-            atts=[], deleted=false, meta=[]}).
--record(att, {name, type, att_len, disk_len, md5= <<>>, revpos=0, data,
-            encoding=identity}).
-
-main(_) ->
-    test_util:init_code_path(),
-    etap:plan(26),
-    case (catch test()) of
-        ok ->
-            etap:end_tests();
-        Other ->
-            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
-            etap:bail()
-    end,
-    ok.
-
-test() ->
-    couch_config:start_link(test_util:config_files()),
-    couch_config:set("attachments", "compression_level", "0", false),
-    ok = test_from_json_success(),
-    ok = test_from_json_errors(),
-    ok.
-
-test_from_json_success() ->
-    Cases = [
-        {
-            {[]},
-            #doc{},
-            "Return an empty document for an empty JSON object."
-        },
-        {
-            {[{<<"_id">>, <<"zing!">>}]},
-            #doc{id= <<"zing!">>},
-            "Parses document ids."
-        },
-        {
-            {[{<<"_id">>, <<"_design/foo">>}]},
-            #doc{id= <<"_design/foo">>},
-            "_design/document ids."
-        },
-        {
-            {[{<<"_id">>, <<"_local/bam">>}]},
-            #doc{id= <<"_local/bam">>},
-            "_local/document ids."
-        },
-        {
-            {[{<<"_rev">>, <<"4-230234">>}]},
-            #doc{revs={4, [<<"230234">>]}},
-            "_rev stored in revs."
-        },
-        {
-            {[{<<"soap">>, 35}]},
-            #doc{body={[{<<"soap">>, 35}]}},
-            "Non underscore prefixed fields stored in body."
-        },
-        {
-            {[{<<"_attachments">>, {[
-                {<<"my_attachment.fu">>, {[
-                    {<<"stub">>, true},
-                    {<<"content_type">>, <<"application/awesome">>},
-                    {<<"length">>, 45}
-                ]}},
-                {<<"noahs_private_key.gpg">>, {[
-                    {<<"data">>, <<"SSBoYXZlIGEgcGV0IGZpc2gh">>},
-                    {<<"content_type">>, <<"application/pgp-signature">>}
-                ]}}
-            ]}}]},
-            #doc{atts=[
-                #att{
-                    name = <<"my_attachment.fu">>,
-                    data = stub,
-                    type = <<"application/awesome">>,
-                    att_len = 45,
-                    disk_len = 45,
-                    revpos = nil
-                },
-                #att{
-                    name = <<"noahs_private_key.gpg">>,
-                    data = <<"I have a pet fish!">>,
-                    type = <<"application/pgp-signature">>,
-                    att_len = 18,
-                    disk_len = 18,
-                    revpos = 0
-                }
-            ]},
-            "Attachments are parsed correctly."
-        },
-        {
-            {[{<<"_deleted">>, true}]},
-            #doc{deleted=true},
-            "_deleted controls the deleted field."
-        },
-        {
-            {[{<<"_deleted">>, false}]},
-            #doc{},
-            "{\"_deleted\": false} is ok."
-        },
-        {
-            {[
-                {<<"_revisions">>, {[
-                    {<<"start">>, 4},
-                    {<<"ids">>, [<<"foo1">>, <<"phi3">>, <<"omega">>]}
-                ]}},
-                {<<"_rev">>, <<"6-something">>}
-            ]},
-            #doc{revs={4, [<<"foo1">>, <<"phi3">>, <<"omega">>]}},
-            "_revisions attribute are preferred to _rev."
-        },
-        {
-            {[{<<"_revs_info">>, dropping}]},
-            #doc{},
-            "Drops _revs_info."
-        },
-        {
-            {[{<<"_local_seq">>, dropping}]},
-            #doc{},
-            "Drops _local_seq."
-        },
-        {
-            {[{<<"_conflicts">>, dropping}]},
-            #doc{},
-            "Drops _conflicts."
-        },
-        {
-            {[{<<"_deleted_conflicts">>, dropping}]},
-            #doc{},
-            "Drops _deleted_conflicts."
-        }
-    ],
-
-    lists:foreach(fun({EJson, Expect, Mesg}) ->
-        etap:is(couch_doc:from_json_obj(EJson), Expect, Mesg)
-    end, Cases),
-    ok.
-
-test_from_json_errors() ->
-    Cases = [
-        {
-            [],
-            {bad_request, "Document must be a JSON object"},
-            "arrays are invalid"
-        },
-        {
-            4,
-            {bad_request, "Document must be a JSON object"},
-            "integers are invalid"
-        },
-        {
-            true,
-            {bad_request, "Document must be a JSON object"},
-            "literals are invalid"
-        },
-        {
-            {[{<<"_id">>, {[{<<"foo">>, 5}]}}]},
-            {bad_request, <<"Document id must be a string">>},
-            "Document id must be a string."
-        },
-        {
-            {[{<<"_id">>, <<"_random">>}]},
-            {bad_request,
-                <<"Only reserved document ids may start with underscore.">>},
-            "Disallow arbitrary underscore prefixed docids."
-        },
-        {
-            {[{<<"_rev">>, 5}]},
-            {bad_request, <<"Invalid rev format">>},
-            "_rev must be a string"
-        },
-        {
-            {[{<<"_rev">>, "foobar"}]},
-            {bad_request, <<"Invalid rev format">>},
-            "_rev must be %d-%s"
-        },
-        {
-            {[{<<"_rev">>, "foo-bar"}]},
-            "Error if _rev's integer expection is broken."
-        },
-        {
-            {[{<<"_revisions">>, {[{<<"start">>, true}]}}]},
-            {doc_validation, "_revisions.start isn't an integer."},
-            "_revisions.start must be an integer."
-        },
-        {
-            {[{<<"_revisions">>, {[
-                {<<"start">>, 0},
-                {<<"ids">>, 5}
-            ]}}]},
-            {doc_validation, "_revisions.ids isn't a array."},
-            "_revions.ids must be a list."
-        },
-        {
-            {[{<<"_revisions">>, {[
-                {<<"start">>, 0},
-                {<<"ids">>, [5]}
-            ]}}]},
-            {doc_validation, "RevId isn't a string"},
-            "Revision ids must be strings."
-        },
-        {
-            {[{<<"_something">>, 5}]},
-            {doc_validation, <<"Bad special document member: _something">>},
-            "Underscore prefix fields are reserved."
-        }
-    ],
-
-    lists:foreach(fun
-        ({EJson, Expect, Mesg}) ->
-            Error = (catch couch_doc:from_json_obj(EJson)),
-            etap:is(Error, Expect, Mesg);
-        ({EJson, Mesg}) ->
-            try
-                couch_doc:from_json_obj(EJson),
-                etap:ok(false, "Conversion failed to raise an exception.")
-            catch
-                _:_ -> etap:ok(true, Mesg)
-            end
-    end, Cases),
-    ok.

http://git-wip-us.apache.org/repos/asf/couchdb/blob/09677555/test/etap/031-doc-to-json.t
----------------------------------------------------------------------
diff --git a/test/etap/031-doc-to-json.t b/test/etap/031-doc-to-json.t
deleted file mode 100755
index ce950f9..0000000
--- a/test/etap/031-doc-to-json.t
+++ /dev/null
@@ -1,197 +0,0 @@
-#!/usr/bin/env escript
-%% -*- erlang -*-
-%%! -pa ./src/couchdb -pa ./src/mochiweb -sasl errlog_type false -noshell
-
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
-%% XXX: Figure out how to -include("couch_db.hrl")
--record(doc, {id= <<"">>, revs={0, []}, body={[]},
-            atts=[], deleted=false, meta=[]}).
--record(att, {name, type, att_len, disk_len, md5= <<>>, revpos=0, data,
-            encoding=identity}).
-
-main(_) ->
-    test_util:init_code_path(),
-    etap:plan(12),
-    case (catch test()) of
-        ok ->
-            etap:end_tests();
-        Other ->
-            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
-            etap:bail()
-    end,
-    ok.
-
-test() ->
-    couch_config:start_link(test_util:config_files()),
-    couch_config:set("attachments", "compression_level", "0", false),
-    ok = test_to_json_success(),
-    ok.
-
-test_to_json_success() ->
-    Cases = [
-        {
-            #doc{},
-            {[{<<"_id">>, <<"">>}]},
-            "Empty docs are {\"_id\": \"\"}"
-        },
-        {
-            #doc{id= <<"foo">>},
-            {[{<<"_id">>, <<"foo">>}]},
-            "_id is added."
-        },
-        {
-            #doc{revs={5, ["foo"]}},
-            {[{<<"_id">>, <<>>}, {<<"_rev">>, <<"5-foo">>}]},
-            "_rev is added."
-        },
-        {
-            [revs],
-            #doc{revs={5, [<<"first">>, <<"second">>]}},
-            {[
-                {<<"_id">>, <<>>},
-                {<<"_rev">>, <<"5-first">>},
-                {<<"_revisions">>, {[
-                    {<<"start">>, 5},
-                    {<<"ids">>, [<<"first">>, <<"second">>]}
-                ]}}
-            ]},
-            "_revisions include with revs option"
-        },
-        {
-            #doc{body={[{<<"foo">>, <<"bar">>}]}},
-            {[{<<"_id">>, <<>>}, {<<"foo">>, <<"bar">>}]},
-            "Arbitrary fields are added."
-        },
-        {
-            #doc{deleted=true, body={[{<<"foo">>, <<"bar">>}]}},
-            {[{<<"_id">>, <<>>}, {<<"foo">>, <<"bar">>}, {<<"_deleted">>, true}]},
-            "Deleted docs no longer drop body members."
-        },
-        {
-            #doc{meta=[
-                {revs_info, 4, [{<<"fin">>, deleted}, {<<"zim">>, missing}]}
-            ]},
-            {[
-                {<<"_id">>, <<>>},
-                {<<"_revs_info">>, [
-                    {[{<<"rev">>, <<"4-fin">>}, {<<"status">>, <<"deleted">>}]},
-                    {[{<<"rev">>, <<"3-zim">>}, {<<"status">>, <<"missing">>}]}
-                ]}
-            ]},
-            "_revs_info field is added correctly."
-        },
-        {
-            #doc{meta=[{local_seq, 5}]},
-            {[{<<"_id">>, <<>>}, {<<"_local_seq">>, 5}]},
-            "_local_seq is added as an integer."
-        },
-        {
-            #doc{meta=[{conflicts, [{3, <<"yep">>}, {1, <<"snow">>}]}]},
-            {[
-                {<<"_id">>, <<>>},
-                {<<"_conflicts">>, [<<"3-yep">>, <<"1-snow">>]}
-            ]},
-            "_conflicts is added as an array of strings."
-        },
-        {
-            #doc{meta=[{deleted_conflicts, [{10923, <<"big_cowboy_hat">>}]}]},
-            {[
-                {<<"_id">>, <<>>},
-                {<<"_deleted_conflicts">>, [<<"10923-big_cowboy_hat">>]}
-            ]},
-            "_deleted_conflicsts is added as an array of strings."
-        },
-        {
-            #doc{atts=[
-                #att{
-                    name = <<"big.xml">>, 
-                    type = <<"xml/sucks">>, 
-                    data = fun() -> ok end,
-                    revpos = 1,
-                    att_len = 400,
-                    disk_len = 400
-                },
-                #att{
-                    name = <<"fast.json">>, 
-                    type = <<"json/ftw">>, 
-                    data = <<"{\"so\": \"there!\"}">>,
-                    revpos = 1,
-                    att_len = 16,
-                    disk_len = 16
-                }
-            ]},
-            {[
-                {<<"_id">>, <<>>},
-                {<<"_attachments">>, {[
-                    {<<"big.xml">>, {[
-                        {<<"content_type">>, <<"xml/sucks">>},
-                        {<<"revpos">>, 1},
-                        {<<"length">>, 400},
-                        {<<"stub">>, true}
-                    ]}},
-                    {<<"fast.json">>, {[
-                        {<<"content_type">>, <<"json/ftw">>},
-                        {<<"revpos">>, 1},
-                        {<<"length">>, 16},
-                        {<<"stub">>, true}
-                    ]}}
-                ]}}
-            ]},
-            "Attachments attached as stubs only include a length."
-        },
-        {
-            [attachments],
-            #doc{atts=[
-                #att{
-                    name = <<"stuff.txt">>,
-                    type = <<"text/plain">>,
-                    data = fun() -> <<"diet pepsi">> end,
-                    revpos = 1,
-                    att_len = 10,
-                    disk_len = 10
-                },
-                #att{
-                    name = <<"food.now">>,
-                    type = <<"application/food">>,
-                    revpos = 1,
-                    data = <<"sammich">>
-                }
-            ]},
-            {[
-                {<<"_id">>, <<>>},
-                {<<"_attachments">>, {[
-                    {<<"stuff.txt">>, {[
-                        {<<"content_type">>, <<"text/plain">>},
-                        {<<"revpos">>, 1},
-                        {<<"data">>, <<"ZGlldCBwZXBzaQ==">>}
-                    ]}},
-                    {<<"food.now">>, {[
-                        {<<"content_type">>, <<"application/food">>},
-                        {<<"revpos">>, 1},
-                        {<<"data">>, <<"c2FtbWljaA==">>}
-                    ]}}
-                ]}}
-            ]},
-            "Attachments included inline with attachments option."
-        }
-    ],
-
-    lists:foreach(fun
-        ({Doc, EJson, Mesg}) ->
-            etap:is(couch_doc:to_json_obj(Doc, []), EJson, Mesg);
-        ({Options, Doc, EJson, Mesg}) ->
-            etap:is(couch_doc:to_json_obj(Doc, Options), EJson, Mesg)
-    end, Cases),
-    ok.
-

http://git-wip-us.apache.org/repos/asf/couchdb/blob/09677555/test/etap/Makefile.am
----------------------------------------------------------------------
diff --git a/test/etap/Makefile.am b/test/etap/Makefile.am
index 2e86144..b40b095 100644
--- a/test/etap/Makefile.am
+++ b/test/etap/Makefile.am
@@ -36,8 +36,6 @@ fixture_files = \
     fixtures/test.couch
 
 tap_files = \
-    030-doc-from-json.t \
-    031-doc-to-json.t \
     040-util.t \
     041-uuid-gen-id.ini \
     041-uuid-gen-seq.ini \


[12/36] couchdb commit: updated refs/heads/1963-eunit to 85f2750

Posted by kx...@apache.org.
Port 050-stream.t etap test suite to eunit


Project: http://git-wip-us.apache.org/repos/asf/couchdb/repo
Commit: http://git-wip-us.apache.org/repos/asf/couchdb/commit/11e45079
Tree: http://git-wip-us.apache.org/repos/asf/couchdb/tree/11e45079
Diff: http://git-wip-us.apache.org/repos/asf/couchdb/diff/11e45079

Branch: refs/heads/1963-eunit
Commit: 11e45079c6c303ad8039a8a88f76f6ade9d2cba2
Parents: 720e32b
Author: Alexander Shorin <kx...@apache.org>
Authored: Sun May 18 14:45:05 2014 +0400
Committer: Alexander Shorin <kx...@apache.org>
Committed: Tue Jun 3 15:14:25 2014 +0400

----------------------------------------------------------------------
 test/couchdb/Makefile.am            |  1 +
 test/couchdb/couch_stream_tests.erl | 99 ++++++++++++++++++++++++++++++++
 test/etap/050-stream.t              | 87 ----------------------------
 test/etap/Makefile.am               |  1 -
 4 files changed, 100 insertions(+), 88 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/couchdb/blob/11e45079/test/couchdb/Makefile.am
----------------------------------------------------------------------
diff --git a/test/couchdb/Makefile.am b/test/couchdb/Makefile.am
index 58a6840..948ba7c 100644
--- a/test/couchdb/Makefile.am
+++ b/test/couchdb/Makefile.am
@@ -24,6 +24,7 @@ eunit_files = \
     couch_doc_json_tests.erl \
     couch_uuids_tests.erl \
     couch_work_queue_tests.erl \
+    couch_stream_tests.erl \
     couchdb_tests.hrl
 
 EXTRA_DIST = \

http://git-wip-us.apache.org/repos/asf/couchdb/blob/11e45079/test/couchdb/couch_stream_tests.erl
----------------------------------------------------------------------
diff --git a/test/couchdb/couch_stream_tests.erl b/test/couchdb/couch_stream_tests.erl
new file mode 100644
index 0000000..580477f
--- /dev/null
+++ b/test/couchdb/couch_stream_tests.erl
@@ -0,0 +1,99 @@
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+%   http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+-module(couch_stream_tests).
+
+-include_lib("couchdb_tests.hrl").
+
+setup() ->
+    {ok, Fd} = couch_file:open(?tempfile(), [create, overwrite]),
+    {ok, Stream} = couch_stream:open(Fd),
+    {Fd, Stream}.
+
+teardown({Fd, _}) ->
+    ok = couch_file:close(Fd).
+
+
+stream_test_() ->
+    {
+        "CouchDB stream test",
+        {
+            foreach,
+            fun setup/0, fun teardown/1,
+            [
+                fun should_write/1,
+                fun should_write_consecutive/1,
+                fun should_write_empty_binary/1,
+                fun should_return_file_pointers_on_close/1,
+                fun should_return_stream_size_on_close/1,
+                fun should_return_valid_pointers/1,
+                fun should_recall_last_pointer_position/1,
+                fun should_stream_more_with_4K_chunk_size/1
+            ]
+        }
+    }.
+
+
+should_write({_, Stream}) ->
+    ?_assertMatch(ok, couch_stream:write(Stream, <<"food">>)).
+
+should_write_consecutive({_, Stream}) ->
+    couch_stream:write(Stream, <<"food">>),
+    ?_assertMatch(ok, couch_stream:write(Stream, <<"foob">>)).
+
+should_write_empty_binary({_, Stream}) ->
+    ?_assertMatch(ok, couch_stream:write(Stream, <<>>)).
+
+should_return_file_pointers_on_close({_, Stream}) ->
+    couch_stream:write(Stream, <<"foodfoob">>),
+    {Ptrs, _, _, _, _} = couch_stream:close(Stream),
+    ?_assertMatch([{0, 8}], Ptrs).
+
+should_return_stream_size_on_close({_, Stream}) ->
+    couch_stream:write(Stream, <<"foodfoob">>),
+    {_, Length, _, _, _} = couch_stream:close(Stream),
+    ?_assertEqual(8, Length).
+
+should_return_valid_pointers({Fd, Stream}) ->
+    couch_stream:write(Stream, <<"foodfoob">>),
+    {Ptrs, _, _, _, _} = couch_stream:close(Stream),
+    ?_assertMatch(<<"foodfoob">>, read_all(Fd, Ptrs)).
+
+should_recall_last_pointer_position({Fd, Stream}) ->
+    couch_stream:write(Stream, <<"foodfoob">>),
+    {_, _, _, _, _} = couch_stream:close(Stream),
+    {ok, ExpPtr} = couch_file:bytes(Fd),
+    {ok, Stream2} = couch_stream:open(Fd),
+    ZeroBits = <<0:(8 * 10)>>,
+    OneBits = <<1:(8 * 10)>>,
+    ok = couch_stream:write(Stream2, OneBits),
+    ok = couch_stream:write(Stream2, ZeroBits),
+    {Ptrs, 20, _, _, _} = couch_stream:close(Stream2),
+    [{ExpPtr, 20}] = Ptrs,
+    AllBits = iolist_to_binary([OneBits, ZeroBits]),
+    ?_assertMatch(AllBits, read_all(Fd, Ptrs)).
+
+should_stream_more_with_4K_chunk_size({Fd, _}) ->
+    {ok, Stream} = couch_stream:open(Fd, [{buffer_size, 4096}]),
+    lists:foldl(
+        fun(_, Acc) ->
+            Data = <<"a1b2c">>,
+            couch_stream:write(Stream, Data),
+            [Data | Acc]
+        end, [], lists:seq(1, 1024)),
+    ?_assertMatch({[{0, 4100}, {4106, 1020}], 5120, _, _, _},
+                  couch_stream:close(Stream)).
+
+
+read_all(Fd, PosList) ->
+    Data = couch_stream:foldl(Fd, PosList, fun(Bin, Acc) -> [Bin, Acc] end, []),
+    iolist_to_binary(Data).

http://git-wip-us.apache.org/repos/asf/couchdb/blob/11e45079/test/etap/050-stream.t
----------------------------------------------------------------------
diff --git a/test/etap/050-stream.t b/test/etap/050-stream.t
deleted file mode 100755
index 0251f00..0000000
--- a/test/etap/050-stream.t
+++ /dev/null
@@ -1,87 +0,0 @@
-#!/usr/bin/env escript
-%% -*- erlang -*-
-
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
-main(_) ->
-    test_util:init_code_path(),
-    etap:plan(13),
-    case (catch test()) of
-        ok ->
-            etap:end_tests();
-        Other ->
-            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
-            etap:bail(Other)
-    end,
-    ok.
-
-read_all(Fd, PosList) ->
-    Data = couch_stream:foldl(Fd, PosList, fun(Bin, Acc) -> [Bin, Acc] end, []),
-    iolist_to_binary(Data).
-
-test() ->
-    {ok, Fd} = couch_file:open("test/etap/temp.050", [create,overwrite]),
-    {ok, Stream} = couch_stream:open(Fd),
-
-    etap:is(ok, couch_stream:write(Stream, <<"food">>),
-        "Writing to streams works."),
-
-    etap:is(ok, couch_stream:write(Stream, <<"foob">>),
-        "Consecutive writing to streams works."),
-
-    etap:is(ok, couch_stream:write(Stream, <<>>),
-        "Writing an empty binary does nothing."),
-
-    {Ptrs, Length, _, _, _} = couch_stream:close(Stream),
-    etap:is(Ptrs, [{0, 8}], "Close returns the file pointers."),
-    etap:is(Length, 8, "Close also returns the number of bytes written."),
-    etap:is(<<"foodfoob">>, read_all(Fd, Ptrs), "Returned pointers are valid."),
-
-    % Remember where we expect the pointer to be.
-    {ok, ExpPtr} = couch_file:bytes(Fd),
-    {ok, Stream2} = couch_stream:open(Fd),
-    OneBits = <<1:(8*10)>>,
-    etap:is(ok, couch_stream:write(Stream2, OneBits),
-        "Successfully wrote 79 zero bits and 1 one bit."),
-
-    ZeroBits = <<0:(8*10)>>,
-    etap:is(ok, couch_stream:write(Stream2, ZeroBits),
-        "Successfully wrote 80 0 bits."),
-
-    {Ptrs2, Length2, _, _, _} = couch_stream:close(Stream2),
-    etap:is(Ptrs2, [{ExpPtr, 20}], "Closing stream returns the file pointers."),
-    etap:is(Length2, 20, "Length written is 160 bytes."),
-
-    AllBits = iolist_to_binary([OneBits,ZeroBits]),
-    etap:is(AllBits, read_all(Fd, Ptrs2), "Returned pointers are valid."),
-
-    % Stream more the 4K chunk size.
-    {ok, ExpPtr2} = couch_file:bytes(Fd),
-    {ok, Stream3} = couch_stream:open(Fd, [{buffer_size, 4096}]),
-    lists:foldl(fun(_, Acc) ->
-        Data = <<"a1b2c">>,
-        couch_stream:write(Stream3, Data),
-        [Data | Acc]
-    end, [], lists:seq(1, 1024)),
-    {Ptrs3, Length3, _, _, _} = couch_stream:close(Stream3),
-
-    % 4095 because of 5 * 4096 rem 5 (last write before exceeding threshold)
-    % + 5 puts us over the threshold
-    % + 4 bytes for the term_to_binary adding a length header
-    % + 1 byte every 4K for tail append headers
-    SecondPtr = ExpPtr2 + 4095 + 5 + 4 + 1,
-    etap:is(Ptrs3, [{ExpPtr2, 4100}, {SecondPtr, 1020}], "Pointers every 4K bytes."),
-    etap:is(Length3, 5120, "Wrote the expected 5K bytes."),
-
-    couch_file:close(Fd),
-    ok.

http://git-wip-us.apache.org/repos/asf/couchdb/blob/11e45079/test/etap/Makefile.am
----------------------------------------------------------------------
diff --git a/test/etap/Makefile.am b/test/etap/Makefile.am
index 4eef1a0..b6ec287 100644
--- a/test/etap/Makefile.am
+++ b/test/etap/Makefile.am
@@ -36,7 +36,6 @@ fixture_files = \
     fixtures/test.couch
 
 tap_files = \
-    050-stream.t \
     060-kt-merging.t \
     061-kt-missing-leaves.t \
     062-kt-remove-leaves.t \


[14/36] couchdb commit: updated refs/heads/1963-eunit to 85f2750

Posted by kx...@apache.org.
Port 042-work-queue.t etap test suite to eunit

Etap tests were made in flow style, testing the same things multiple
times without real need. For eunit they are split into small test cases
to focus on testing goals.

Timeout on receive is decreased from 3000 to 100.


Project: http://git-wip-us.apache.org/repos/asf/couchdb/repo
Commit: http://git-wip-us.apache.org/repos/asf/couchdb/commit/8d0ab7af
Tree: http://git-wip-us.apache.org/repos/asf/couchdb/tree/8d0ab7af
Diff: http://git-wip-us.apache.org/repos/asf/couchdb/diff/8d0ab7af

Branch: refs/heads/1963-eunit
Commit: 8d0ab7af784e12c8a34b461c77079b999b99485c
Parents: f33624f
Author: Alexander Shorin <kx...@apache.org>
Authored: Sun May 18 14:04:08 2014 +0400
Committer: Alexander Shorin <kx...@apache.org>
Committed: Tue Jun 3 15:14:25 2014 +0400

----------------------------------------------------------------------
 test/couchdb/Makefile.am                |   1 +
 test/couchdb/couch_work_queue_tests.erl | 393 +++++++++++++++++++++
 test/etap/042-work-queue.t              | 500 ---------------------------
 test/etap/Makefile.am                   |   1 -
 4 files changed, 394 insertions(+), 501 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/couchdb/blob/8d0ab7af/test/couchdb/Makefile.am
----------------------------------------------------------------------
diff --git a/test/couchdb/Makefile.am b/test/couchdb/Makefile.am
index e7d13ff..58a6840 100644
--- a/test/couchdb/Makefile.am
+++ b/test/couchdb/Makefile.am
@@ -23,6 +23,7 @@ eunit_files = \
     couch_btree_tests.erl \
     couch_doc_json_tests.erl \
     couch_uuids_tests.erl \
+    couch_work_queue_tests.erl \
     couchdb_tests.hrl
 
 EXTRA_DIST = \

http://git-wip-us.apache.org/repos/asf/couchdb/blob/8d0ab7af/test/couchdb/couch_work_queue_tests.erl
----------------------------------------------------------------------
diff --git a/test/couchdb/couch_work_queue_tests.erl b/test/couchdb/couch_work_queue_tests.erl
new file mode 100644
index 0000000..0a65c6a
--- /dev/null
+++ b/test/couchdb/couch_work_queue_tests.erl
@@ -0,0 +1,393 @@
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+%   http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+%% Split big cases to little ones
+%% Drop timeout borderline from 3000 to 1500
+
+-module(couch_work_queue_tests).
+
+-include_lib("couchdb_tests.hrl").
+
+-define(TIMEOUT, 100).
+
+
+setup(Opts) ->
+    {ok, Q} = couch_work_queue:new(Opts),
+    Producer = spawn_producer(Q),
+    Consumer = spawn_consumer(Q),
+    {Q, Producer, Consumer}.
+
+setup_max_items() ->
+    setup([{max_items, 3}]).
+
+setup_max_size() ->
+    setup([{max_size, 160}]).
+
+setup_max_items_and_size() ->
+    setup([{max_size, 160}, {max_items, 3}]).
+
+setup_multi_workers() ->
+    {Q, Producer, Consumer1} = setup([{max_size, 160},
+                                      {max_items, 3},
+                                      {multi_workers, true}]),
+    Consumer2 = spawn_consumer(Q),
+    Consumer3 = spawn_consumer(Q),
+    {Q, Producer, [Consumer1, Consumer2, Consumer3]}.
+
+teardown({Q, Producer, Consumers}) when is_list(Consumers) ->
+    % consume all to unblock and let producer/consumer stop without timeout
+    [consume(Consumer, all) || Consumer <- Consumers],
+
+    ok = close_queue(Q),
+    ok = stop(Producer, "producer"),
+    R = [stop(Consumer, "consumer") || Consumer <- Consumers],
+    R = [ok || _ <- Consumers],
+    ok;
+teardown({Q, Producer, Consumer}) ->
+    teardown({Q, Producer, [Consumer]}).
+
+
+single_consumer_test_() ->
+    {
+        "Single producer and consumer",
+        [
+            {
+                "Queue with 3 max items",
+                {
+                    foreach,
+                    fun setup_max_items/0, fun teardown/1,
+                    single_consumer_max_item_count() ++ common_cases()
+                }
+            },
+            {
+                "Queue with max size of 160 bytes",
+                {
+                    foreach,
+                    fun setup_max_size/0, fun teardown/1,
+                    single_consumer_max_size() ++ common_cases()
+                }
+            },
+            {
+                "Queue with max size of 160 bytes and 3 max items",
+                {
+                    foreach,
+                    fun setup_max_items_and_size/0, fun teardown/1,
+                    single_consumer_max_items_and_size() ++ common_cases()
+                }
+            }
+        ]
+    }.
+
+multiple_consumers_test_() ->
+    {
+        "Single producer and multiple consumers",
+        [
+            {
+                "Queue with max size of 160 bytes and 3 max items",
+                {
+                    foreach,
+                    fun setup_multi_workers/0, fun teardown/1,
+                    common_cases() ++ multiple_consumers()
+                }
+
+            }
+        ]
+    }.
+
+common_cases()->
+    [
+        fun should_block_consumer_on_dequeue_from_empty_queue/1,
+        fun should_consume_right_item/1,
+        fun should_timeout_on_close_non_empty_queue/1,
+        fun should_not_block_producer_for_non_empty_queue_after_close/1,
+        fun should_be_closed/1
+    ].
+
+single_consumer_max_item_count()->
+    [
+        fun should_have_no_items_for_new_queue/1,
+        fun should_block_producer_on_full_queue_count/1,
+        fun should_receive_first_queued_item/1,
+        fun should_consume_multiple_items/1,
+        fun should_consume_all/1
+    ].
+
+single_consumer_max_size()->
+    [
+        fun should_have_zero_size_for_new_queue/1,
+        fun should_block_producer_on_full_queue_size/1,
+        fun should_increase_queue_size_on_produce/1,
+        fun should_receive_first_queued_item/1,
+        fun should_consume_multiple_items/1,
+        fun should_consume_all/1
+    ].
+
+single_consumer_max_items_and_size() ->
+    single_consumer_max_item_count() ++ single_consumer_max_size().
+
+multiple_consumers() ->
+    [
+        fun should_have_zero_size_for_new_queue/1,
+        fun should_have_no_items_for_new_queue/1,
+        fun should_increase_queue_size_on_produce/1
+    ].
+
+
+should_have_no_items_for_new_queue({Q, _, _}) ->
+    ?_assertEqual(0, couch_work_queue:item_count(Q)).
+
+should_have_zero_size_for_new_queue({Q, _, _}) ->
+    ?_assertEqual(0, couch_work_queue:size(Q)).
+
+should_block_consumer_on_dequeue_from_empty_queue({_, _, Consumers}) when is_list(Consumers) ->
+    [consume(C, 2) || C <- Consumers],
+    Pongs = [ping(C) || C <- Consumers],
+    ?_assertMatch([timeout, timeout, timeout], Pongs);
+should_block_consumer_on_dequeue_from_empty_queue({_, _, Consumer}) ->
+    consume(Consumer, 1),
+    Pong = ping(Consumer),
+    ?_assertMatch(timeout, Pong).
+
+should_consume_right_item({Q, Producer, Consumers}) when is_list(Consumers) ->
+    [consume(C, 3) || C <- Consumers],
+
+    Item1 = produce(Producer, 10),
+    ok = ping(Producer),
+    0 = couch_work_queue:item_count(Q),
+    0 = couch_work_queue:size(Q),
+
+    Item2 = produce(Producer, 10),
+    ok = ping(Producer),
+    0 = couch_work_queue:item_count(Q),
+    0 = couch_work_queue:size(Q),
+
+    Item3 = produce(Producer, 10),
+    ok = ping(Producer),
+    0 = couch_work_queue:item_count(Q),
+    0 = couch_work_queue:size(Q),
+
+    R = [{ping(C), Item}
+         || {C, Item} <- lists:zip(Consumers, [Item1, Item2, Item3])],
+
+    ?_assertMatch([{ok, Item1}, {ok, Item2}, {ok, Item3}], R);
+should_consume_right_item({_, Producer, Consumer}) ->
+    consume(Consumer, 1),
+    Item = produce(Producer, 10),
+    produce(Producer, 20),
+    ok = ping(Producer),
+    ok = ping(Consumer),
+    {ok, Items} = last_consumer_items(Consumer),
+    ?_assertMatch([Item], Items).
+
+should_increase_queue_size_on_produce({Q, Producer, _}) ->
+    produce(Producer, 50),
+    ok = ping(Producer),
+    Count1 = couch_work_queue:item_count(Q),
+    Size1 = couch_work_queue:size(Q),
+
+    produce(Producer, 10),
+    Count2 = couch_work_queue:item_count(Q),
+    Size2 = couch_work_queue:size(Q),
+
+    ?_assertEqual([{Count1, Size1}, {Count2, Size2}], [{1, 50}, {2, 60}]).
+
+should_block_producer_on_full_queue_count({Q, Producer, _}) ->
+    produce(Producer, 10),
+    1 = couch_work_queue:item_count(Q),
+    ok = ping(Producer),
+
+    produce(Producer, 15),
+    2 = couch_work_queue:item_count(Q),
+    ok = ping(Producer),
+
+    produce(Producer, 20),
+    3 = couch_work_queue:item_count(Q),
+    Pong = ping(Producer),
+
+    ?_assertMatch(timeout, Pong).
+
+should_block_producer_on_full_queue_size({Q, Producer, _}) ->
+    produce(Producer, 100),
+    ok = ping(Producer),
+    1 = couch_work_queue:item_count(Q),
+    100 = couch_work_queue:size(Q),
+
+    produce(Producer, 110),
+    Pong = ping(Producer),
+    2 = couch_work_queue:item_count(Q),
+    210 = couch_work_queue:size(Q),
+
+    ?_assertMatch(timeout, Pong).
+
+should_consume_multiple_items({_, Producer, Consumer}) ->
+    Item1 = produce(Producer, 10),
+    ok = ping(Producer),
+
+    Item2 = produce(Producer, 15),
+    ok = ping(Producer),
+
+    consume(Consumer, 2),
+
+    {ok, Items} = last_consumer_items(Consumer),
+    ?_assertMatch([Item1, Item2], Items).
+
+should_receive_first_queued_item({Q, Producer, Consumer}) ->
+    consume(Consumer, 100),
+    timeout = ping(Consumer),
+
+    Item = produce(Producer, 11),
+    ok = ping(Producer),
+
+    ok = ping(Consumer),
+    0 = couch_work_queue:item_count(Q),
+
+    {ok, Items} = last_consumer_items(Consumer),
+    ?_assertMatch([Item], Items).
+
+should_consume_all({_, Producer, Consumer}) ->
+    Item1 = produce(Producer, 10),
+    Item2 = produce(Producer, 15),
+    Item3 = produce(Producer, 20),
+
+    consume(Consumer, all),
+
+    {ok, Items} = last_consumer_items(Consumer),
+    ?_assertMatch([Item1, Item2, Item3], Items).
+
+should_timeout_on_close_non_empty_queue({Q, Producer, _}) ->
+    produce(Producer, 1),
+    Status = close_queue(Q),
+
+    ?_assertMatch(timeout, Status).
+
+should_not_block_producer_for_non_empty_queue_after_close({Q, Producer, _}) ->
+    produce(Producer, 1),
+    close_queue(Q),
+    Pong = ping(Producer),
+    Size = couch_work_queue:size(Q),
+    Count = couch_work_queue:item_count(Q),
+
+    ?_assertMatch({ok, 1, 1}, {Pong, Size, Count}).
+
+should_be_closed({Q, _, Consumers}) when is_list(Consumers) ->
+    ok = close_queue(Q),
+
+    [consume(C, 1) || C <- Consumers],
+
+    LastConsumerItems = [last_consumer_items(C) || C <- Consumers],
+    ItemsCount = couch_work_queue:item_count(Q),
+    Size = couch_work_queue:size(Q),
+
+    ?_assertMatch({[closed, closed, closed], closed, closed},
+                  {LastConsumerItems, ItemsCount, Size});
+should_be_closed({Q, _, Consumer}) ->
+    ok = close_queue(Q),
+
+    consume(Consumer, 1),
+
+    LastConsumerItems = last_consumer_items(Consumer),
+    ItemsCount = couch_work_queue:item_count(Q),
+    Size = couch_work_queue:size(Q),
+
+    ?_assertMatch({closed, closed, closed},
+                  {LastConsumerItems, ItemsCount, Size}).
+
+
+close_queue(Q) ->
+    ok = couch_work_queue:close(Q),
+    MonRef = erlang:monitor(process, Q),
+    receive
+        {'DOWN', MonRef, process, Q, _Reason} -> ok
+    after ?TIMEOUT ->
+        erlang:demonitor(MonRef),
+        timeout
+    end.
+
+spawn_consumer(Q) ->
+    Parent = self(),
+    spawn(fun() -> consumer_loop(Parent, Q, nil) end).
+
+consumer_loop(Parent, Q, PrevItem) ->
+    receive
+        {stop, Ref} ->
+            Parent ! {ok, Ref};
+        {ping, Ref} ->
+            Parent ! {pong, Ref},
+            consumer_loop(Parent, Q, PrevItem);
+        {last_item, Ref} ->
+            Parent ! {item, Ref, PrevItem},
+            consumer_loop(Parent, Q, PrevItem);
+        {consume, N} ->
+            Result = couch_work_queue:dequeue(Q, N),
+            consumer_loop(Parent, Q, Result)
+    end.
+
+spawn_producer(Q) ->
+    Parent = self(),
+    spawn(fun() -> producer_loop(Parent, Q) end).
+
+producer_loop(Parent, Q) ->
+    receive
+        {stop, Ref} ->
+            Parent ! {ok, Ref};
+        {ping, Ref} ->
+            Parent ! {pong, Ref},
+            producer_loop(Parent, Q);
+        {produce, Ref, Size} ->
+            Item = crypto:rand_bytes(Size),
+            Parent ! {item, Ref, Item},
+            ok = couch_work_queue:queue(Q, Item),
+            producer_loop(Parent, Q)
+    end.
+
+consume(Consumer, N) ->
+    Consumer ! {consume, N}.
+
+last_consumer_items(Consumer) ->
+    Ref = make_ref(),
+    Consumer ! {last_item, Ref},
+    receive
+        {item, Ref, Items} ->
+            Items
+    after ?TIMEOUT ->
+        timeout
+    end.
+
+produce(Producer, Size) ->
+    Ref = make_ref(),
+    Producer ! {produce, Ref, Size},
+    receive
+        {item, Ref, Item} ->
+            Item
+    after ?TIMEOUT ->
+        throw({timeout, "Timeout asking producer to produce an item"})
+    end.
+
+ping(Pid) ->
+    Ref = make_ref(),
+    Pid ! {ping, Ref},
+    receive
+        {pong, Ref} ->
+            ok
+    after ?TIMEOUT ->
+        timeout
+    end.
+
+stop(Pid, Name) ->
+    Ref = make_ref(),
+    Pid ! {stop, Ref},
+    receive
+        {ok, Ref} -> ok
+    after ?TIMEOUT ->
+        ?debugMsg("Timeout stopping " ++ Name),
+        timeout
+    end.

http://git-wip-us.apache.org/repos/asf/couchdb/blob/8d0ab7af/test/etap/042-work-queue.t
----------------------------------------------------------------------
diff --git a/test/etap/042-work-queue.t b/test/etap/042-work-queue.t
deleted file mode 100755
index 8594a6f..0000000
--- a/test/etap/042-work-queue.t
+++ /dev/null
@@ -1,500 +0,0 @@
-#!/usr/bin/env escript
-%% -*- erlang -*-
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
-main(_) ->
-    test_util:init_code_path(),
-
-    etap:plan(155),
-    case (catch test()) of
-        ok ->
-            etap:end_tests();
-        Other ->
-            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
-            etap:bail(Other)
-    end,
-    ok.
-
-
-test() ->
-    ok = crypto:start(),
-    test_single_consumer_max_item_count(),
-    test_single_consumer_max_size(),
-    test_single_consumer_max_item_count_and_size(),
-    test_multiple_consumers(),
-    ok.
-
-
-test_single_consumer_max_item_count() ->
-    etap:diag("Spawning a queue with 3 max items, 1 producer and 1 consumer"),
-
-    {ok, Q} = couch_work_queue:new([{max_items, 3}]),
-    Producer = spawn_producer(Q),
-    Consumer = spawn_consumer(Q),
-
-    etap:is(couch_work_queue:item_count(Q), 0, "Queue item count is 0"),
-
-    consume(Consumer, 1),
-    etap:is(ping(Consumer), timeout,
-        "Consumer blocked when attempting to dequeue 1 item from empty queue"),
-
-    Item1 = produce(Producer, 10),
-    etap:is(ping(Producer), ok, "Producer not blocked"),
-
-    etap:is(ping(Consumer), ok, "Consumer unblocked"),
-    etap:is(last_consumer_items(Consumer), {ok, [Item1]},
-        "Consumer received the right item"),
-
-    Item2 = produce(Producer, 20),
-    etap:is(ping(Producer), ok, "Producer not blocked with non full queue"),
-    etap:is(couch_work_queue:item_count(Q), 1, "Queue item count is 1"),
-
-    Item3 = produce(Producer, 15),
-    etap:is(ping(Producer), ok, "Producer not blocked with non full queue"),
-    etap:is(couch_work_queue:item_count(Q), 2, "Queue item count is 2"),
-
-    Item4 = produce(Producer, 3),
-    etap:is(couch_work_queue:item_count(Q), 3, "Queue item count is 3"),
-    etap:is(ping(Producer), timeout, "Producer blocked with full queue"),
-
-    consume(Consumer, 2),
-    etap:is(ping(Consumer), ok,
-        "Consumer not blocked when attempting to dequeue 2 items from queue"),
-    etap:is(last_consumer_items(Consumer), {ok, [Item2, Item3]},
-        "Consumer received the right items"),
-    etap:is(couch_work_queue:item_count(Q), 1, "Queue item count is 1"),
-
-    consume(Consumer, 2),
-    etap:is(ping(Consumer), ok,
-        "Consumer not blocked when attempting to dequeue 2 items from queue"),
-    etap:is(last_consumer_items(Consumer), {ok, [Item4]},
-        "Consumer received the right item"),
-    etap:is(couch_work_queue:item_count(Q), 0, "Queue item count is 0"),
-
-    consume(Consumer, 100),
-    etap:is(ping(Consumer), timeout,
-        "Consumer blocked when attempting to dequeue 100 items from empty queue"),
-    etap:is(couch_work_queue:item_count(Q), 0, "Queue item count is 0"),
-
-    Item5 = produce(Producer, 11),
-    etap:is(ping(Producer), ok, "Producer not blocked with empty queue"),
-    etap:is(couch_work_queue:item_count(Q), 0, "Queue item count is 0"),
-
-    Item6 = produce(Producer, 19),
-    etap:is(ping(Producer), ok, "Producer not blocked with non full queue"),
-    etap:is(couch_work_queue:item_count(Q), 1, "Queue item count is 1"),
-
-    Item7 = produce(Producer, 2),
-    etap:is(ping(Producer), ok, "Producer not blocked with non full queue"),
-    etap:is(couch_work_queue:item_count(Q), 2, "Queue item count is 2"),
-
-    Item8 = produce(Producer, 33),
-    etap:is(ping(Producer), timeout, "Producer blocked with full queue"),
-    etap:is(couch_work_queue:item_count(Q), 3, "Queue item count is 3"),
-
-    etap:is(ping(Consumer), ok, "Consumer unblocked"),
-    etap:is(last_consumer_items(Consumer), {ok, [Item5]},
-        "Consumer received the first queued item"),
-    etap:is(couch_work_queue:item_count(Q), 3, "Queue item count is 3"),
-
-    consume(Consumer, all),
-    etap:is(ping(Consumer), ok,
-        "Consumer not blocked when attempting to dequeue all items from queue"),
-    etap:is(last_consumer_items(Consumer), {ok, [Item6, Item7, Item8]},
-        "Consumer received all queued items"),
-
-    etap:is(couch_work_queue:item_count(Q), 0, "Queue item count is 0"),
-
-    etap:is(close_queue(Q), ok, "Closed queue"),
-    consume(Consumer, 1),
-    etap:is(last_consumer_items(Consumer), closed, "Consumer got closed queue"),
-    etap:is(couch_work_queue:item_count(Q), closed, "Queue closed"),
-    etap:is(couch_work_queue:size(Q), closed, "Queue closed"),
-
-    stop(Producer, "producer"),
-    stop(Consumer, "consumer").
-
-
-
-test_single_consumer_max_size() ->
-    etap:diag("Spawning a queue with max size of 160 bytes, "
-        "1 producer and 1 consumer"),
-
-    {ok, Q} = couch_work_queue:new([{max_size, 160}]),
-    Producer = spawn_producer(Q),
-    Consumer = spawn_consumer(Q),
-
-    etap:is(couch_work_queue:item_count(Q), 0, "Queue item count is 0"),
-    etap:is(couch_work_queue:size(Q), 0, "Queue size is 0 bytes"),
-
-    consume(Consumer, 1),
-    etap:is(ping(Consumer), timeout,
-        "Consumer blocked when attempting to dequeue 1 item from empty queue"),
-
-    Item1 = produce(Producer, 50),
-    etap:is(ping(Producer), ok, "Producer not blocked"),
-
-    etap:is(ping(Consumer), ok, "Consumer unblocked"),
-    etap:is(last_consumer_items(Consumer), {ok, [Item1]},
-        "Consumer received the right item"),
-
-    etap:is(couch_work_queue:item_count(Q), 0, "Queue item count is 0"),
-    etap:is(couch_work_queue:size(Q), 0, "Queue size is 0 bytes"),
-
-    Item2 = produce(Producer, 50),
-    etap:is(ping(Producer), ok, "Producer not blocked"),
-    etap:is(couch_work_queue:item_count(Q), 1, "Queue item count is 1"),
-    etap:is(couch_work_queue:size(Q), 50, "Queue size is 50 bytes"),
-
-    Item3 = produce(Producer, 50),
-    etap:is(ping(Producer), ok, "Producer not blocked"),
-    etap:is(couch_work_queue:item_count(Q), 2, "Queue item count is 2"),
-    etap:is(couch_work_queue:size(Q), 100, "Queue size is 100 bytes"),
-
-    Item4 = produce(Producer, 61),
-    etap:is(ping(Producer), timeout, "Producer blocked"),
-    etap:is(couch_work_queue:item_count(Q), 3, "Queue item count is 3"),
-    etap:is(couch_work_queue:size(Q), 161, "Queue size is 161 bytes"),
-
-    consume(Consumer, 1),
-    etap:is(ping(Consumer), ok,
-        "Consumer not blocked when attempting to dequeue 1 item from full queue"),
-    etap:is(last_consumer_items(Consumer), {ok, [Item2]},
-        "Consumer received the right item"),
-    etap:is(couch_work_queue:item_count(Q), 2, "Queue item count is 2"),
-    etap:is(couch_work_queue:size(Q), 111, "Queue size is 111 bytes"),
-
-    Item5 = produce(Producer, 20),
-    etap:is(ping(Producer), ok, "Producer not blocked"),
-    etap:is(couch_work_queue:item_count(Q), 3, "Queue item count is 3"),
-    etap:is(couch_work_queue:size(Q), 131, "Queue size is 131 bytes"),
-
-    Item6 = produce(Producer, 40),
-    etap:is(ping(Producer), timeout, "Producer blocked"),
-    etap:is(couch_work_queue:item_count(Q), 4, "Queue item count is 4"),
-    etap:is(couch_work_queue:size(Q), 171, "Queue size is 171 bytes"),
-
-    etap:is(close_queue(Q), timeout,
-        "Timeout when trying to close non-empty queue"),
-
-    consume(Consumer, 2),
-    etap:is(ping(Consumer), ok,
-        "Consumer not blocked when attempting to dequeue 2 items from full queue"),
-    etap:is(last_consumer_items(Consumer), {ok, [Item3, Item4]},
-        "Consumer received the right items"),
-    etap:is(couch_work_queue:item_count(Q), 2, "Queue item count is 2"),
-    etap:is(couch_work_queue:size(Q), 60, "Queue size is 60 bytes"),
-
-    etap:is(close_queue(Q), timeout,
-        "Timeout when trying to close non-empty queue"),
-
-    consume(Consumer, all),
-    etap:is(ping(Consumer), ok,
-        "Consumer not blocked when attempting to dequeue all items from queue"),
-    etap:is(last_consumer_items(Consumer), {ok, [Item5, Item6]},
-        "Consumer received the right items"),
-
-    etap:is(couch_work_queue:item_count(Q), closed, "Queue closed"),
-    etap:is(couch_work_queue:size(Q), closed, "Queue closed"),
-
-    consume(Consumer, all),
-    etap:is(last_consumer_items(Consumer), closed, "Consumer got closed queue"),
-
-    stop(Producer, "producer"),
-    stop(Consumer, "consumer").
-
-
-test_single_consumer_max_item_count_and_size() ->
-    etap:diag("Spawning a queue with 3 max items, max size of 200 bytes, "
-        "1 producer and 1 consumer"),
-
-    {ok, Q} = couch_work_queue:new([{max_items, 3}, {max_size, 200}]),
-    Producer = spawn_producer(Q),
-    Consumer = spawn_consumer(Q),
-
-    etap:is(couch_work_queue:item_count(Q), 0, "Queue item count is 0"),
-    etap:is(couch_work_queue:size(Q), 0, "Queue size is 0 bytes"),
-
-    Item1 = produce(Producer, 100),
-    etap:is(ping(Producer), ok, "Producer not blocked"),
-    etap:is(couch_work_queue:item_count(Q), 1, "Queue item count is 1"),
-    etap:is(couch_work_queue:size(Q), 100, "Queue size is 100 bytes"),
-
-    Item2 = produce(Producer, 110),
-    etap:is(ping(Producer), timeout,
-        "Producer blocked when queue size >= max_size"),
-    etap:is(couch_work_queue:item_count(Q), 2, "Queue item count is 2"),
-    etap:is(couch_work_queue:size(Q), 210, "Queue size is 210 bytes"),
-
-    consume(Consumer, all),
-    etap:is(ping(Consumer), ok,
-        "Consumer not blocked when attempting to dequeue all items from queue"),
-    etap:is(last_consumer_items(Consumer), {ok, [Item1, Item2]},
-        "Consumer received the right items"),
-    etap:is(couch_work_queue:item_count(Q), 0, "Queue item count is 0"),
-    etap:is(couch_work_queue:size(Q), 0, "Queue size is 0 bytes"),
-
-    etap:is(ping(Producer), ok, "Producer not blocked anymore"),
-
-    Item3 = produce(Producer, 10),
-    etap:is(ping(Producer), ok, "Producer not blocked"),
-    etap:is(couch_work_queue:item_count(Q), 1, "Queue item count is 1"),
-    etap:is(couch_work_queue:size(Q), 10, "Queue size is 10 bytes"),
-
-    Item4 = produce(Producer, 4),
-    etap:is(ping(Producer), ok, "Producer not blocked"),
-    etap:is(couch_work_queue:item_count(Q), 2, "Queue item count is 2"),
-    etap:is(couch_work_queue:size(Q), 14, "Queue size is 14 bytes"),
-
-    Item5 = produce(Producer, 2),
-    etap:is(ping(Producer), timeout,
-        "Producer blocked when queue item count = max_items"),
-    etap:is(couch_work_queue:item_count(Q), 3, "Queue item count is 3"),
-    etap:is(couch_work_queue:size(Q), 16, "Queue size is 16 bytes"),
-
-    consume(Consumer, 1),
-    etap:is(ping(Consumer), ok,
-        "Consumer not blocked when attempting to dequeue 1 item from queue"),
-    etap:is(last_consumer_items(Consumer), {ok, [Item3]},
-       "Consumer received 1 item"),
-    etap:is(couch_work_queue:item_count(Q), 2, "Queue item count is 2"),
-    etap:is(couch_work_queue:size(Q), 6, "Queue size is 6 bytes"),
-
-    etap:is(close_queue(Q), timeout,
-        "Timeout when trying to close non-empty queue"),
-
-    consume(Consumer, 1),
-    etap:is(ping(Consumer), ok,
-        "Consumer not blocked when attempting to dequeue 1 item from queue"),
-    etap:is(last_consumer_items(Consumer), {ok, [Item4]},
-       "Consumer received 1 item"),
-    etap:is(couch_work_queue:item_count(Q), 1, "Queue item count is 1"),
-    etap:is(couch_work_queue:size(Q), 2, "Queue size is 2 bytes"),
-
-    Item6 = produce(Producer, 50),
-    etap:is(ping(Producer), ok,
-        "Producer not blocked when queue is not full and already received"
-        " a close request"),
-    etap:is(couch_work_queue:item_count(Q), 2, "Queue item count is 2"),
-    etap:is(couch_work_queue:size(Q), 52, "Queue size is 52 bytes"),
-
-    consume(Consumer, all),
-    etap:is(ping(Consumer), ok,
-        "Consumer not blocked when attempting to dequeue all items from queue"),
-    etap:is(last_consumer_items(Consumer), {ok, [Item5, Item6]},
-       "Consumer received all queued items"),
-
-    etap:is(couch_work_queue:item_count(Q), closed, "Queue closed"),
-    etap:is(couch_work_queue:size(Q), closed, "Queue closed"),
-
-    consume(Consumer, 1),
-    etap:is(last_consumer_items(Consumer), closed, "Consumer got closed queue"),
-
-    stop(Producer, "producer"),
-    stop(Consumer, "consumer").
-
-
-test_multiple_consumers() ->
-    etap:diag("Spawning a queue with 3 max items, max size of 200 bytes, "
-        "1 producer and 3 consumers"),
-
-    {ok, Q} = couch_work_queue:new(
-        [{max_items, 3}, {max_size, 200}, {multi_workers, true}]),
-    Producer = spawn_producer(Q),
-    Consumer1 = spawn_consumer(Q),
-    Consumer2 = spawn_consumer(Q),
-    Consumer3 = spawn_consumer(Q),
-
-    etap:is(couch_work_queue:item_count(Q), 0, "Queue item count is 0"),
-    etap:is(couch_work_queue:size(Q), 0, "Queue size is 0 bytes"),
-
-    consume(Consumer1, 1),
-    etap:is(ping(Consumer1), timeout,
-        "Consumer 1 blocked when attempting to dequeue 1 item from empty queue"),
-    consume(Consumer2, 2),
-    etap:is(ping(Consumer2), timeout,
-        "Consumer 2 blocked when attempting to dequeue 2 items from empty queue"),
-    consume(Consumer3, 1),
-    etap:is(ping(Consumer3), timeout,
-        "Consumer 3 blocked when attempting to dequeue 1 item from empty queue"),
-
-    Item1 = produce(Producer, 50),
-    etap:is(ping(Producer), ok, "Producer not blocked"),
-    etap:is(couch_work_queue:item_count(Q), 0, "Queue item count is 0"),
-    etap:is(couch_work_queue:size(Q), 0, "Queue size is 0 bytes"),
-
-    Item2 = produce(Producer, 50),
-    etap:is(ping(Producer), ok, "Producer not blocked"),
-    etap:is(couch_work_queue:item_count(Q), 0, "Queue item count is 0"),
-    etap:is(couch_work_queue:size(Q), 0, "Queue size is 0 bytes"),
-
-    Item3 = produce(Producer, 50),
-    etap:is(ping(Producer), ok, "Producer not blocked"),
-    etap:is(couch_work_queue:item_count(Q), 0, "Queue item count is 0"),
-    etap:is(couch_work_queue:size(Q), 0, "Queue size is 0 bytes"),
-
-    etap:is(ping(Consumer1), ok, "Consumer 1 unblocked"),
-    etap:is(last_consumer_items(Consumer1), {ok, [Item1]},
-       "Consumer 1 received 1 item"),
-    etap:is(couch_work_queue:item_count(Q), 0, "Queue item count is 0"),
-    etap:is(couch_work_queue:size(Q), 0, "Queue size is 0 bytes"),
-
-    etap:is(ping(Consumer2), ok, "Consumer 2 unblocked"),
-    etap:is(last_consumer_items(Consumer2), {ok, [Item2]},
-       "Consumer 2 received 1 item"),
-    etap:is(couch_work_queue:item_count(Q), 0, "Queue item count is 0"),
-    etap:is(couch_work_queue:size(Q), 0, "Queue size is 0 bytes"),
-
-    etap:is(ping(Consumer3), ok, "Consumer 3 unblocked"),
-    etap:is(last_consumer_items(Consumer3), {ok, [Item3]},
-       "Consumer 3 received 1 item"),
-    etap:is(couch_work_queue:item_count(Q), 0, "Queue item count is 0"),
-    etap:is(couch_work_queue:size(Q), 0, "Queue size is 0 bytes"),
-
-    consume(Consumer1, 1),
-    etap:is(ping(Consumer1), timeout,
-        "Consumer 1 blocked when attempting to dequeue 1 item from empty queue"),
-    consume(Consumer2, 2),
-    etap:is(ping(Consumer2), timeout,
-        "Consumer 2 blocked when attempting to dequeue 1 item from empty queue"),
-    consume(Consumer3, 1),
-    etap:is(ping(Consumer3), timeout,
-        "Consumer 3 blocked when attempting to dequeue 1 item from empty queue"),
-
-    Item4 = produce(Producer, 50),
-    etap:is(ping(Producer), ok, "Producer not blocked"),
-    etap:is(couch_work_queue:item_count(Q), 0, "Queue item count is 0"),
-    etap:is(couch_work_queue:size(Q), 0, "Queue size is 0 bytes"),
-
-    etap:is(close_queue(Q), ok, "Closed queue"),
-
-    etap:is(ping(Consumer1), ok, "Consumer 1 unblocked"),
-    etap:is(last_consumer_items(Consumer1), {ok, [Item4]},
-       "Consumer 1 received 1 item"),
-
-    etap:is(couch_work_queue:item_count(Q), closed, "Queue closed"),
-    etap:is(couch_work_queue:size(Q), closed, "Queue closed"),
-
-    etap:is(ping(Consumer2), ok, "Consumer 2 unblocked"),
-    etap:is(last_consumer_items(Consumer2), closed,
-        "Consumer 2 received 'closed' atom"),
-
-    etap:is(ping(Consumer3), ok, "Consumer 3 unblocked"),
-    etap:is(last_consumer_items(Consumer3), closed,
-        "Consumer 3 received 'closed' atom"),
-
-    stop(Producer, "producer"),
-    stop(Consumer1, "consumer 1"),
-    stop(Consumer2, "consumer 2"),
-    stop(Consumer3, "consumer 3").
-
-
-close_queue(Q) ->
-    ok = couch_work_queue:close(Q),
-    MonRef = erlang:monitor(process, Q),
-    receive
-    {'DOWN', MonRef, process, Q, _Reason} ->
-         etap:diag("Queue closed")
-    after 3000 ->
-         erlang:demonitor(MonRef),
-         timeout
-    end.
-
-
-spawn_consumer(Q) ->
-    Parent = self(),
-    spawn(fun() -> consumer_loop(Parent, Q, nil) end).
-
-
-consumer_loop(Parent, Q, PrevItem) ->
-    receive
-    {stop, Ref} ->
-        Parent ! {ok, Ref};
-    {ping, Ref} ->
-        Parent ! {pong, Ref},
-        consumer_loop(Parent, Q, PrevItem);
-    {last_item, Ref} ->
-        Parent ! {item, Ref, PrevItem},
-        consumer_loop(Parent, Q, PrevItem);
-    {consume, N} ->
-        Result = couch_work_queue:dequeue(Q, N),
-        consumer_loop(Parent, Q, Result)
-    end.
-
-
-spawn_producer(Q) ->
-    Parent = self(),
-    spawn(fun() -> producer_loop(Parent, Q) end).
-
-
-producer_loop(Parent, Q) ->
-    receive
-    {stop, Ref} ->
-        Parent ! {ok, Ref};
-    {ping, Ref} ->
-        Parent ! {pong, Ref},
-        producer_loop(Parent, Q);
-    {produce, Ref, Size} ->
-        Item = crypto:rand_bytes(Size),
-        Parent ! {item, Ref, Item},
-        ok = couch_work_queue:queue(Q, Item),
-        producer_loop(Parent, Q)
-    end.
-
-
-consume(Consumer, N) ->
-    Consumer ! {consume, N}.
-
-
-last_consumer_items(Consumer) ->
-    Ref = make_ref(),
-    Consumer ! {last_item, Ref},
-    receive
-    {item, Ref, Items} ->
-        Items
-    after 3000 ->
-        timeout
-    end.
-
-
-produce(Producer, Size) ->
-    Ref = make_ref(),
-    Producer ! {produce, Ref, Size},
-    receive
-    {item, Ref, Item} ->
-        Item
-    after 3000 ->
-        etap:bail("Timeout asking producer to produce an item")
-    end.
-
-
-ping(Pid) ->
-    Ref = make_ref(),
-    Pid ! {ping, Ref},
-    receive
-    {pong, Ref} ->
-        ok
-    after 3000 ->
-        timeout
-    end.
-
-
-stop(Pid, Name) ->
-    Ref = make_ref(),
-    Pid ! {stop, Ref},
-    receive
-    {ok, Ref} ->
-        etap:diag("Stopped " ++ Name)
-    after 3000 ->
-        etap:bail("Timeout stopping " ++ Name)
-    end.

http://git-wip-us.apache.org/repos/asf/couchdb/blob/8d0ab7af/test/etap/Makefile.am
----------------------------------------------------------------------
diff --git a/test/etap/Makefile.am b/test/etap/Makefile.am
index f45853a..6bd2dad 100644
--- a/test/etap/Makefile.am
+++ b/test/etap/Makefile.am
@@ -36,7 +36,6 @@ fixture_files = \
     fixtures/test.couch
 
 tap_files = \
-    042-work-queue.t \
     043-find-in-binary.t \
     050-stream.t \
     060-kt-merging.t \


[27/36] couchdb commit: updated refs/heads/1963-eunit to 85f2750

Posted by kx...@apache.org.
Port 120-stats-collect.t etap test suite to eunit


Project: http://git-wip-us.apache.org/repos/asf/couchdb/repo
Commit: http://git-wip-us.apache.org/repos/asf/couchdb/commit/39b0ec1a
Tree: http://git-wip-us.apache.org/repos/asf/couchdb/tree/39b0ec1a
Diff: http://git-wip-us.apache.org/repos/asf/couchdb/diff/39b0ec1a

Branch: refs/heads/1963-eunit
Commit: 39b0ec1ac81c5c2eea26abf2c22e7d730ac54c94
Parents: 5c95e8f
Author: Alexander Shorin <kx...@apache.org>
Authored: Mon May 26 23:57:38 2014 +0400
Committer: Alexander Shorin <kx...@apache.org>
Committed: Tue Jun 3 19:52:27 2014 +0400

----------------------------------------------------------------------
 test/couchdb/Makefile.am           |   1 +
 test/couchdb/couch_stats_tests.erl | 193 ++++++++++++++++++++++++++++++++
 test/etap/120-stats-collect.t      | 150 -------------------------
 test/etap/Makefile.am              |   1 -
 4 files changed, 194 insertions(+), 151 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/couchdb/blob/39b0ec1a/test/couchdb/Makefile.am
----------------------------------------------------------------------
diff --git a/test/couchdb/Makefile.am b/test/couchdb/Makefile.am
index 0133601..afa8c56 100644
--- a/test/couchdb/Makefile.am
+++ b/test/couchdb/Makefile.am
@@ -36,6 +36,7 @@ eunit_files = \
     couch_config_tests.erl \
     couch_task_status_tests.erl \
     couch_ref_counter_tests.erl \
+    couch_stats_tests.erl \
     test_request.erl \
     couchdb_tests.hrl
 

http://git-wip-us.apache.org/repos/asf/couchdb/blob/39b0ec1a/test/couchdb/couch_stats_tests.erl
----------------------------------------------------------------------
diff --git a/test/couchdb/couch_stats_tests.erl b/test/couchdb/couch_stats_tests.erl
new file mode 100644
index 0000000..2086d7b
--- /dev/null
+++ b/test/couchdb/couch_stats_tests.erl
@@ -0,0 +1,193 @@
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+%   http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+-module(couch_stats_tests).
+
+-include("../../src/couchdb/couch_db.hrl").
+-include("couchdb_tests.hrl").
+
+-define(TIMEOUT, 1000).
+-define(SLEEPTIME, 100).
+
+
+setup_collector() ->
+    couch_stats_collector:start(),
+    ok.
+
+teardown_collector(_) ->
+    couch_stats_collector:stop(),
+    ok.
+
+
+couch_stats_collector_test_() ->
+    {
+        "CouchDB stats collector tests",
+        {
+            foreach,
+            fun setup_collector/0, fun teardown_collector/1,
+            [
+                should_increment_counter(),
+                should_decrement_counter(),
+                should_increment_and_decrement_counter(),
+                should_record_absolute_values(),
+                should_clear_absolute_values(),
+                should_track_process_count(),
+                should_increment_counter_multiple_times_per_pid(),
+                should_decrement_counter_on_process_exit(),
+                should_decrement_for_each_track_process_count_call_on_exit(),
+                should_return_all_counters_and_absolute_values(),
+                should_return_incremental_counters(),
+                should_return_absolute_values()
+            ]
+        }
+    }.
+
+
+should_increment_counter() ->
+    ?_assertEqual(100,
+        begin
+            AddCount = fun() -> couch_stats_collector:increment(foo) end,
+            repeat(AddCount, 100),
+            couch_stats_collector:get(foo)
+        end).
+
+should_decrement_counter() ->
+    ?_assertEqual(67,
+        begin
+            AddCount = fun() -> couch_stats_collector:increment(foo) end,
+            RemCount = fun() -> couch_stats_collector:decrement(foo) end,
+            repeat(AddCount, 100),
+            repeat(RemCount, 33),
+            couch_stats_collector:get(foo)
+        end).
+
+should_increment_and_decrement_counter() ->
+    ?_assertEqual(0,
+        begin
+            AddCount = fun() -> couch_stats_collector:increment(foo) end,
+            RemCount = fun() -> couch_stats_collector:decrement(foo) end,
+            repeat(AddCount, 100),
+            repeat(RemCount, 25),
+            repeat(AddCount, 10),
+            repeat(RemCount, 5),
+            repeat(RemCount, 80),
+            couch_stats_collector:get(foo)
+        end).
+
+should_record_absolute_values() ->
+    ?_assertEqual(lists:seq(1, 15),
+        begin
+            lists:map(fun(Val) ->
+                couch_stats_collector:record(bar, Val)
+            end, lists:seq(1, 15)),
+            couch_stats_collector:get(bar)
+        end).
+
+should_clear_absolute_values() ->
+    ?_assertEqual(nil,
+        begin
+            lists:map(fun(Val) ->
+                couch_stats_collector:record(bar, Val)
+            end, lists:seq(1, 15)),
+            couch_stats_collector:clear(bar),
+            couch_stats_collector:get(bar)
+        end).
+
+should_track_process_count() ->
+    ?_assertMatch({_, 1}, spawn_and_count(1)).
+
+should_increment_counter_multiple_times_per_pid() ->
+    ?_assertMatch({_, 3}, spawn_and_count(3)).
+
+should_decrement_counter_on_process_exit() ->
+    ?_assertEqual(2,
+        begin
+            {Pid, 1} = spawn_and_count(1),
+            spawn_and_count(2),
+            RefMon = erlang:monitor(process, Pid),
+            Pid ! sepuku,
+            receive
+                {'DOWN', RefMon, _, _, _} -> ok
+            after ?TIMEOUT ->
+                throw(timeout)
+            end,
+            % sleep for awhile to let collector handle the updates
+            % suddenly, it couldn't notice process death instantly
+            timer:sleep(?SLEEPTIME),
+            couch_stats_collector:get(hoopla)
+        end).
+
+should_decrement_for_each_track_process_count_call_on_exit() ->
+    ?_assertEqual(2,
+        begin
+            {_, 2} = spawn_and_count(2),
+            {Pid, 6} = spawn_and_count(4),
+            RefMon = erlang:monitor(process, Pid),
+            Pid ! sepuku,
+            receive
+                {'DOWN', RefMon, _, _, _} -> ok
+            after ?TIMEOUT ->
+                throw(timeout)
+            end,
+            timer:sleep(?SLEEPTIME),
+            couch_stats_collector:get(hoopla)
+        end).
+
+should_return_all_counters_and_absolute_values() ->
+    ?_assertEqual([{bar,[1.0,0.0]}, {foo,1}],
+        begin
+            couch_stats_collector:record(bar, 0.0),
+            couch_stats_collector:record(bar, 1.0),
+            couch_stats_collector:increment(foo),
+            lists:sort(couch_stats_collector:all())
+        end).
+
+should_return_incremental_counters() ->
+    ?_assertEqual([{foo,1}],
+        begin
+            couch_stats_collector:record(bar, 0.0),
+            couch_stats_collector:record(bar, 1.0),
+            couch_stats_collector:increment(foo),
+            lists:sort(couch_stats_collector:all(incremental))
+        end).
+
+should_return_absolute_values() ->
+    ?_assertEqual([{bar,[1.0,0.0]}, {zing, "Z"}],
+        begin
+            couch_stats_collector:record(bar, 0.0),
+            couch_stats_collector:record(bar, 1.0),
+            couch_stats_collector:record(zing, 90),
+            couch_stats_collector:increment(foo),
+            lists:sort(couch_stats_collector:all(absolute))
+        end).
+
+
+spawn_and_count(N) ->
+    Self = self(),
+    Pid = spawn(fun() ->
+        lists:foreach(
+            fun(_) ->
+                couch_stats_collector:track_process_count(hoopla)
+            end, lists:seq(1,N)),
+        Self ! reporting,
+        receive
+            sepuku -> ok
+        end
+    end),
+    receive reporting -> ok end,
+    {Pid, couch_stats_collector:get(hoopla)}.
+
+repeat(_, 0) ->
+    ok;
+repeat(Fun, Count) ->
+    Fun(),
+    repeat(Fun, Count-1).

http://git-wip-us.apache.org/repos/asf/couchdb/blob/39b0ec1a/test/etap/120-stats-collect.t
----------------------------------------------------------------------
diff --git a/test/etap/120-stats-collect.t b/test/etap/120-stats-collect.t
deleted file mode 100755
index a30f9ac..0000000
--- a/test/etap/120-stats-collect.t
+++ /dev/null
@@ -1,150 +0,0 @@
-#!/usr/bin/env escript
-%% -*- erlang -*-
-
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
-main(_) ->
-    test_util:init_code_path(),
-    etap:plan(11),
-    case (catch test()) of
-        ok ->
-            etap:end_tests();
-        Other ->
-            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
-            etap:bail()
-    end,
-    ok.
-
-test() ->
-    couch_stats_collector:start(),
-    ok = test_counters(),
-    ok = test_abs_values(),
-    ok = test_proc_counting(),
-    ok = test_all(),
-    ok.
-
-test_counters() ->
-    AddCount = fun() -> couch_stats_collector:increment(foo) end,
-    RemCount = fun() -> couch_stats_collector:decrement(foo) end,
-    repeat(AddCount, 100),
-    repeat(RemCount, 25),
-    repeat(AddCount, 10),
-    repeat(RemCount, 5),
-    etap:is(
-        couch_stats_collector:get(foo),
-        80,
-        "Incrememnt tracks correctly."
-    ),
-
-    repeat(RemCount, 80),
-    etap:is(
-        couch_stats_collector:get(foo),
-        0,
-        "Decremented to zaro."
-    ),
-    ok.
-
-test_abs_values() ->
-    lists:map(fun(Val) ->
-        couch_stats_collector:record(bar, Val)
-    end, lists:seq(1, 15)),
-    etap:is(
-        couch_stats_collector:get(bar),
-        lists:seq(1, 15),
-        "Absolute values are recorded correctly."
-    ),
-    
-    couch_stats_collector:clear(bar),
-    etap:is(
-        couch_stats_collector:get(bar),
-        nil,
-        "Absolute values are cleared correctly."
-    ),
-    ok.
-
-test_proc_counting() ->
-    Self = self(),
-    OnePid = spawn(fun() ->
-        couch_stats_collector:track_process_count(hoopla),
-        Self ! reporting,
-        receive sepuku -> ok end
-    end),
-    R1 = erlang:monitor(process, OnePid),
-    receive reporting -> ok end,
-    etap:is(
-        couch_stats_collector:get(hoopla),
-        1,
-        "track_process_count increments the counter."
-    ),
-    
-    TwicePid = spawn(fun() ->
-        couch_stats_collector:track_process_count(hoopla),
-        couch_stats_collector:track_process_count(hoopla),
-        Self ! reporting,
-        receive sepuku -> ok end
-    end),
-    R2 = erlang:monitor(process, TwicePid),
-    receive reporting -> ok end,
-    etap:is(
-        couch_stats_collector:get(hoopla),
-        3,
-        "track_process_count allows more than one incrememnt per Pid"
-    ),
-    
-    OnePid ! sepuku,
-    receive {'DOWN', R1, _, _, _} -> ok end,
-    timer:sleep(250),
-    etap:is(
-        couch_stats_collector:get(hoopla),
-        2,
-        "Process count is decremented when process exits."
-    ),
-    
-    TwicePid ! sepuku,
-    receive {'DOWN', R2, _, _, _} -> ok end,
-    timer:sleep(250),
-    etap:is(
-        couch_stats_collector:get(hoopla),
-        0,
-        "Process count is decremented for each call to track_process_count."
-    ),
-    ok.
-
-test_all() ->
-    couch_stats_collector:record(bar, 0.0),
-    couch_stats_collector:record(bar, 1.0),
-    etap:is(
-        lists:sort(couch_stats_collector:all()),
-        [ {bar,[1.0,0.0]}, {foo,0}, { hoopla,0} ],
-        "all/0 returns all counters and absolute values."
-    ),
-    
-    etap:is(
-        lists:sort(couch_stats_collector:all(incremental)),
-        [ {foo, 0}, {hoopla, 0} ],
-        "all/1 returns only the specified type."
-    ),
-    
-    couch_stats_collector:record(zing, 90),
-    etap:is(
-        lists:sort(couch_stats_collector:all(absolute)),
-        [ {bar,[1.0,0.0]}, {zing,"Z"} ],
-        "all/1 returns only the specified type."
-    ),
-    ok.
-
-repeat(_, 0) ->
-    ok;
-repeat(Fun, Count) ->
-    Fun(),
-    repeat(Fun, Count-1).

http://git-wip-us.apache.org/repos/asf/couchdb/blob/39b0ec1a/test/etap/Makefile.am
----------------------------------------------------------------------
diff --git a/test/etap/Makefile.am b/test/etap/Makefile.am
index 2e38ee5..216aa78 100644
--- a/test/etap/Makefile.am
+++ b/test/etap/Makefile.am
@@ -36,7 +36,6 @@ fixture_files = \
     fixtures/test.couch
 
 tap_files = \
-    120-stats-collect.t \
     121-stats-aggregates.cfg \
     121-stats-aggregates.ini \
     121-stats-aggregates.t \


[29/36] couchdb commit: updated refs/heads/1963-eunit to 85f2750

Posted by kx...@apache.org.
Port 130-attachments-md5.t etap test suite to eunit

Add random document id generator macros.
Have to use handmade http client instead of ibrowse since it makes too
complicated sending chunked requests.


Project: http://git-wip-us.apache.org/repos/asf/couchdb/repo
Commit: http://git-wip-us.apache.org/repos/asf/couchdb/commit/f9199df3
Tree: http://git-wip-us.apache.org/repos/asf/couchdb/tree/f9199df3
Diff: http://git-wip-us.apache.org/repos/asf/couchdb/diff/f9199df3

Branch: refs/heads/1963-eunit
Commit: f9199df3d38c6dd60d7b96f9c4bcd8e2be1ab125
Parents: ac9379a
Author: Alexander Shorin <kx...@apache.org>
Authored: Wed May 28 05:32:11 2014 +0400
Committer: Alexander Shorin <kx...@apache.org>
Committed: Tue Jun 3 19:52:41 2014 +0400

----------------------------------------------------------------------
 test/couchdb/Makefile.am                   |   1 +
 test/couchdb/couchdb_attachments_tests.erl | 278 ++++++++++++++++++++++++
 test/couchdb/couchdb_tests.hrl.in          |   5 +
 test/etap/130-attachments-md5.t            | 248 ---------------------
 test/etap/Makefile.am                      |   1 -
 5 files changed, 284 insertions(+), 249 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/couchdb/blob/f9199df3/test/couchdb/Makefile.am
----------------------------------------------------------------------
diff --git a/test/couchdb/Makefile.am b/test/couchdb/Makefile.am
index c047aa4..563a916 100644
--- a/test/couchdb/Makefile.am
+++ b/test/couchdb/Makefile.am
@@ -37,6 +37,7 @@ eunit_files = \
     couch_task_status_tests.erl \
     couch_ref_counter_tests.erl \
     couch_stats_tests.erl \
+    couchdb_attachments_tests.erl \
     test_request.erl \
     couchdb_tests.hrl
 

http://git-wip-us.apache.org/repos/asf/couchdb/blob/f9199df3/test/couchdb/couchdb_attachments_tests.erl
----------------------------------------------------------------------
diff --git a/test/couchdb/couchdb_attachments_tests.erl b/test/couchdb/couchdb_attachments_tests.erl
new file mode 100644
index 0000000..d5a49ee
--- /dev/null
+++ b/test/couchdb/couchdb_attachments_tests.erl
@@ -0,0 +1,278 @@
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+%   http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+-module(couchdb_attachments_tests).
+
+-include_lib("../../src/couchdb/couch_db.hrl").
+-include_lib("couchdb_tests.hrl").
+
+-define(TIMEWAIT, 100).
+-define(i2l(I), integer_to_list(I)).
+
+
+start() ->
+    couch_server_sup:start_link(?CONFIG_CHAIN),
+    % disable logging to reduce noise in stdout
+    couch_config:set("log", "level", "none", false),
+    ok.
+
+stop(_) ->
+    couch_server_sup:stop(),
+    ok.
+
+setup() ->
+    DbName = ?tempdb(),
+    {ok, Db} = couch_db:create(DbName, []),
+    ok = couch_db:close(Db),
+    Addr = couch_config:get("httpd", "bind_address", any),
+    Port = mochiweb_socket_server:get(couch_httpd, port),
+    Host = Addr ++ ":" ++ ?i2l(Port),
+    {Host, ?b2l(DbName)}.
+
+teardown({_, DbName}) ->
+    ok = couch_server:delete(?l2b(DbName), []),
+    ok.
+
+
+attachments_test_() ->
+    {
+        "Attachments tests",
+        {
+            setup,
+            fun start/0, fun stop/1,
+            [
+                attachments_md5_tests()
+            ]
+        }
+    }.
+
+attachments_md5_tests() ->
+    {
+        "Attachments MD5 tests",
+        {
+            foreach,
+            fun setup/0, fun teardown/1,
+            [
+                fun should_upload_attachment_without_md5/1,
+                fun should_upload_attachment_by_chunks_without_md5/1,
+                fun should_upload_attachment_with_valid_md5_header/1,
+                fun should_upload_attachment_by_chunks_with_valid_md5_header/1,
+                fun should_upload_attachment_by_chunks_with_valid_md5_trailer/1,
+                fun should_reject_attachment_with_invalid_md5/1,
+                fun should_reject_chunked_attachment_with_invalid_md5/1,
+                fun should_reject_chunked_attachment_with_invalid_md5_trailer/1
+            ]
+        }
+    }.
+
+
+should_upload_attachment_without_md5({Host, DbName}) ->
+    ?_assert(
+        begin
+            AttUrl = string:join(["", DbName, ?docid(), "readme.txt"], "/"),
+            Body = "We all live in a yellow submarine!",
+            Headers = [
+                {"Content-Length", "34"},
+                {"Content-Type", "text/plain"},
+                {"Host", Host}
+            ],
+            {ok, Code, Json} = request("PUT", AttUrl, Headers, Body),
+            ?assertEqual(201, Code),
+            ?assertEqual(true, get_json(Json, [<<"ok">>])),
+            true
+        end).
+
+should_upload_attachment_by_chunks_without_md5({Host, DbName}) ->
+    ?_assert(
+        begin
+            AttUrl = string:join(["", DbName, ?docid(), "readme.txt"], "/"),
+            AttData = <<"We all live in a yellow submarine!">>,
+            <<Part1:21/binary, Part2:13/binary>> = AttData,
+            Body = chunked_body([Part1, Part2]),
+            Headers = [
+                {"Content-Type", "text/plain"},
+                {"Transfer-Encoding", "chunked"},
+                {"Host", Host}
+            ],
+            {ok, Code, Json} = request("PUT", AttUrl, Headers, Body),
+            ?assertEqual(201, Code),
+            ?assertEqual(true, get_json(Json, [<<"ok">>])),
+            true
+        end).
+
+should_upload_attachment_with_valid_md5_header({Host, DbName}) ->
+    ?_assert(
+        begin
+            AttUrl = string:join(["", DbName, ?docid(), "readme.txt"], "/"),
+            Body = "We all live in a yellow submarine!",
+            Headers = [
+                {"Content-Length", "34"},
+                {"Content-Type", "text/plain"},
+                {"Content-MD5", ?b2l(base64:encode(couch_util:md5(Body)))},
+                {"Host", Host}
+            ],
+            {ok, Code, Json} = request("PUT", AttUrl, Headers, Body),
+            ?assertEqual(201, Code),
+            ?assertEqual(true, get_json(Json, [<<"ok">>])),
+            true
+        end).
+
+should_upload_attachment_by_chunks_with_valid_md5_header({Host, DbName}) ->
+    ?_assert(
+        begin
+            AttUrl = string:join(["", DbName, ?docid(), "readme.txt"], "/"),
+            AttData = <<"We all live in a yellow submarine!">>,
+            <<Part1:21/binary, Part2:13/binary>> = AttData,
+            Body = chunked_body([Part1, Part2]),
+            Headers = [
+                {"Content-Type", "text/plain"},
+                {"Content-MD5", ?b2l(base64:encode(couch_util:md5(AttData)))},
+                {"Host", Host},
+                {"Transfer-Encoding", "chunked"}
+            ],
+            {ok, Code, Json} = request("PUT", AttUrl, Headers, Body),
+            ?assertEqual(201, Code),
+            ?assertEqual(true, get_json(Json, [<<"ok">>])),
+            true
+        end).
+
+should_upload_attachment_by_chunks_with_valid_md5_trailer({Host, DbName}) ->
+    ?_assert(
+        begin
+            AttUrl = string:join(["", DbName, ?docid(), "readme.txt"], "/"),
+            AttData = <<"We all live in a yellow submarine!">>,
+            <<Part1:21/binary, Part2:13/binary>> = AttData,
+            Body = [chunked_body([Part1, Part2]),
+                    "Content-MD5: ", base64:encode(couch_util:md5(AttData)),
+                    "\r\n"],
+            Headers = [
+                {"Content-Type", "text/plain"},
+                {"Host", Host},
+                {"Trailer", "Content-MD5"},
+                {"Transfer-Encoding", "chunked"}
+            ],
+            {ok, Code, Json} = request("PUT", AttUrl, Headers, Body),
+            ?assertEqual(201, Code),
+            ?assertEqual(true, get_json(Json, [<<"ok">>])),
+            true
+        end).
+
+should_reject_attachment_with_invalid_md5({Host, DbName}) ->
+    ?_assert(
+        begin
+            AttUrl = string:join(["", DbName, ?docid(), "readme.txt"], "/"),
+            Body = "We all live in a yellow submarine!",
+            Headers = [
+                {"Content-Length", "34"},
+                {"Content-Type", "text/plain"},
+                {"Content-MD5", ?b2l(base64:encode(<<"foobar!">>))},
+                {"Host", Host}
+            ],
+            {ok, Code, Json} = request("PUT", AttUrl, Headers, Body),
+            ?assertEqual(400, Code),
+            ?assertEqual(<<"content_md5_mismatch">>,
+                         get_json(Json, [<<"error">>])),
+            true
+        end).
+
+
+should_reject_chunked_attachment_with_invalid_md5({Host, DbName}) ->
+    ?_assert(
+        begin
+            AttUrl = string:join(["", DbName, ?docid(), "readme.txt"], "/"),
+            AttData = <<"We all live in a yellow submarine!">>,
+            <<Part1:21/binary, Part2:13/binary>> = AttData,
+            Body = chunked_body([Part1, Part2]),
+            Headers = [
+                {"Content-Type", "text/plain"},
+                {"Content-MD5", ?b2l(base64:encode(<<"foobar!">>))},
+                {"Host", Host},
+                {"Transfer-Encoding", "chunked"}
+            ],
+            {ok, Code, Json} = request("PUT", AttUrl, Headers, Body),
+            ?assertEqual(400, Code),
+            ?assertEqual(<<"content_md5_mismatch">>,
+                         get_json(Json, [<<"error">>])),
+            true
+        end).
+
+should_reject_chunked_attachment_with_invalid_md5_trailer({Host, DbName}) ->
+    ?_assert(
+        begin
+            AttUrl = string:join(["", DbName, ?docid(), "readme.txt"], "/"),
+            AttData = <<"We all live in a yellow submarine!">>,
+            <<Part1:21/binary, Part2:13/binary>> = AttData,
+            Body = [chunked_body([Part1, Part2]),
+                    "Content-MD5: ", base64:encode(<<"foobar!">>),
+                    "\r\n"],
+            Headers = [
+                {"Content-Type", "text/plain"},
+                {"Host", Host},
+                {"Trailer", "Content-MD5"},
+                {"Transfer-Encoding", "chunked"}
+            ],
+            {ok, Code, Json} = request("PUT", AttUrl, Headers, Body),
+            ?assertEqual(400, Code),
+            ?assertEqual(<<"content_md5_mismatch">>,
+                         get_json(Json, [<<"error">>])),
+            true
+        end).
+
+
+get_json(Json, Path) ->
+    couch_util:get_nested_json_value(Json, Path).
+
+to_hex(Val) ->
+    to_hex(Val, []).
+
+to_hex(0, Acc) ->
+    Acc;
+to_hex(Val, Acc) ->
+    to_hex(Val div 16, [hex_char(Val rem 16) | Acc]).
+
+hex_char(V) when V < 10 -> $0 + V;
+hex_char(V) -> $A + V - 10.
+
+chunked_body(Chunks) ->
+    chunked_body(Chunks, []).
+
+chunked_body([], Acc) ->
+    iolist_to_binary(lists:reverse(Acc, "0\r\n"));
+chunked_body([Chunk | Rest], Acc) ->
+    Size = to_hex(size(Chunk)),
+    chunked_body(Rest, ["\r\n", Chunk, "\r\n", Size | Acc]).
+
+get_socket() ->
+    Options = [binary, {packet, 0}, {active, false}],
+    Addr = couch_config:get("httpd", "bind_address", any),
+    Port = mochiweb_socket_server:get(couch_httpd, port),
+    {ok, Sock} = gen_tcp:connect(Addr, Port, Options),
+    Sock.
+
+request(Method, Url, Headers, Body) ->
+    RequestHead = [Method, " ", Url, " HTTP/1.1"],
+    RequestHeaders = [[string:join([Key, Value], ": "), "\r\n"]
+                      || {Key, Value} <- Headers],
+    Request = [RequestHead, "\r\n", RequestHeaders, "\r\n", Body, "\r\n"],
+    Sock = get_socket(),
+    gen_tcp:send(Sock, list_to_binary(lists:flatten(Request))),
+    timer:sleep(?TIMEWAIT),  % must wait to receive complete response
+    {ok, R} = gen_tcp:recv(Sock, 0),
+    gen_tcp:close(Sock),
+    [Header, Body1] = re:split(R, "\r\n\r\n", [{return, binary}]),
+    {ok, {http_response, _, Code, _}, _} =
+        erlang:decode_packet(http, Header, []),
+    Json = ejson:decode(Body1),
+    {ok, Code, Json}.
+
+
+

http://git-wip-us.apache.org/repos/asf/couchdb/blob/f9199df3/test/couchdb/couchdb_tests.hrl.in
----------------------------------------------------------------------
diff --git a/test/couchdb/couchdb_tests.hrl.in b/test/couchdb/couchdb_tests.hrl.in
index 78d537f..cdb0346 100644
--- a/test/couchdb/couchdb_tests.hrl.in
+++ b/test/couchdb/couchdb_tests.hrl.in
@@ -37,3 +37,8 @@
             Suffix = lists:concat([integer_to_list(Num) || Num <- Nums]),
             list_to_binary(Prefix ++ "-" ++ Suffix)
     end).
+-define(docid,
+    fun() ->
+        {A, B, C} = erlang:now(),
+        lists:flatten(io_lib:format("~p~p~p", [A, B, C]))
+    end).

http://git-wip-us.apache.org/repos/asf/couchdb/blob/f9199df3/test/etap/130-attachments-md5.t
----------------------------------------------------------------------
diff --git a/test/etap/130-attachments-md5.t b/test/etap/130-attachments-md5.t
deleted file mode 100755
index a91c9bf..0000000
--- a/test/etap/130-attachments-md5.t
+++ /dev/null
@@ -1,248 +0,0 @@
-#!/usr/bin/env escript
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
-test_db_name() ->
-    <<"etap-test-db">>.
-
-docid() ->
-    case get(docid) of
-        undefined ->
-            put(docid, 1),
-            "1";
-        Count ->
-            put(docid, Count+1),
-            integer_to_list(Count+1)
-    end.
-
-main(_) ->
-    test_util:init_code_path(),
-    
-    etap:plan(16),
-    case (catch test()) of
-        ok ->
-            etap:end_tests();
-        Other ->
-            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
-            etap:bail(Other)
-    end,
-    ok.
-
-test() ->
-    couch_server_sup:start_link(test_util:config_files()),
-    Addr = couch_config:get("httpd", "bind_address", any),
-    put(addr, Addr),
-    put(port, mochiweb_socket_server:get(couch_httpd, port)),
-    timer:sleep(1000),
-
-    couch_server:delete(test_db_name(), []),
-    couch_db:create(test_db_name(), []),
-
-    test_identity_without_md5(),
-    test_chunked_without_md5(),
-
-    test_identity_with_valid_md5(),
-    test_chunked_with_valid_md5_header(),
-    test_chunked_with_valid_md5_trailer(),
-
-    test_identity_with_invalid_md5(),
-    test_chunked_with_invalid_md5_header(),
-    test_chunked_with_invalid_md5_trailer(),
-
-    couch_server:delete(test_db_name(), []),
-    couch_server_sup:stop(),
-    ok.
-
-test_identity_without_md5() ->
-    Data = [
-        "PUT /", test_db_name(), "/", docid(), "/readme.txt HTTP/1.1\r\n",
-        "Content-Type: text/plain\r\n",
-        "Content-Length: 34\r\n",
-        "\r\n",
-        "We all live in a yellow submarine!"],
-
-    {Code, Json} = do_request(Data),
-    etap:is(Code, 201, "Stored with identity encoding and no MD5"),
-    etap:is(get_json(Json, [<<"ok">>]), true, "Body indicates success.").
-
-test_chunked_without_md5() ->
-    AttData = <<"We all live in a yellow submarine!">>,
-    <<Part1:21/binary, Part2:13/binary>> = AttData,
-    Data = [
-        "PUT /", test_db_name(), "/", docid(), "/readme.txt HTTP/1.1\r\n",
-        "Content-Type: text/plain\r\n",
-        "Transfer-Encoding: chunked\r\n",
-        "\r\n",
-        to_hex(size(Part1)), "\r\n",
-        Part1, "\r\n",
-        to_hex(size(Part2)), "\r\n",
-        Part2, "\r\n"
-        "0\r\n"
-        "\r\n"],
-
-    {Code, Json} = do_request(Data),
-    etap:is(Code, 201, "Stored with chunked encoding and no MD5"),
-    etap:is(get_json(Json, [<<"ok">>]), true, "Body indicates success.").
-
-test_identity_with_valid_md5() ->
-    AttData = "We all live in a yellow submarine!",
-    Data = [
-        "PUT /", test_db_name(), "/", docid(), "/readme.txt HTTP/1.1\r\n",
-        "Content-Type: text/plain\r\n",
-        "Content-Length: 34\r\n",
-        "Content-MD5: ", base64:encode(couch_util:md5(AttData)), "\r\n",
-        "\r\n",
-        AttData],
-
-    {Code, Json} = do_request(Data),
-    etap:is(Code, 201, "Stored with identity encoding and valid MD5"),
-    etap:is(get_json(Json, [<<"ok">>]), true, "Body indicates success.").
-
-test_chunked_with_valid_md5_header() ->
-    AttData = <<"We all live in a yellow submarine!">>,
-    <<Part1:21/binary, Part2:13/binary>> = AttData,
-    Data = [
-        "PUT /", test_db_name(), "/", docid(), "/readme.txt HTTP/1.1\r\n",
-        "Content-Type: text/plain\r\n",
-        "Transfer-Encoding: chunked\r\n",
-        "Content-MD5: ", base64:encode(couch_util:md5(AttData)), "\r\n",
-        "\r\n",
-        to_hex(size(Part1)), "\r\n",
-        Part1, "\r\n",
-        to_hex(size(Part2)), "\r\n",
-        Part2, "\r\n",
-        "0\r\n",
-        "\r\n"],
-
-    {Code, Json} = do_request(Data),
-    etap:is(Code, 201, "Stored with chunked encoding and valid MD5 header."),
-    etap:is(get_json(Json, [<<"ok">>]), true, "Body indicates success.").
-
-test_chunked_with_valid_md5_trailer() ->
-    AttData = <<"We all live in a yellow submarine!">>,
-    <<Part1:21/binary, Part2:13/binary>> = AttData,
-    Data = [
-        "PUT /", test_db_name(), "/", docid(), "/readme.txt HTTP/1.1\r\n",
-        "Content-Type: text/plain\r\n",
-        "Transfer-Encoding: chunked\r\n",
-        "Trailer: Content-MD5\r\n",
-        "\r\n",
-        to_hex(size(Part1)), "\r\n",
-        Part1, "\r\n",
-        to_hex(size(Part2)), "\r\n",
-        Part2, "\r\n",
-        "0\r\n",
-        "Content-MD5: ", base64:encode(couch_util:md5(AttData)), "\r\n",
-        "\r\n"],
-
-    {Code, Json} = do_request(Data),
-    etap:is(Code, 201, "Stored with chunked encoding and valid MD5 trailer."),
-    etap:is(get_json(Json, [<<"ok">>]), true, "Body indicates success.").
-
-test_identity_with_invalid_md5() ->
-    Data = [
-        "PUT /", test_db_name(), "/", docid(), "/readme.txt HTTP/1.1\r\n",
-        "Content-Type: text/plain\r\n",
-        "Content-Length: 34\r\n",
-        "Content-MD5: ", base64:encode(<<"foobar!">>), "\r\n",
-        "\r\n",
-        "We all live in a yellow submarine!"],
-
-    {Code, Json} = do_request(Data),
-    etap:is(Code, 400, "Invalid MD5 header causes an error: identity"),
-    etap:is(
-        get_json(Json, [<<"error">>]),
-        <<"content_md5_mismatch">>,
-        "Body indicates reason for failure."
-    ).
-
-test_chunked_with_invalid_md5_header() ->
-    AttData = <<"We all live in a yellow submarine!">>,
-    <<Part1:21/binary, Part2:13/binary>> = AttData,
-    Data = [
-        "PUT /", test_db_name(), "/", docid(), "/readme.txt HTTP/1.1\r\n",
-        "Content-Type: text/plain\r\n",
-        "Transfer-Encoding: chunked\r\n",
-        "Content-MD5: ", base64:encode(<<"so sneaky...">>), "\r\n",
-        "\r\n",
-        to_hex(size(Part1)), "\r\n",
-        Part1, "\r\n",
-        to_hex(size(Part2)), "\r\n",
-        Part2, "\r\n",
-        "0\r\n",
-        "\r\n"],
-
-    {Code, Json} = do_request(Data),
-    etap:is(Code, 400, "Invalid MD5 header causes an error: chunked"),
-    etap:is(
-        get_json(Json, [<<"error">>]),
-        <<"content_md5_mismatch">>,
-        "Body indicates reason for failure."
-    ).
-
-test_chunked_with_invalid_md5_trailer() ->
-    AttData = <<"We all live in a yellow submarine!">>,
-    <<Part1:21/binary, Part2:13/binary>> = AttData,
-    Data = [
-        "PUT /", test_db_name(), "/", docid(), "/readme.txt HTTP/1.1\r\n",
-        "Content-Type: text/plain\r\n",
-        "Transfer-Encoding: chunked\r\n",
-        "Trailer: Content-MD5\r\n",
-        "\r\n",
-        to_hex(size(Part1)), "\r\n",
-        Part1, "\r\n",
-        to_hex(size(Part2)), "\r\n",
-        Part2, "\r\n",
-        "0\r\n",
-        "Content-MD5: ", base64:encode(<<"Kool-Aid Fountain!">>), "\r\n",
-        "\r\n"],
-
-    {Code, Json} = do_request(Data),
-    etap:is(Code, 400, "Invalid MD5 Trailer causes an error"),
-    etap:is(
-        get_json(Json, [<<"error">>]),
-        <<"content_md5_mismatch">>,
-        "Body indicates reason for failure."
-    ).
-
-
-get_socket() ->
-    Options = [binary, {packet, 0}, {active, false}],
-    {ok, Sock} = gen_tcp:connect(get(addr), get(port), Options),
-    Sock.
-
-do_request(Request) ->
-    Sock = get_socket(),
-    gen_tcp:send(Sock, list_to_binary(lists:flatten(Request))),
-    timer:sleep(1000),
-    {ok, R} = gen_tcp:recv(Sock, 0),
-    gen_tcp:close(Sock),
-    [Header, Body] = re:split(R, "\r\n\r\n", [{return, binary}]),
-    {ok, {http_response, _, Code, _}, _} =
-        erlang:decode_packet(http, Header, []),
-    Json = ejson:decode(Body),
-    {Code, Json}.
-
-get_json(Json, Path) ->
-    couch_util:get_nested_json_value(Json, Path).
-
-to_hex(Val) ->
-    to_hex(Val, []).
-
-to_hex(0, Acc) ->
-    Acc;
-to_hex(Val, Acc) ->
-    to_hex(Val div 16, [hex_char(Val rem 16) | Acc]).
-
-hex_char(V) when V < 10 -> $0 + V;
-hex_char(V) -> $A + V - 10.
-

http://git-wip-us.apache.org/repos/asf/couchdb/blob/f9199df3/test/etap/Makefile.am
----------------------------------------------------------------------
diff --git a/test/etap/Makefile.am b/test/etap/Makefile.am
index abe252d..f54e927 100644
--- a/test/etap/Makefile.am
+++ b/test/etap/Makefile.am
@@ -36,7 +36,6 @@ fixture_files = \
     fixtures/test.couch
 
 tap_files = \
-    130-attachments-md5.t \
     140-attachment-comp.t \
     150-invalid-view-seq.t \
     160-vhosts.t \


[11/36] couchdb commit: updated refs/heads/1963-eunit to 85f2750

Posted by kx...@apache.org.
Port 041-uuid.t etap test suite to eunit

Config files are removed in favor of using couch_config API instead.


Project: http://git-wip-us.apache.org/repos/asf/couchdb/repo
Commit: http://git-wip-us.apache.org/repos/asf/couchdb/commit/f33624f6
Tree: http://git-wip-us.apache.org/repos/asf/couchdb/tree/f33624f6
Diff: http://git-wip-us.apache.org/repos/asf/couchdb/diff/f33624f6

Branch: refs/heads/1963-eunit
Commit: f33624f6e8c229f977f250023b2bb88ca9067e95
Parents: e297472
Author: Alexander Shorin <kx...@apache.org>
Authored: Sat May 17 04:23:25 2014 +0400
Committer: Alexander Shorin <kx...@apache.org>
Committed: Tue Jun 3 15:14:25 2014 +0400

----------------------------------------------------------------------
 test/couchdb/Makefile.am           |   1 +
 test/couchdb/couch_uuids_tests.erl | 158 ++++++++++++++++++++++++++++++++
 test/etap/041-uuid-gen-id.ini      |  20 ----
 test/etap/041-uuid-gen-seq.ini     |  19 ----
 test/etap/041-uuid-gen-utc.ini     |  19 ----
 test/etap/041-uuid-gen.t           | 147 -----------------------------
 test/etap/Makefile.am              |   4 -
 7 files changed, 159 insertions(+), 209 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/couchdb/blob/f33624f6/test/couchdb/Makefile.am
----------------------------------------------------------------------
diff --git a/test/couchdb/Makefile.am b/test/couchdb/Makefile.am
index c67fe95..e7d13ff 100644
--- a/test/couchdb/Makefile.am
+++ b/test/couchdb/Makefile.am
@@ -22,6 +22,7 @@ eunit_files = \
     couch_file_tests.erl \
     couch_btree_tests.erl \
     couch_doc_json_tests.erl \
+    couch_uuids_tests.erl \
     couchdb_tests.hrl
 
 EXTRA_DIST = \

http://git-wip-us.apache.org/repos/asf/couchdb/blob/f33624f6/test/couchdb/couch_uuids_tests.erl
----------------------------------------------------------------------
diff --git a/test/couchdb/couch_uuids_tests.erl b/test/couchdb/couch_uuids_tests.erl
new file mode 100644
index 0000000..3e8b014
--- /dev/null
+++ b/test/couchdb/couch_uuids_tests.erl
@@ -0,0 +1,158 @@
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+%   http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+-module(couch_uuids_tests).
+
+-include_lib("couchdb_tests.hrl").
+
+
+setup() ->
+    {ok, Pid} = couch_config:start_link(?CONFIG_CHAIN),
+    erlang:monitor(process, Pid),
+    couch_uuids:start(),
+    Pid.
+
+setup(Opts) ->
+    Pid = setup(),
+    lists:foreach(
+        fun({Option, Value}) ->
+            couch_config:set("uuids", Option, Value, false)
+        end, Opts),
+    Pid.
+
+teardown(Pid) ->
+    couch_uuids:stop(),
+    couch_config:stop(),
+    receive
+        {'DOWN', _, _, Pid, _} -> ok
+    after
+        1000 -> throw({timeout_error, config_stop})
+    end.
+
+teardown(_, Pid) ->
+    teardown(Pid).
+
+
+default_test_() ->
+    {
+        "Default UUID algorithm",
+        {
+            setup,
+            fun setup/0, fun teardown/1,
+            fun should_be_unique/1
+        }
+    }.
+
+sequential_test_() ->
+    Opts = [{"algorithm", "sequential"}],
+    Cases = [
+        fun should_be_unique/2,
+        fun should_increment_monotonically/2,
+        fun should_rollover/2
+    ],
+    {
+        "UUID algorithm: sequential",
+        {
+            foreachx,
+            fun setup/1, fun teardown/2,
+            [{Opts, Fun} || Fun <- Cases]
+        }
+    }.
+
+utc_test_() ->
+    Opts = [{"algorithm", "utc_random"}],
+    Cases = [
+        fun should_be_unique/2,
+        fun should_increment_monotonically/2
+    ],
+    {
+        "UUID algorithm: utc_random",
+        {
+            foreachx,
+            fun setup/1, fun teardown/2,
+            [{Opts, Fun} || Fun <- Cases]
+        }
+    }.
+
+utc_id_suffix_test_() ->
+    Opts = [{"algorithm", "utc_id"}, {"utc_id_suffix", "bozo"}],
+    Cases = [
+        fun should_be_unique/2,
+        fun should_increment_monotonically/2,
+        fun should_preserve_suffix/2
+    ],
+    {
+        "UUID algorithm: utc_id",
+        {
+            foreachx,
+            fun setup/1, fun teardown/2,
+            [{Opts, Fun} || Fun <- Cases]
+        }
+    }.
+
+
+should_be_unique() ->
+    ?_assert(test_unique(10000, [couch_uuids:new()])).
+should_be_unique(_) ->
+    should_be_unique().
+should_be_unique(_, _) ->
+    should_be_unique().
+
+should_increment_monotonically(_, _) ->
+    ?_assert(couch_uuids:new() < couch_uuids:new()).
+
+should_rollover(_, _) ->
+    ?_test(begin
+        UUID = binary_to_list(couch_uuids:new()),
+        Prefix = element(1, lists:split(26, UUID)),
+        N = gen_until_pref_change(Prefix, 0),
+        ?assert(N >= 5000 andalso N =< 11000)
+    end).
+
+should_preserve_suffix(_, _) ->
+    ?_test(begin
+        UUID = binary_to_list(couch_uuids:new()),
+        Suffix = get_suffix(UUID),
+        ?assert(test_same_suffix(10000, Suffix))
+    end).
+
+
+test_unique(0, _) ->
+    true;
+test_unique(N, UUIDs) ->
+    UUID = couch_uuids:new(),
+    ?assertNot(lists:member(UUID, UUIDs)),
+    test_unique(N - 1, [UUID| UUIDs]).
+
+get_prefix(UUID) ->
+    element(1, lists:split(26, binary_to_list(UUID))).
+
+gen_until_pref_change(_, Count) when Count > 8251 ->
+    Count;
+gen_until_pref_change(Prefix, N) ->
+    case get_prefix(couch_uuids:new()) of
+        Prefix -> gen_until_pref_change(Prefix, N + 1);
+        _ -> N
+    end.
+
+get_suffix(UUID) when is_binary(UUID) ->
+    get_suffix(binary_to_list(UUID));
+get_suffix(UUID) ->
+    element(2, lists:split(14, UUID)).
+
+test_same_suffix(0, _) ->
+    true;
+test_same_suffix(N, Suffix) ->
+    case get_suffix(couch_uuids:new()) of
+        Suffix -> test_same_suffix(N - 1, Suffix);
+        _ -> false
+    end.

http://git-wip-us.apache.org/repos/asf/couchdb/blob/f33624f6/test/etap/041-uuid-gen-id.ini
----------------------------------------------------------------------
diff --git a/test/etap/041-uuid-gen-id.ini b/test/etap/041-uuid-gen-id.ini
deleted file mode 100644
index 6886efd..0000000
--- a/test/etap/041-uuid-gen-id.ini
+++ /dev/null
@@ -1,20 +0,0 @@
-; Licensed to the Apache Software Foundation (ASF) under one
-; or more contributor license agreements.  See the NOTICE file
-; distributed with this work for additional information
-; regarding copyright ownership.  The ASF licenses this file
-; to you under the Apache License, Version 2.0 (the
-; "License"); you may not use this file except in compliance
-; with the License.  You may obtain a copy of the License at
-;
-;   http://www.apache.org/licenses/LICENSE-2.0
-;
-; Unless required by applicable law or agreed to in writing,
-; software distributed under the License is distributed on an
-; "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-; KIND, either express or implied.  See the License for the
-; specific language governing permissions and limitations
-; under the License.
-
-[uuids]
-algorithm = utc_id
-utc_id_suffix = bozo

http://git-wip-us.apache.org/repos/asf/couchdb/blob/f33624f6/test/etap/041-uuid-gen-seq.ini
----------------------------------------------------------------------
diff --git a/test/etap/041-uuid-gen-seq.ini b/test/etap/041-uuid-gen-seq.ini
deleted file mode 100644
index 94cebc6..0000000
--- a/test/etap/041-uuid-gen-seq.ini
+++ /dev/null
@@ -1,19 +0,0 @@
-; Licensed to the Apache Software Foundation (ASF) under one
-; or more contributor license agreements.  See the NOTICE file
-; distributed with this work for additional information
-; regarding copyright ownership.  The ASF licenses this file
-; to you under the Apache License, Version 2.0 (the
-; "License"); you may not use this file except in compliance
-; with the License.  You may obtain a copy of the License at
-; 
-;   http://www.apache.org/licenses/LICENSE-2.0
-;
-; Unless required by applicable law or agreed to in writing,
-; software distributed under the License is distributed on an
-; "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-; KIND, either express or implied.  See the License for the
-; specific language governing permissions and limitations
-; under the License.
-
-[uuids]
-algorithm = sequential

http://git-wip-us.apache.org/repos/asf/couchdb/blob/f33624f6/test/etap/041-uuid-gen-utc.ini
----------------------------------------------------------------------
diff --git a/test/etap/041-uuid-gen-utc.ini b/test/etap/041-uuid-gen-utc.ini
deleted file mode 100644
index c2b8383..0000000
--- a/test/etap/041-uuid-gen-utc.ini
+++ /dev/null
@@ -1,19 +0,0 @@
-; Licensed to the Apache Software Foundation (ASF) under one
-; or more contributor license agreements.  See the NOTICE file
-; distributed with this work for additional information
-; regarding copyright ownership.  The ASF licenses this file
-; to you under the Apache License, Version 2.0 (the
-; "License"); you may not use this file except in compliance
-; with the License.  You may obtain a copy of the License at
-; 
-;   http://www.apache.org/licenses/LICENSE-2.0
-;
-; Unless required by applicable law or agreed to in writing,
-; software distributed under the License is distributed on an
-; "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-; KIND, either express or implied.  See the License for the
-; specific language governing permissions and limitations
-; under the License.
-
-[uuids]
-algorithm = utc_random

http://git-wip-us.apache.org/repos/asf/couchdb/blob/f33624f6/test/etap/041-uuid-gen.t
----------------------------------------------------------------------
diff --git a/test/etap/041-uuid-gen.t b/test/etap/041-uuid-gen.t
deleted file mode 100755
index 7234969..0000000
--- a/test/etap/041-uuid-gen.t
+++ /dev/null
@@ -1,147 +0,0 @@
-#!/usr/bin/env escript
-%% -*- erlang -*-
-
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
-default_config() ->
-    test_util:build_file("etc/couchdb/default_dev.ini").
-
-seq_alg_config() ->
-    test_util:source_file("test/etap/041-uuid-gen-seq.ini").
-
-utc_alg_config() ->
-    test_util:source_file("test/etap/041-uuid-gen-utc.ini").
-
-utc_id_alg_config() ->
-    test_util:source_file("test/etap/041-uuid-gen-id.ini").
-
-% Run tests and wait for the gen_servers to shutdown
-run_test(IniFiles, Test) ->
-    {ok, Pid} = couch_config:start_link(IniFiles),
-    erlang:monitor(process, Pid),
-    couch_uuids:start(),
-    Test(),
-    couch_uuids:stop(),
-    couch_config:stop(),
-    receive
-        {'DOWN', _, _, Pid, _} -> ok;
-        _Other -> etap:diag("OTHER: ~p~n", [_Other])
-    after
-        1000 -> throw({timeout_error, config_stop})
-    end.
-
-main(_) ->
-    test_util:init_code_path(),
-    application:start(crypto),
-    etap:plan(9),
-
-    case (catch test()) of
-        ok ->
-            etap:end_tests();
-        Other ->
-            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
-            etap:bail(Other)
-    end,
-    ok.
-
-test() ->
-
-    TestUnique = fun() ->
-        etap:is(
-            test_unique(10000, couch_uuids:new()),
-            true,
-            "Can generate 10K unique IDs"
-        )
-    end,
-    run_test([default_config()], TestUnique),
-    run_test([default_config(), seq_alg_config()], TestUnique),
-    run_test([default_config(), utc_alg_config()], TestUnique),
-    run_test([default_config(), utc_id_alg_config()], TestUnique),
-
-    TestMonotonic = fun () ->
-        etap:is(
-            couch_uuids:new() < couch_uuids:new(),
-            true,
-            "should produce monotonically increasing ids"
-        )
-    end,
-    run_test([default_config(), seq_alg_config()], TestMonotonic),
-    run_test([default_config(), utc_alg_config()], TestMonotonic),
-    run_test([default_config(), utc_id_alg_config()], TestMonotonic),
-
-    % Pretty sure that the average of a uniform distribution is the
-    % midpoint of the range. Thus, to exceed a threshold, we need
-    % approximately Total / (Range/2 + RangeMin) samples.
-    %
-    % In our case this works out to be 8194. (0xFFF000 / 0x7FF)
-    % These tests just fudge the limits for a good generator at 25%
-    % in either direction. Technically it should be possible to generate
-    % bounds that will show if your random number generator is not
-    % sufficiently random but I hated statistics in school.
-    TestRollOver = fun() ->
-        UUID = binary_to_list(couch_uuids:new()),
-        Prefix = element(1, lists:split(26, UUID)),
-        N = gen_until_pref_change(Prefix,0),
-        etap:diag("N is: ~p~n",[N]),                           
-        etap:is(
-            N >= 5000 andalso N =< 11000,
-            true,
-            "should roll over every so often."
-        )
-    end,
-    run_test([default_config(), seq_alg_config()], TestRollOver),
-
-    TestSuffix = fun() ->
-        UUID = binary_to_list(couch_uuids:new()),
-        Suffix = get_suffix(UUID),
-        etap:is(
-            test_same_suffix(100, Suffix),
-            true,
-            "utc_id ids should have the same suffix."
-        )
-    end,
-    run_test([default_config(), utc_id_alg_config()], TestSuffix).
-
-test_unique(0, _) ->
-    true;
-test_unique(N, UUID) ->
-    case couch_uuids:new() of
-        UUID ->
-            etap:diag("N: ~p~n", [N]),
-            false;
-        Else -> test_unique(N-1, Else)
-    end.
-
-get_prefix(UUID) ->
-    element(1, lists:split(26, binary_to_list(UUID))).
-
-gen_until_pref_change(_, Count) when Count > 8251 ->
-    Count;
-gen_until_pref_change(Prefix, N) ->
-    case get_prefix(couch_uuids:new()) of
-        Prefix -> gen_until_pref_change(Prefix, N+1);
-        _ -> N
-    end.
-
-get_suffix(UUID) when is_binary(UUID)->
-    get_suffix(binary_to_list(UUID));
-get_suffix(UUID) ->
-    element(2, lists:split(14, UUID)).
-
-test_same_suffix(0, _) ->
-    true;
-test_same_suffix(N, Suffix) ->
-    case get_suffix(couch_uuids:new()) of
-        Suffix -> test_same_suffix(N-1, Suffix);
-        _ -> false
-    end.

http://git-wip-us.apache.org/repos/asf/couchdb/blob/f33624f6/test/etap/Makefile.am
----------------------------------------------------------------------
diff --git a/test/etap/Makefile.am b/test/etap/Makefile.am
index 64570a0..f45853a 100644
--- a/test/etap/Makefile.am
+++ b/test/etap/Makefile.am
@@ -36,10 +36,6 @@ fixture_files = \
     fixtures/test.couch
 
 tap_files = \
-    041-uuid-gen-id.ini \
-    041-uuid-gen-seq.ini \
-    041-uuid-gen-utc.ini \
-    041-uuid-gen.t \
     042-work-queue.t \
     043-find-in-binary.t \
     050-stream.t \


[34/36] couchdb commit: updated refs/heads/1963-eunit to 85f2750

Posted by kx...@apache.org.
Port 171-os-daemons-config.t etap test suite to eunit

Merged into couchdb_os_daemons_tests suite.


Project: http://git-wip-us.apache.org/repos/asf/couchdb/repo
Commit: http://git-wip-us.apache.org/repos/asf/couchdb/commit/21a1c8bf
Tree: http://git-wip-us.apache.org/repos/asf/couchdb/tree/21a1c8bf
Diff: http://git-wip-us.apache.org/repos/asf/couchdb/diff/21a1c8bf

Branch: refs/heads/1963-eunit
Commit: 21a1c8bf0fcb161cecbd34c615ed3522878c34ac
Parents: f55d2c2
Author: Alexander Shorin <kx...@apache.org>
Authored: Tue Jun 3 14:10:25 2014 +0400
Committer: Alexander Shorin <kx...@apache.org>
Committed: Tue Jun 3 19:53:11 2014 +0400

----------------------------------------------------------------------
 test/couchdb/Makefile.am                        |   1 +
 test/couchdb/couch_auth_cache_tests.erl         |  40 +++-----
 test/couchdb/couchdb_os_daemons_tests.erl       |  21 ++++
 .../couchdb/fixtures/os_daemon_configer.escript | 101 +++++++++++++++++++
 test/etap/171-os-daemons-config.es              |  85 ----------------
 test/etap/171-os-daemons-config.t               |  74 --------------
 test/etap/Makefile.am                           |   2 -
 7 files changed, 139 insertions(+), 185 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/couchdb/blob/21a1c8bf/test/couchdb/Makefile.am
----------------------------------------------------------------------
diff --git a/test/couchdb/Makefile.am b/test/couchdb/Makefile.am
index 7dd0011..cb66c85 100644
--- a/test/couchdb/Makefile.am
+++ b/test/couchdb/Makefile.am
@@ -49,6 +49,7 @@ fixture_files = \
     fixtures/couch_stats_aggregates.cfg \
     fixtures/couch_stats_aggregates.ini \
     fixtures/os_daemon_looper.escript \
+    fixtures/os_daemon_configer.escript \
     fixtures/logo.png
 
 EXTRA_DIST = \

http://git-wip-us.apache.org/repos/asf/couchdb/blob/21a1c8bf/test/couchdb/couch_auth_cache_tests.erl
----------------------------------------------------------------------
diff --git a/test/couchdb/couch_auth_cache_tests.erl b/test/couchdb/couch_auth_cache_tests.erl
index 66b4c0c..bd4cac0 100644
--- a/test/couchdb/couch_auth_cache_tests.erl
+++ b/test/couchdb/couch_auth_cache_tests.erl
@@ -67,47 +67,43 @@ should_get_nil_on_missed_cache(_) ->
     ?_assertEqual(nil, couch_auth_cache:get_user_creds("joe")).
 
 should_get_right_password_hash(DbName) ->
-    ?_assert(begin
+    ?_test(begin
         PasswordHash = hash_password("pass1"),
         {ok, _} = update_user_doc(DbName, "joe", "pass1"),
         Creds = couch_auth_cache:get_user_creds("joe"),
         ?assertEqual(PasswordHash,
-                      couch_util:get_value(<<"password_sha">>, Creds)),
-        true
+                      couch_util:get_value(<<"password_sha">>, Creds))
     end).
 
 should_ensure_doc_hash_equals_cached_one(DbName) ->
-    ?_assert(begin
+    ?_test(begin
         {ok, _} = update_user_doc(DbName, "joe", "pass1"),
         Creds = couch_auth_cache:get_user_creds("joe"),
 
         CachedHash = couch_util:get_value(<<"password_sha">>, Creds),
         StoredHash = get_user_doc_password_sha(DbName, "joe"),
-        ?assertEqual(StoredHash, CachedHash),
-        true
+        ?assertEqual(StoredHash, CachedHash)
     end).
 
 should_update_password(DbName) ->
-    ?_assert(begin
+    ?_test(begin
         PasswordHash = hash_password("pass2"),
         {ok, Rev} = update_user_doc(DbName, "joe", "pass1"),
         {ok, _} = update_user_doc(DbName, "joe", "pass2", Rev),
         Creds = couch_auth_cache:get_user_creds("joe"),
         ?assertEqual(PasswordHash,
-                      couch_util:get_value(<<"password_sha">>, Creds)),
-        true
+                      couch_util:get_value(<<"password_sha">>, Creds))
     end).
 
 should_cleanup_cache_after_userdoc_deletion(DbName) ->
-    ?_assert(begin
+    ?_test(begin
         {ok, _} = update_user_doc(DbName, "joe", "pass1"),
         delete_user_doc(DbName, "joe"),
-        ?assertEqual(nil, couch_auth_cache:get_user_creds("joe")),
-        true
+        ?assertEqual(nil, couch_auth_cache:get_user_creds("joe"))
     end).
 
 should_restore_cache_after_userdoc_recreation(DbName) ->
-    ?_assert(begin
+    ?_test(begin
         PasswordHash = hash_password("pass5"),
         {ok, _} = update_user_doc(DbName, "joe", "pass1"),
         delete_user_doc(DbName, "joe"),
@@ -117,22 +113,20 @@ should_restore_cache_after_userdoc_recreation(DbName) ->
         Creds = couch_auth_cache:get_user_creds("joe"),
 
         ?assertEqual(PasswordHash,
-                      couch_util:get_value(<<"password_sha">>, Creds)),
-        true
+                      couch_util:get_value(<<"password_sha">>, Creds))
     end).
 
 should_drop_cache_on_auth_db_change(DbName) ->
-    ?_assert(begin
+    ?_test(begin
         {ok, _} = update_user_doc(DbName, "joe", "pass1"),
         full_commit(DbName),
         couch_config:set("couch_httpd_auth", "authentication_db",
                          ?b2l(?tempdb()), false),
-        ?assertEqual(nil, couch_auth_cache:get_user_creds("joe")),
-        true
+        ?assertEqual(nil, couch_auth_cache:get_user_creds("joe"))
     end).
 
 should_restore_cache_on_auth_db_change(DbName) ->
-    ?_assert(begin
+    ?_test(begin
         PasswordHash = hash_password("pass1"),
         {ok, _} = update_user_doc(DbName, "joe", "pass1"),
         Creds = couch_auth_cache:get_user_creds("joe"),
@@ -150,20 +144,18 @@ should_restore_cache_on_auth_db_change(DbName) ->
 
         Creds = couch_auth_cache:get_user_creds("joe"),
         ?assertEqual(PasswordHash,
-                      couch_util:get_value(<<"password_sha">>, Creds)),
-        true
+                      couch_util:get_value(<<"password_sha">>, Creds))
     end).
 
 should_recover_cache_after_shutdown(DbName) ->
-    ?_assert(begin
+    ?_test(begin
         PasswordHash = hash_password("pass2"),
         {ok, Rev0} = update_user_doc(DbName, "joe", "pass1"),
         {ok, Rev1} = update_user_doc(DbName, "joe", "pass2", Rev0),
         full_commit(DbName),
         shutdown_db(DbName),
         {ok, Rev1} = get_doc_rev(DbName, "joe"),
-        ?assertEqual(PasswordHash, get_user_doc_password_sha(DbName, "joe")),
-        true
+        ?assertEqual(PasswordHash, get_user_doc_password_sha(DbName, "joe"))
     end).
 
 

http://git-wip-us.apache.org/repos/asf/couchdb/blob/21a1c8bf/test/couchdb/couchdb_os_daemons_tests.erl
----------------------------------------------------------------------
diff --git a/test/couchdb/couchdb_os_daemons_tests.erl b/test/couchdb/couchdb_os_daemons_tests.erl
index f3c75df..277127f 100644
--- a/test/couchdb/couchdb_os_daemons_tests.erl
+++ b/test/couchdb/couchdb_os_daemons_tests.erl
@@ -26,6 +26,7 @@
     buf=[]
 }).
 
+-define(DAEMON_CONFIGER, "os_daemon_configer.escript").
 -define(DAEMON_LOOPER, "os_daemon_looper.escript").
 -define(DELAY, 100).
 
@@ -74,6 +75,17 @@ os_daemons_test_() ->
         }
     }.
 
+configuration_reader_test_() ->
+    {
+        "OS Daemon requests CouchDB configuration",
+        {
+            foreachx,
+            fun setup/1, fun teardown/2,
+            [{?DAEMON_CONFIGER,
+              fun should_read_write_config_settings_by_daemon/2}]
+        }
+    }.
+
 
 should_check_daemon(DName, _) ->
     ?_test(begin
@@ -132,6 +144,15 @@ should_keep_alive_one_daemon_on_killing_other(DName, _) ->
         check_daemon(T, DName)
     end).
 
+should_read_write_config_settings_by_daemon(DName, _) ->
+    ?_test(begin
+        % have to wait till daemon run all his tests
+        % see daemon's script for more info
+        timer:sleep(1000),
+        {ok, [D]} = couch_os_daemons:info([table]),
+        check_daemon(D, DName)
+    end).
+
 
 check_daemon(D) ->
     check_daemon(D, D#daemon.name).

http://git-wip-us.apache.org/repos/asf/couchdb/blob/21a1c8bf/test/couchdb/fixtures/os_daemon_configer.escript
----------------------------------------------------------------------
diff --git a/test/couchdb/fixtures/os_daemon_configer.escript b/test/couchdb/fixtures/os_daemon_configer.escript
new file mode 100755
index 0000000..337ed53
--- /dev/null
+++ b/test/couchdb/fixtures/os_daemon_configer.escript
@@ -0,0 +1,101 @@
+#! /usr/bin/env escript
+
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+%   http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+-include_lib("../couchdb_tests.hrl").
+
+
+read() ->
+    case io:get_line('') of
+        eof ->
+            stop;
+        Data ->
+            ejson:decode(Data)
+    end.
+
+write(Mesg) ->
+    Data = iolist_to_binary(ejson:encode(Mesg)),
+    io:format(binary_to_list(Data) ++ "\n", []).
+
+get_cfg(Section) ->
+    write([<<"get">>, Section]),
+    read().
+
+get_cfg(Section, Name) ->
+    write([<<"get">>, Section, Name]),
+    read().
+
+log(Mesg) ->
+    write([<<"log">>, Mesg]).
+
+log(Mesg, Level) ->
+    write([<<"log">>, Mesg, {[{<<"level">>, Level}]}]).
+
+test_get_cfg1() ->
+    Path = list_to_binary(?FILE),
+    FileName = list_to_binary(filename:basename(?FILE)),
+    {[{FileName, Path}]} = get_cfg(<<"os_daemons">>).
+
+test_get_cfg2() ->
+    Path = list_to_binary(?FILE),
+    FileName = list_to_binary(filename:basename(?FILE)),
+    Path = get_cfg(<<"os_daemons">>, FileName),
+    <<"sequential">> = get_cfg(<<"uuids">>, <<"algorithm">>).
+
+
+test_get_unknown_cfg() ->
+    {[]} = get_cfg(<<"aal;3p4">>),
+    null = get_cfg(<<"aal;3p4">>, <<"313234kjhsdfl">>).
+
+test_log() ->
+    log(<<"foobar!">>),
+    log(<<"some stuff!">>, <<"debug">>),
+    log(2),
+    log(true),
+    write([<<"log">>, <<"stuff">>, 2]),
+    write([<<"log">>, 3, null]),
+    write([<<"log">>, [1, 2], {[{<<"level">>, <<"debug">>}]}]),
+    write([<<"log">>, <<"true">>, {[]}]).
+
+do_tests() ->
+    test_get_cfg1(),
+    test_get_cfg2(),
+    test_get_unknown_cfg(),
+    test_log(),
+    loop(io:read("")).
+
+loop({ok, _}) ->
+    loop(io:read(""));
+loop(eof) ->
+    init:stop();
+loop({error, _Reason}) ->
+    init:stop().
+
+main([]) ->
+    init_code_path(),
+    couch_config:start_link(?CONFIG_CHAIN),
+    couch_drv:start_link(),
+    do_tests().
+
+init_code_path() ->
+    Paths = [
+        "couchdb",
+        "ejson",
+        "erlang-oauth",
+        "ibrowse",
+        "mochiweb",
+        "snappy"
+    ],
+    lists:foreach(fun(Name) ->
+        code:add_patha(filename:join([?BUILDDIR, "src", Name]))
+    end, Paths).

http://git-wip-us.apache.org/repos/asf/couchdb/blob/21a1c8bf/test/etap/171-os-daemons-config.es
----------------------------------------------------------------------
diff --git a/test/etap/171-os-daemons-config.es b/test/etap/171-os-daemons-config.es
deleted file mode 100755
index b4a914e..0000000
--- a/test/etap/171-os-daemons-config.es
+++ /dev/null
@@ -1,85 +0,0 @@
-#! /usr/bin/env escript
-
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
-filename() ->
-    list_to_binary(test_util:source_file("test/etap/171-os-daemons-config.es")).
-
-read() ->
-    case io:get_line('') of
-        eof ->
-            stop;
-        Data ->
-            ejson:decode(Data)
-    end.
-
-write(Mesg) ->
-    Data = iolist_to_binary(ejson:encode(Mesg)),
-    io:format(binary_to_list(Data) ++ "\n", []).
-
-get_cfg(Section) ->
-    write([<<"get">>, Section]),
-    read().
-
-get_cfg(Section, Name) ->
-    write([<<"get">>, Section, Name]),
-    read().
-
-log(Mesg) ->
-    write([<<"log">>, Mesg]).
-
-log(Mesg, Level) ->
-    write([<<"log">>, Mesg, {[{<<"level">>, Level}]}]).
-
-test_get_cfg1() ->
-    FileName = filename(),
-    {[{<<"foo">>, FileName}]} = get_cfg(<<"os_daemons">>).
-
-test_get_cfg2() ->
-    FileName = filename(),
-    FileName = get_cfg(<<"os_daemons">>, <<"foo">>),
-    <<"sequential">> = get_cfg(<<"uuids">>, <<"algorithm">>).
-
-test_get_unknown_cfg() ->
-    {[]} = get_cfg(<<"aal;3p4">>),
-    null = get_cfg(<<"aal;3p4">>, <<"313234kjhsdfl">>).
-
-test_log() ->
-    log(<<"foobar!">>),
-    log(<<"some stuff!">>, <<"debug">>),
-    log(2),
-    log(true),
-    write([<<"log">>, <<"stuff">>, 2]),
-    write([<<"log">>, 3, null]),
-    write([<<"log">>, [1, 2], {[{<<"level">>, <<"debug">>}]}]),
-    write([<<"log">>, <<"true">>, {[]}]).
-
-do_tests() ->
-    test_get_cfg1(),
-    test_get_cfg2(),
-    test_get_unknown_cfg(),
-    test_log(),
-    loop(io:read("")).
-
-loop({ok, _}) ->
-    loop(io:read(""));
-loop(eof) ->
-    init:stop();
-loop({error, _Reason}) ->
-    init:stop().
-
-main([]) ->
-    test_util:init_code_path(),
-    couch_config:start_link(test_util:config_files()),
-    couch_drv:start_link(),
-    do_tests().

http://git-wip-us.apache.org/repos/asf/couchdb/blob/21a1c8bf/test/etap/171-os-daemons-config.t
----------------------------------------------------------------------
diff --git a/test/etap/171-os-daemons-config.t b/test/etap/171-os-daemons-config.t
deleted file mode 100755
index e9dc3f3..0000000
--- a/test/etap/171-os-daemons-config.t
+++ /dev/null
@@ -1,74 +0,0 @@
-#!/usr/bin/env escript
-%% -*- erlang -*-
-
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License.  You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
--record(daemon, {
-    port,
-    name,
-    cmd,
-    kill,
-    status=running,
-    cfg_patterns=[],
-    errors=[],
-    buf=[]
-}).
-
-config_files() ->
-    lists:map(fun test_util:build_file/1, [
-        "etc/couchdb/default_dev.ini"
-    ]).
-
-daemon_cmd() ->
-    test_util:source_file("test/etap/171-os-daemons-config.es").
-
-main(_) ->
-    test_util:init_code_path(),
-
-    etap:plan(6),
-    case (catch test()) of
-        ok ->
-            etap:end_tests();
-        Other ->
-            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
-            etap:bail(Other)
-    end,
-    ok.
-
-test() ->
-    couch_config:start_link(config_files()),
-    couch_config:set("log", "level", "debug", false),
-    couch_log:start_link(),
-    couch_os_daemons:start_link(),
-
-    % "foo" is a required name by this test.
-    couch_config:set("os_daemons", "foo", daemon_cmd(), false),
-    timer:sleep(1000),
-    
-    {ok, [D1]} = couch_os_daemons:info([table]),
-    check_daemon(D1, "foo"),
-    
-    ok.
-
-check_daemon(D, Name) ->
-    BaseName = "171-os-daemons-config.es",
-    BaseLen = length(BaseName),
-    CmdLen = length(D#daemon.cmd),
-    CmdName = lists:sublist(D#daemon.cmd, CmdLen-BaseLen+1, BaseLen),
-
-    etap:is(is_port(D#daemon.port), true, "Daemon port is a port."),
-    etap:is(D#daemon.name, Name, "Daemon name was set correctly."),
-    etap:is(CmdName, BaseName, "Command name was set correctly."),
-    etap:isnt(D#daemon.kill, undefined, "Kill command was set."),
-    etap:is(D#daemon.errors, [], "No errors occurred while booting."),
-    etap:is(D#daemon.buf, [], "No extra data left in the buffer.").

http://git-wip-us.apache.org/repos/asf/couchdb/blob/21a1c8bf/test/etap/Makefile.am
----------------------------------------------------------------------
diff --git a/test/etap/Makefile.am b/test/etap/Makefile.am
index 13b5b4a..1ec6a48 100644
--- a/test/etap/Makefile.am
+++ b/test/etap/Makefile.am
@@ -36,8 +36,6 @@ fixture_files = \
     fixtures/test.couch
 
 tap_files = \
-    171-os-daemons-config.es \
-    171-os-daemons-config.t \
     172-os-daemon-errors.1.sh \
     172-os-daemon-errors.2.sh \
     172-os-daemon-errors.3.sh \


[04/36] couchdb commit: updated refs/heads/1963-eunit to 85f2750

Posted by kx...@apache.org.
Port 002-icu-driver.t etap test suite to eunit

See setup/0 comment for specific info about loading
couch_icu_driver with eunit.


Project: http://git-wip-us.apache.org/repos/asf/couchdb/repo
Commit: http://git-wip-us.apache.org/repos/asf/couchdb/commit/d4b721ff
Tree: http://git-wip-us.apache.org/repos/asf/couchdb/tree/d4b721ff
Diff: http://git-wip-us.apache.org/repos/asf/couchdb/diff/d4b721ff

Branch: refs/heads/1963-eunit
Commit: d4b721ffd5d74a8ed559a7e225468c76c3f75522
Parents: a57684c
Author: Alexander Shorin <kx...@apache.org>
Authored: Fri May 16 05:19:05 2014 +0400
Committer: Alexander Shorin <kx...@apache.org>
Committed: Tue Jun 3 02:54:34 2014 +0400

----------------------------------------------------------------------
 test/couchdb/Makefile.am          |  1 +
 test/couchdb/couch_util_tests.erl | 58 ++++++++++++++++++++++++++++++++++
 test/etap/002-icu-driver.t        | 33 -------------------
 test/etap/Makefile.am             |  1 -
 4 files changed, 59 insertions(+), 34 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/couchdb/blob/d4b721ff/test/couchdb/Makefile.am
----------------------------------------------------------------------
diff --git a/test/couchdb/Makefile.am b/test/couchdb/Makefile.am
index 4802082..8607049 100644
--- a/test/couchdb/Makefile.am
+++ b/test/couchdb/Makefile.am
@@ -18,6 +18,7 @@ all:
 
 eunit_files = \
     couchdb_modules_load_tests.erl \
+    couch_util_tests.erl \
     couchdb_tests.hrl
 
 EXTRA_DIST = \

http://git-wip-us.apache.org/repos/asf/couchdb/blob/d4b721ff/test/couchdb/couch_util_tests.erl
----------------------------------------------------------------------
diff --git a/test/couchdb/couch_util_tests.erl b/test/couchdb/couch_util_tests.erl
new file mode 100644
index 0000000..49688e2
--- /dev/null
+++ b/test/couchdb/couch_util_tests.erl
@@ -0,0 +1,58 @@
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+%   http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+
+-module(couch_util_tests).
+
+-include_lib("couchdb_tests.hrl").
+
+
+setup() ->
+    %% We cannot start driver from here since it becomes bounded to eunit
+    %% master process and the next couch_server_sup:start_link call will
+    %% fail because server couldn't load driver since it already is.
+    %%
+    %% On other hand, we cannot unload driver here due to
+    %% {error, not_loaded_by_this_process} while it is. Any ideas is welcome.
+    %%
+    couch_server_sup:start_link(?CONFIG_CHAIN),
+    %% couch_config:start_link(?CONFIG_CHAIN),
+    %% {ok, _} = couch_drv:start_link(),
+    ok.
+
+teardown(_) ->
+    couch_server_sup:stop(),
+    %% couch_config:stop(),
+    %% erl_ddll:unload_driver(couch_icu_driver),
+    ok.
+
+
+collation_test_() ->
+    {
+        "Collation tests",
+        [
+            {
+                setup,
+                fun setup/0, fun teardown/1,
+                [
+                    should_collate_ascii(),
+                    should_collate_non_ascii()
+                ]
+            }
+        ]
+    }.
+
+should_collate_ascii() ->
+    ?_assertEqual(1, couch_util:collate(<<"foo">>, <<"bar">>)).
+
+should_collate_non_ascii() ->
+    ?_assertEqual(-1, couch_util:collate(<<"A">>, <<"aa">>)).

http://git-wip-us.apache.org/repos/asf/couchdb/blob/d4b721ff/test/etap/002-icu-driver.t
----------------------------------------------------------------------
diff --git a/test/etap/002-icu-driver.t b/test/etap/002-icu-driver.t
deleted file mode 100755
index e233533..0000000
--- a/test/etap/002-icu-driver.t
+++ /dev/null
@@ -1,33 +0,0 @@
-#!/usr/bin/env escript
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
-main(_) ->
-    test_util:init_code_path(),
-    couch_config:start_link(test_util:config_files()),
-    etap:plan(3),
-    etap:is(
-        element(1, couch_drv:start_link()),
-        ok,
-        "Started couch_icu_driver."
-    ),
-    etap:is(
-        couch_util:collate(<<"foo">>, <<"bar">>),
-        1,
-        "Can collate stuff"
-    ),
-    etap:is(
-        couch_util:collate(<<"A">>, <<"aa">>),
-        -1,
-        "Collate's non-ascii style."
-    ),
-    etap:end_tests().

http://git-wip-us.apache.org/repos/asf/couchdb/blob/d4b721ff/test/etap/Makefile.am
----------------------------------------------------------------------
diff --git a/test/etap/Makefile.am b/test/etap/Makefile.am
index ff7f730..9a67ed7 100644
--- a/test/etap/Makefile.am
+++ b/test/etap/Makefile.am
@@ -36,7 +36,6 @@ fixture_files = \
     fixtures/test.couch
 
 tap_files = \
-    002-icu-driver.t \
     010-file-basics.t \
     011-file-headers.t \
     020-btree-basics.t \