Posted to commits@couchdb.apache.org by ch...@apache.org on 2014/09/05 04:30:13 UTC

[34/50] [abbrv] Move files out of test/couchdb into top level test/ folder

http://git-wip-us.apache.org/repos/asf/couchdb-couch/blob/0ce84d8e/test/couchdb/couch_btree_tests.erl
----------------------------------------------------------------------
diff --git a/test/couchdb/couch_btree_tests.erl b/test/couchdb/couch_btree_tests.erl
deleted file mode 100644
index 911640f..0000000
--- a/test/couchdb/couch_btree_tests.erl
+++ /dev/null
@@ -1,551 +0,0 @@
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
--module(couch_btree_tests).
-
--include("couch_eunit.hrl").
--include_lib("couchdb/couch_db.hrl").
-
--define(ROWS, 1000).
-
-
-setup() ->
-    {ok, Fd} = couch_file:open(?tempfile(), [create, overwrite]),
-    {ok, Btree} = couch_btree:open(nil, Fd, [{compression, none},
-                                             {reduce, fun reduce_fun/2}]),
-    {Fd, Btree}.
-
-setup_kvs(_) ->
-    setup().
-
-setup_red() ->
-    {_, EvenOddKVs} = lists:foldl(
-        fun(Idx, {Key, Acc}) ->
-            case Key of
-                "even" -> {"odd", [{{Key, Idx}, 1} | Acc]};
-                _ -> {"even", [{{Key, Idx}, 1} | Acc]}
-            end
-        end, {"odd", []}, lists:seq(1, ?ROWS)),
-    {Fd, Btree} = setup(),
-    {ok, Btree1} = couch_btree:add_remove(Btree, EvenOddKVs, []),
-    {Fd, Btree1}.
-setup_red(_) ->
-    setup_red().
-
-teardown(Fd) when is_pid(Fd) ->
-    ok = couch_file:close(Fd);
-teardown({Fd, _}) ->
-    teardown(Fd).
-teardown(_, {Fd, _}) ->
-    teardown(Fd).
-
-
-kvs_test_funs() ->
-    [
-        fun should_set_fd_correctly/2,
-        fun should_set_root_correctly/2,
-        fun should_create_zero_sized_btree/2,
-        fun should_set_reduce_option/2,
-        fun should_fold_over_empty_btree/2,
-        fun should_add_all_keys/2,
-        fun should_continuously_add_new_kv/2,
-        fun should_continuously_remove_keys/2,
-        fun should_insert_keys_in_reversed_order/2,
-        fun should_add_every_odd_key_remove_every_even/2,
-        fun should_add_every_even_key_remove_every_odd/2
-    ].
-
-red_test_funs() ->
-    [
-        fun should_reduce_whole_range/2,
-        fun should_reduce_first_half/2,
-        fun should_reduce_second_half/2
-    ].
-
-
-btree_open_test_() ->
-    {ok, Fd} = couch_file:open(?tempfile(), [create, overwrite]),
-    {ok, Btree} = couch_btree:open(nil, Fd, [{compression, none}]),
-    {
-        "Ensure that created btree is really a btree record",
-        ?_assert(is_record(Btree, btree))
-    }.
-
-sorted_kvs_test_() ->
-    Funs = kvs_test_funs(),
-    Sorted = [{Seq, random:uniform()} || Seq <- lists:seq(1, ?ROWS)],
-    {
-        "BTree with sorted keys",
-        {
-            foreachx,
-            fun setup_kvs/1, fun teardown/2,
-            [{Sorted, Fun} || Fun <- Funs]
-        }
-    }.
-
-rsorted_kvs_test_() ->
-    Sorted = [{Seq, random:uniform()} || Seq <- lists:seq(1, ?ROWS)],
-    Funs = kvs_test_funs(),
-    Reversed = lists:reverse(Sorted),
-    {
-        "BTree with backward sorted keys",
-        {
-            foreachx,
-            fun setup_kvs/1, fun teardown/2,
-            [{Reversed, Fun} || Fun <- Funs]
-        }
-    }.
-
-shuffled_kvs_test_() ->
-    Funs = kvs_test_funs(),
-    Sorted = [{Seq, random:uniform()} || Seq <- lists:seq(1, ?ROWS)],
-    Shuffled = shuffle(Sorted),
-    {
-        "BTree with shuffled keys",
-        {
-            foreachx,
-            fun setup_kvs/1, fun teardown/2,
-            [{Shuffled, Fun} || Fun <- Funs]
-        }
-    }.
-
-reductions_test_() ->
-    {
-        "BTree reductions",
-        [
-            {
-                "Common tests",
-                {
-                    foreach,
-                    fun setup_red/0, fun teardown/1,
-                    [
-                        fun should_reduce_without_specified_direction/1,
-                        fun should_reduce_forward/1,
-                        fun should_reduce_backward/1
-                    ]
-                }
-            },
-            {
-                "Range requests",
-                [
-                    {
-                        "Forward direction",
-                        {
-                            foreachx,
-                            fun setup_red/1, fun teardown/2,
-                            [{fwd, F} || F <- red_test_funs()]
-                        }
-                    },
-                    {
-                        "Backward direction",
-                        {
-                            foreachx,
-                            fun setup_red/1, fun teardown/2,
-                            [{rev, F} || F <- red_test_funs()]
-                        }
-                    }
-                ]
-            }
-        ]
-    }.
-
-
-should_set_fd_correctly(_, {Fd, Btree}) ->
-    ?_assertMatch(Fd, Btree#btree.fd).
-
-should_set_root_correctly(_, {_, Btree}) ->
-    ?_assertMatch(nil, Btree#btree.root).
-
-should_create_zero_sized_btree(_, {_, Btree}) ->
-    ?_assertMatch(0, couch_btree:size(Btree)).
-
-should_set_reduce_option(_, {_, Btree}) ->
-    ReduceFun = fun reduce_fun/2,
-    Btree1 = couch_btree:set_options(Btree, [{reduce, ReduceFun}]),
-    ?_assertMatch(ReduceFun, Btree1#btree.reduce).
-
-should_fold_over_empty_btree(_, {_, Btree}) ->
-    {ok, _, EmptyRes} = couch_btree:foldl(Btree, fun(_, X) -> {ok, X+1} end, 0),
-    ?_assertEqual(EmptyRes, 0).
-
-should_add_all_keys(KeyValues, {Fd, Btree}) ->
-    {ok, Btree1} = couch_btree:add_remove(Btree, KeyValues, []),
-    [
-        should_return_complete_btree_on_adding_all_keys(KeyValues, Btree1),
-        should_have_non_zero_size(Btree1),
-        should_have_lesser_size_than_file(Fd, Btree1),
-        should_keep_root_pointer_to_kp_node(Fd, Btree1),
-        should_remove_all_keys(KeyValues, Btree1)
-    ].
-
-should_return_complete_btree_on_adding_all_keys(KeyValues, Btree) ->
-    ?_assert(test_btree(Btree, KeyValues)).
-
-should_have_non_zero_size(Btree) ->
-    ?_assert(couch_btree:size(Btree) > 0).
-
-should_have_lesser_size_than_file(Fd, Btree) ->
-    ?_assert((couch_btree:size(Btree) =< couch_file:bytes(Fd))).
-
-should_keep_root_pointer_to_kp_node(Fd, Btree) ->
-    ?_assertMatch({ok, {kp_node, _}},
-                  couch_file:pread_term(Fd, element(1, Btree#btree.root))).
-
-should_remove_all_keys(KeyValues, Btree) ->
-    Keys = keys(KeyValues),
-    {ok, Btree1} = couch_btree:add_remove(Btree, [], Keys),
-    {
-        "Should remove all the keys",
-        [
-            should_produce_valid_btree(Btree1, []),
-            should_be_empty(Btree1)
-        ]
-    }.
-
-should_continuously_add_new_kv(KeyValues, {_, Btree}) ->
-    {Btree1, _} = lists:foldl(
-        fun(KV, {BtAcc, PrevSize}) ->
-            {ok, BtAcc2} = couch_btree:add_remove(BtAcc, [KV], []),
-            ?assert(couch_btree:size(BtAcc2) > PrevSize),
-            {BtAcc2, couch_btree:size(BtAcc2)}
-        end, {Btree, couch_btree:size(Btree)}, KeyValues),
-    {
-        "Should continuously add key-values to btree",
-        [
-            should_produce_valid_btree(Btree1, KeyValues),
-            should_not_be_empty(Btree1)
-        ]
-    }.
-
-should_continuously_remove_keys(KeyValues, {_, Btree}) ->
-    {ok, Btree1} = couch_btree:add_remove(Btree, KeyValues, []),
-    {Btree2, _} = lists:foldl(
-        fun({K, _}, {BtAcc, PrevSize}) ->
-            {ok, BtAcc2} = couch_btree:add_remove(BtAcc, [], [K]),
-            ?assert(couch_btree:size(BtAcc2) < PrevSize),
-            {BtAcc2, couch_btree:size(BtAcc2)}
-        end, {Btree1, couch_btree:size(Btree1)}, KeyValues),
-    {
-        "Should continuously remove keys from btree",
-        [
-            should_produce_valid_btree(Btree2, []),
-            should_be_empty(Btree2)
-        ]
-    }.
-
-should_insert_keys_in_reversed_order(KeyValues, {_, Btree}) ->
-    KeyValuesRev = lists:reverse(KeyValues),
-    {Btree1, _} = lists:foldl(
-        fun(KV, {BtAcc, PrevSize}) ->
-            {ok, BtAcc2} = couch_btree:add_remove(BtAcc, [KV], []),
-            ?assert(couch_btree:size(BtAcc2) > PrevSize),
-            {BtAcc2, couch_btree:size(BtAcc2)}
-        end, {Btree, couch_btree:size(Btree)}, KeyValuesRev),
-    should_produce_valid_btree(Btree1, KeyValues).
-
-should_add_every_odd_key_remove_every_even(KeyValues, {_, Btree}) ->
-    {ok, Btree1} = couch_btree:add_remove(Btree, KeyValues, []),
-    {_, Rem2Keys0, Rem2Keys1} = lists:foldl(fun(X, {Count, Left, Right}) ->
-        case Count rem 2 == 0 of
-            true -> {Count + 1, [X | Left], Right};
-            false -> {Count + 1, Left, [X | Right]}
-        end
-    end, {0, [], []}, KeyValues),
-    ?_assert(test_add_remove(Btree1, Rem2Keys0, Rem2Keys1)).
-
-should_add_every_even_key_remove_every_odd(KeyValues, {_, Btree}) ->
-    {ok, Btree1} = couch_btree:add_remove(Btree, KeyValues, []),
-    {_, Rem2Keys0, Rem2Keys1} = lists:foldl(fun(X, {Count, Left, Right}) ->
-        case Count rem 2 == 0 of
-            true -> {Count + 1, [X | Left], Right};
-            false -> {Count + 1, Left, [X | Right]}
-        end
-    end, {0, [], []}, KeyValues),
-    ?_assert(test_add_remove(Btree1, Rem2Keys1, Rem2Keys0)).
-
-
-should_reduce_without_specified_direction({_, Btree}) ->
-    ?_assertMatch(
-        {ok, [{{"odd", _}, ?ROWS div 2}, {{"even", _}, ?ROWS div 2}]},
-        fold_reduce(Btree, [])).
-
-should_reduce_forward({_, Btree}) ->
-    ?_assertMatch(
-        {ok, [{{"odd", _}, ?ROWS div 2}, {{"even", _}, ?ROWS div 2}]},
-        fold_reduce(Btree, [{dir, fwd}])).
-
-should_reduce_backward({_, Btree}) ->
-    ?_assertMatch(
-        {ok, [{{"even", _}, ?ROWS div 2}, {{"odd", _}, ?ROWS div 2}]},
-        fold_reduce(Btree, [{dir, rev}])).
-
-should_reduce_whole_range(fwd, {_, Btree}) ->
-    {SK, EK} = {{"even", 0}, {"odd", ?ROWS - 1}},
-    [
-        {
-            "include endkey",
-            ?_assertMatch(
-                {ok, [{{"odd", 1}, ?ROWS div 2},
-                      {{"even", 2}, ?ROWS div 2}]},
-                fold_reduce(Btree, [{dir, fwd},
-                                    {start_key, SK},
-                                    {end_key, EK}]))
-        },
-        {
-            "exclude endkey",
-            ?_assertMatch(
-                {ok, [{{"odd", 1}, (?ROWS div 2) - 1},
-                      {{"even", 2}, ?ROWS div 2}]},
-                fold_reduce(Btree, [{dir, fwd},
-                                    {start_key, SK},
-                                    {end_key_gt, EK}]))
-        }
-    ];
-should_reduce_whole_range(rev, {_, Btree}) ->
-    {SK, EK} = {{"odd", ?ROWS - 1}, {"even", 2}},
-    [
-        {
-            "include endkey",
-            ?_assertMatch(
-                {ok, [{{"even", ?ROWS}, ?ROWS div 2},
-                      {{"odd", ?ROWS - 1}, ?ROWS div 2}]},
-                fold_reduce(Btree, [{dir, rev},
-                                    {start_key, SK},
-                                    {end_key, EK}]))
-        },
-        {
-            "exclude endkey",
-            ?_assertMatch(
-                {ok, [{{"even", ?ROWS}, (?ROWS div 2) - 1},
-                      {{"odd", ?ROWS - 1}, ?ROWS div 2}]},
-                fold_reduce(Btree, [{dir, rev},
-                                    {start_key, SK},
-                                    {end_key_gt, EK}]))
-        }
-    ].
-
-should_reduce_first_half(fwd, {_, Btree}) ->
-    {SK, EK} = {{"even", 0}, {"odd", (?ROWS div 2) - 1}},
-    [
-        {
-            "include endkey",
-            ?_assertMatch(
-                {ok, [{{"odd", 1}, ?ROWS div 4},
-                      {{"even", 2}, ?ROWS div 2}]},
-                fold_reduce(Btree, [{dir, fwd},
-                                    {start_key, SK}, {end_key, EK}]))
-        },
-        {
-            "exclude endkey",
-            ?_assertMatch(
-                {ok, [{{"odd", 1}, (?ROWS div 4) - 1},
-                      {{"even", 2}, ?ROWS div 2}]},
-                fold_reduce(Btree, [{dir, fwd},
-                                    {start_key, SK},
-                                    {end_key_gt, EK}]))
-        }
-    ];
-should_reduce_first_half(rev, {_, Btree}) ->
-    {SK, EK} = {{"odd", ?ROWS - 1}, {"even", ?ROWS div 2}},
-    [
-        {
-            "include endkey",
-            ?_assertMatch(
-                {ok, [{{"even", ?ROWS}, (?ROWS div 4) + 1},
-                      {{"odd", ?ROWS - 1}, ?ROWS div 2}]},
-                fold_reduce(Btree, [{dir, rev},
-                                    {start_key, SK},
-                                    {end_key, EK}]))
-        },
-        {
-            "exclude endkey",
-            ?_assertMatch(
-                {ok, [{{"even", ?ROWS}, ?ROWS div 4},
-                      {{"odd", ?ROWS - 1}, ?ROWS div 2}]},
-                fold_reduce(Btree, [{dir, rev},
-                                    {start_key, SK},
-                                    {end_key_gt, EK}]))
-        }
-    ].
-
-should_reduce_second_half(fwd, {_, Btree}) ->
-    {SK, EK} = {{"even", ?ROWS div 2}, {"odd", ?ROWS - 1}},
-    [
-        {
-            "include endkey",
-            ?_assertMatch(
-                {ok, [{{"odd", 1}, ?ROWS div 2},
-                      {{"even", ?ROWS div 2}, (?ROWS div 4) + 1}]},
-                fold_reduce(Btree, [{dir, fwd},
-                                    {start_key, SK},
-                                    {end_key, EK}]))
-        },
-        {
-            "exclude endkey",
-            ?_assertMatch(
-                {ok, [{{"odd", 1}, (?ROWS div 2) - 1},
-                      {{"even", ?ROWS div 2}, (?ROWS div 4) + 1}]},
-                fold_reduce(Btree, [{dir, fwd},
-                                    {start_key, SK},
-                                    {end_key_gt, EK}]))
-        }
-    ];
-should_reduce_second_half(rev, {_, Btree}) ->
-    {SK, EK} = {{"odd", (?ROWS div 2) + 1}, {"even", 2}},
-    [
-        {
-            "include endkey",
-            ?_assertMatch(
-                {ok, [{{"even", ?ROWS}, ?ROWS div 2},
-                      {{"odd", (?ROWS div 2) + 1}, (?ROWS div 4) + 1}]},
-                fold_reduce(Btree, [{dir, rev},
-                                    {start_key, SK},
-                                    {end_key, EK}]))
-        },
-        {
-            "exclude endkey",
-            ?_assertMatch(
-                {ok, [{{"even", ?ROWS}, (?ROWS div 2) - 1},
-                      {{"odd", (?ROWS div 2) + 1}, (?ROWS div 4) + 1}]},
-                fold_reduce(Btree, [{dir, rev},
-                                    {start_key, SK},
-                                    {end_key_gt, EK}]))
-        }
-    ].
-
-should_produce_valid_btree(Btree, KeyValues) ->
-    ?_assert(test_btree(Btree, KeyValues)).
-
-should_be_empty(Btree) ->
-    ?_assertEqual(couch_btree:size(Btree), 0).
-
-should_not_be_empty(Btree) ->
-    ?_assert(couch_btree:size(Btree) > 0).
-
-fold_reduce(Btree, Opts) ->
-    GroupFun = fun({K1, _}, {K2, _}) ->
-        K1 == K2
-    end,
-    FoldFun = fun(GroupedKey, Unreduced, Acc) ->
-        {ok, [{GroupedKey, couch_btree:final_reduce(Btree, Unreduced)} | Acc]}
-    end,
-    couch_btree:fold_reduce(Btree, FoldFun, [],
-                            [{key_group_fun, GroupFun}] ++ Opts).
-
-
-keys(KVs) ->
-    [K || {K, _} <- KVs].
-
-reduce_fun(reduce, KVs) ->
-    length(KVs);
-reduce_fun(rereduce, Reds) ->
-    lists:sum(Reds).
-
-
-shuffle(List) ->
-    randomize(round(math:log(length(List)) + 0.5), List).
-
-randomize(1, List) ->
-    randomize(List);
-randomize(T, List) ->
-    lists:foldl(
-        fun(_E, Acc) ->
-            randomize(Acc)
-        end, randomize(List), lists:seq(1, (T - 1))).
-
-randomize(List) ->
-    D = lists:map(fun(A) -> {random:uniform(), A} end, List),
-    {_, D1} = lists:unzip(lists:keysort(1, D)),
-    D1.
-
-test_btree(Btree, KeyValues) ->
-    ok = test_key_access(Btree, KeyValues),
-    ok = test_lookup_access(Btree, KeyValues),
-    ok = test_final_reductions(Btree, KeyValues),
-    ok = test_traversal_callbacks(Btree, KeyValues),
-    true.
-
-test_add_remove(Btree, OutKeyValues, RemainingKeyValues) ->
-    Btree2 = lists:foldl(
-        fun({K, _}, BtAcc) ->
-            {ok, BtAcc2} = couch_btree:add_remove(BtAcc, [], [K]),
-            BtAcc2
-        end, Btree, OutKeyValues),
-    true = test_btree(Btree2, RemainingKeyValues),
-
-    Btree3 = lists:foldl(
-        fun(KV, BtAcc) ->
-            {ok, BtAcc2} = couch_btree:add_remove(BtAcc, [KV], []),
-            BtAcc2
-        end, Btree2, OutKeyValues),
-    true = test_btree(Btree3, OutKeyValues ++ RemainingKeyValues).
-
-test_key_access(Btree, List) ->
-    FoldFun = fun(Element, {[HAcc|TAcc], Count}) ->
-        case Element == HAcc of
-            true -> {ok, {TAcc, Count + 1}};
-            _ -> {ok, {TAcc, Count + 1}}
-        end
-    end,
-    Length = length(List),
-    Sorted = lists:sort(List),
-    {ok, _, {[], Length}} = couch_btree:foldl(Btree, FoldFun, {Sorted, 0}),
-    {ok, _, {[], Length}} = couch_btree:fold(Btree, FoldFun,
-                                             {Sorted, 0}, [{dir, rev}]),
-    ok.
-
-test_lookup_access(Btree, KeyValues) ->
-    FoldFun = fun({Key, Value}, {Key, Value}) -> {stop, true} end,
-    lists:foreach(
-        fun({Key, Value}) ->
-            [{ok, {Key, Value}}] = couch_btree:lookup(Btree, [Key]),
-            {ok, _, true} = couch_btree:foldl(Btree, FoldFun,
-                                              {Key, Value}, [{start_key, Key}])
-        end, KeyValues).
-
-test_final_reductions(Btree, KeyValues) ->
-    KVLen = length(KeyValues),
-    FoldLFun = fun(_X, LeadingReds, Acc) ->
-        CountToStart = KVLen div 3 + Acc,
-        CountToStart = couch_btree:final_reduce(Btree, LeadingReds),
-        {ok, Acc + 1}
-    end,
-    FoldRFun = fun(_X, LeadingReds, Acc) ->
-        CountToEnd = KVLen - KVLen div 3 + Acc,
-        CountToEnd = couch_btree:final_reduce(Btree, LeadingReds),
-        {ok, Acc + 1}
-    end,
-    {LStartKey, _} = case KVLen of
-        0 -> {nil, nil};
-        _ -> lists:nth(KVLen div 3 + 1, lists:sort(KeyValues))
-    end,
-    {RStartKey, _} = case KVLen of
-        0 -> {nil, nil};
-        _ -> lists:nth(KVLen div 3, lists:sort(KeyValues))
-    end,
-    {ok, _, FoldLRed} = couch_btree:foldl(Btree, FoldLFun, 0,
-                                          [{start_key, LStartKey}]),
-    {ok, _, FoldRRed} = couch_btree:fold(Btree, FoldRFun, 0,
-                                         [{dir, rev}, {start_key, RStartKey}]),
-    KVLen = FoldLRed + FoldRRed,
-    ok.
-
-test_traversal_callbacks(Btree, _KeyValues) ->
-    FoldFun = fun
-        (visit, _GroupedKey, _Unreduced, Acc) ->
-            {ok, Acc andalso false};
-        (traverse, _LK, _Red, Acc) ->
-            {skip, Acc andalso true}
-    end,
-    % With enough rows the root is a kp_node. Always skipping should reduce to true.
-    {ok, _, true} = couch_btree:fold(Btree, FoldFun, true, [{dir, fwd}]),
-    ok.

http://git-wip-us.apache.org/repos/asf/couchdb-couch/blob/0ce84d8e/test/couchdb/couch_changes_tests.erl
----------------------------------------------------------------------
diff --git a/test/couchdb/couch_changes_tests.erl b/test/couchdb/couch_changes_tests.erl
deleted file mode 100644
index a129ba2..0000000
--- a/test/couchdb/couch_changes_tests.erl
+++ /dev/null
@@ -1,612 +0,0 @@
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
--module(couch_changes_tests).
-
--include("couch_eunit.hrl").
--include_lib("couchdb/couch_db.hrl").
-
--define(ADMIN_USER, {user_ctx, #user_ctx{roles = [<<"_admin">>]}}).
--define(TIMEOUT, 3000).
--define(TEST_TIMEOUT, 10000).
-
--record(row, {
-    id,
-    seq,
-    deleted = false
-}).
-
-
-start() ->
-    {ok, Pid} = couch_server_sup:start_link(?CONFIG_CHAIN),
-    Pid.
-
-stop(Pid) ->
-    erlang:monitor(process, Pid),
-    couch_server_sup:stop(),
-    receive
-        {'DOWN', _, _, Pid, _} ->
-            ok
-    after ?TIMEOUT ->
-        throw({timeout, server_stop})
-    end.
-
-setup() ->
-    DbName = ?tempdb(),
-    {ok, Db} = create_db(DbName),
-    Revs = [R || {ok, R} <- [
-        save_doc(Db, {[{<<"_id">>, <<"doc1">>}]}),
-        save_doc(Db, {[{<<"_id">>, <<"doc2">>}]}),
-        save_doc(Db, {[{<<"_id">>, <<"doc3">>}]}),
-        save_doc(Db, {[{<<"_id">>, <<"doc4">>}]}),
-        save_doc(Db, {[{<<"_id">>, <<"doc5">>}]})
-    ]],
-    Rev = lists:nth(3, Revs),
-    {ok, Rev1} = save_doc(Db, {[{<<"_id">>, <<"doc3">>}, {<<"_rev">>, Rev}]}),
-    Revs1 = Revs ++ [Rev1],
-    Revs2 = Revs1 ++ [R || {ok, R} <- [
-        save_doc(Db, {[{<<"_id">>, <<"doc6">>}]}),
-        save_doc(Db, {[{<<"_id">>, <<"_design/foo">>}]}),
-        save_doc(Db, {[{<<"_id">>, <<"doc7">>}]}),
-        save_doc(Db, {[{<<"_id">>, <<"doc8">>}]})
-    ]],
-    {DbName, list_to_tuple(Revs2)}.
-
-teardown({DbName, _}) ->
-    delete_db(DbName),
-    ok.
-
-
-changes_test_() ->
-    {
-        "Changes feeed",
-        {
-            setup,
-            fun start/0, fun stop/1,
-            [
-                filter_by_doc_id(),
-                filter_by_design(),
-                continuous_feed(),
-                filter_by_custom_function()
-            ]
-        }
-    }.
-
-filter_by_doc_id() ->
-    {
-        "Filter _doc_id",
-        {
-            foreach,
-            fun setup/0, fun teardown/1,
-            [
-                fun should_filter_by_specific_doc_ids/1,
-                fun should_filter_by_specific_doc_ids_descending/1,
-                fun should_filter_by_specific_doc_ids_with_since/1,
-                fun should_filter_by_specific_doc_ids_no_result/1,
-                fun should_handle_deleted_docs/1
-            ]
-        }
-    }.
-
-filter_by_design() ->
-    {
-        "Filter _design",
-        {
-            foreach,
-            fun setup/0, fun teardown/1,
-            [
-                fun should_emit_only_design_documents/1
-            ]
-        }
-    }.
-
-filter_by_custom_function() ->
-    {
-        "Filter function",
-        {
-            foreach,
-            fun setup/0, fun teardown/1,
-            [
-                fun should_receive_heartbeats/1
-            ]
-        }
-    }.
-
-continuous_feed() ->
-    {
-        "Continuous Feed",
-        {
-            foreach,
-            fun setup/0, fun teardown/1,
-            [
-                fun should_filter_continuous_feed_by_specific_doc_ids/1
-            ]
-        }
-    }.
-
-
-should_filter_by_specific_doc_ids({DbName, _}) ->
-    ?_test(
-        begin
-            ChangesArgs = #changes_args{
-                filter = "_doc_ids"
-            },
-            DocIds = [<<"doc3">>, <<"doc4">>, <<"doc9999">>],
-            Req = {json_req, {[{<<"doc_ids">>, DocIds}]}},
-            Consumer = spawn_consumer(DbName, ChangesArgs, Req),
-
-            {Rows, LastSeq} = wait_finished(Consumer),
-            {ok, Db} = couch_db:open_int(DbName, []),
-            UpSeq = couch_db:get_update_seq(Db),
-            couch_db:close(Db),
-            stop_consumer(Consumer),
-
-            ?assertEqual(2, length(Rows)),
-            [#row{seq = Seq1, id = Id1}, #row{seq = Seq2, id = Id2}] = Rows,
-            ?assertEqual(<<"doc4">>, Id1),
-            ?assertEqual(4, Seq1),
-            ?assertEqual(<<"doc3">>, Id2),
-            ?assertEqual(6, Seq2),
-            ?assertEqual(UpSeq, LastSeq)
-        end).
-
-should_filter_by_specific_doc_ids_descending({DbName, _}) ->
-    ?_test(
-        begin
-            ChangesArgs = #changes_args{
-                filter = "_doc_ids",
-                dir = rev
-            },
-            DocIds = [<<"doc3">>, <<"doc4">>, <<"doc9999">>],
-            Req = {json_req, {[{<<"doc_ids">>, DocIds}]}},
-            Consumer = spawn_consumer(DbName, ChangesArgs, Req),
-
-            {Rows, LastSeq} = wait_finished(Consumer),
-            {ok, Db} = couch_db:open_int(DbName, []),
-            couch_db:close(Db),
-            stop_consumer(Consumer),
-
-            ?assertEqual(2, length(Rows)),
-            [#row{seq = Seq1, id = Id1}, #row{seq = Seq2, id = Id2}] = Rows,
-            ?assertEqual(<<"doc3">>, Id1),
-            ?assertEqual(6, Seq1),
-            ?assertEqual(<<"doc4">>, Id2),
-            ?assertEqual(4, Seq2),
-            ?assertEqual(4, LastSeq)
-        end).
-
-should_filter_by_specific_doc_ids_with_since({DbName, _}) ->
-    ?_test(
-        begin
-            ChangesArgs = #changes_args{
-                filter = "_doc_ids",
-                since = 5
-            },
-            DocIds = [<<"doc3">>, <<"doc4">>, <<"doc9999">>],
-            Req = {json_req, {[{<<"doc_ids">>, DocIds}]}},
-            Consumer = spawn_consumer(DbName, ChangesArgs, Req),
-
-            {Rows, LastSeq} = wait_finished(Consumer),
-            {ok, Db} = couch_db:open_int(DbName, []),
-            UpSeq = couch_db:get_update_seq(Db),
-            couch_db:close(Db),
-            stop_consumer(Consumer),
-
-            ?assertEqual(1, length(Rows)),
-            [#row{seq = Seq1, id = Id1}] = Rows,
-            ?assertEqual(<<"doc3">>, Id1),
-            ?assertEqual(6, Seq1),
-            ?assertEqual(UpSeq, LastSeq)
-        end).
-
-should_filter_by_specific_doc_ids_no_result({DbName, _}) ->
-    ?_test(
-        begin
-            ChangesArgs = #changes_args{
-                filter = "_doc_ids",
-                since = 6
-            },
-            DocIds = [<<"doc3">>, <<"doc4">>, <<"doc9999">>],
-            Req = {json_req, {[{<<"doc_ids">>, DocIds}]}},
-            Consumer = spawn_consumer(DbName, ChangesArgs, Req),
-
-            {Rows, LastSeq} = wait_finished(Consumer),
-            {ok, Db} = couch_db:open_int(DbName, []),
-            UpSeq = couch_db:get_update_seq(Db),
-            couch_db:close(Db),
-            stop_consumer(Consumer),
-
-            ?assertEqual(0, length(Rows)),
-            ?assertEqual(UpSeq, LastSeq)
-        end).
-
-should_handle_deleted_docs({DbName, Revs}) ->
-    ?_test(
-        begin
-            Rev3_2 = element(6, Revs),
-            {ok, Db} = couch_db:open_int(DbName, []),
-            {ok, _} = save_doc(
-                Db,
-                {[{<<"_id">>, <<"doc3">>},
-                  {<<"_deleted">>, true},
-                  {<<"_rev">>, Rev3_2}]}),
-
-            ChangesArgs = #changes_args{
-                filter = "_doc_ids",
-                since = 9
-            },
-            DocIds = [<<"doc3">>, <<"doc4">>, <<"doc9999">>],
-            Req = {json_req, {[{<<"doc_ids">>, DocIds}]}},
-            Consumer = spawn_consumer(DbName, ChangesArgs, Req),
-
-            {Rows, LastSeq} = wait_finished(Consumer),
-            couch_db:close(Db),
-            stop_consumer(Consumer),
-
-            ?assertEqual(1, length(Rows)),
-            ?assertMatch(
-                [#row{seq = LastSeq, id = <<"doc3">>, deleted = true}],
-                Rows
-            ),
-            ?assertEqual(11, LastSeq)
-        end).
-
-should_filter_continuous_feed_by_specific_doc_ids({DbName, Revs}) ->
-    ?_test(
-        begin
-            {ok, Db} = couch_db:open_int(DbName, []),
-            ChangesArgs = #changes_args{
-                filter = "_doc_ids",
-                feed = "continuous"
-            },
-            DocIds = [<<"doc3">>, <<"doc4">>, <<"doc9999">>],
-            Req = {json_req, {[{<<"doc_ids">>, DocIds}]}},
-            Consumer = spawn_consumer(DbName, ChangesArgs, Req),
-            pause(Consumer),
-
-            Rows = get_rows(Consumer),
-            ?assertEqual(2, length(Rows)),
-            [#row{seq = Seq1, id = Id1}, #row{seq = Seq2, id = Id2}] = Rows,
-            ?assertEqual(<<"doc4">>, Id1),
-            ?assertEqual(4, Seq1),
-            ?assertEqual(<<"doc3">>, Id2),
-            ?assertEqual(6, Seq2),
-
-            clear_rows(Consumer),
-            {ok, _Rev9} = save_doc(Db, {[{<<"_id">>, <<"doc9">>}]}),
-            {ok, _Rev10} = save_doc(Db, {[{<<"_id">>, <<"doc10">>}]}),
-            unpause(Consumer),
-            pause(Consumer),
-            ?assertEqual([], get_rows(Consumer)),
-
-            Rev4 = element(4, Revs),
-            Rev3_2 = element(6, Revs),
-            {ok, Rev4_2} = save_doc(Db, {[{<<"_id">>, <<"doc4">>},
-                                          {<<"_rev">>, Rev4}]}),
-            {ok, _} = save_doc(Db, {[{<<"_id">>, <<"doc11">>}]}),
-            {ok, _} = save_doc(Db, {[{<<"_id">>, <<"doc4">>},
-                                     {<<"_rev">>, Rev4_2}]}),
-            {ok, _} = save_doc(Db, {[{<<"_id">>, <<"doc12">>}]}),
-            {ok, Rev3_3} = save_doc(Db, {[{<<"_id">>, <<"doc3">>},
-                                          {<<"_rev">>, Rev3_2}]}),
-            unpause(Consumer),
-            pause(Consumer),
-
-            NewRows = get_rows(Consumer),
-            ?assertEqual(2, length(NewRows)),
-            [Row14, Row16] = NewRows,
-            ?assertEqual(<<"doc4">>, Row14#row.id),
-            ?assertEqual(15, Row14#row.seq),
-            ?assertEqual(<<"doc3">>, Row16#row.id),
-            ?assertEqual(17, Row16#row.seq),
-
-            clear_rows(Consumer),
-            {ok, _Rev3_4} = save_doc(Db, {[{<<"_id">>, <<"doc3">>},
-                                           {<<"_rev">>, Rev3_3}]}),
-            unpause(Consumer),
-            pause(Consumer),
-
-            FinalRows = get_rows(Consumer),
-
-            unpause(Consumer),
-            stop_consumer(Consumer),
-
-            ?assertMatch([#row{seq = 18, id = <<"doc3">>}], FinalRows)
-        end).
-
-should_emit_only_design_documents({DbName, Revs}) ->
-    ?_test(
-        begin
-            ChangesArgs = #changes_args{
-                filter = "_design"
-            },
-            Consumer = spawn_consumer(DbName, ChangesArgs, {json_req, null}),
-
-            {Rows, LastSeq} = wait_finished(Consumer),
-            {ok, Db} = couch_db:open_int(DbName, []),
-            UpSeq = couch_db:get_update_seq(Db),
-            couch_db:close(Db),
-
-            ?assertEqual(1, length(Rows)),
-            ?assertEqual(UpSeq, LastSeq),
-            ?assertEqual([#row{seq = 8, id = <<"_design/foo">>}], Rows),
-
-            stop_consumer(Consumer),
-
-            {ok, Db2} = couch_db:open_int(DbName, [?ADMIN_USER]),
-            {ok, _} = save_doc(Db2, {[{<<"_id">>, <<"_design/foo">>},
-                                      {<<"_rev">>, element(8, Revs)},
-                                      {<<"_deleted">>, true}]}),
-
-            Consumer2 = spawn_consumer(DbName, ChangesArgs, {json_req, null}),
-
-            {Rows2, LastSeq2} = wait_finished(Consumer2),
-            UpSeq2 = UpSeq + 1,
-            couch_db:close(Db2),
-
-            ?assertEqual(1, length(Rows2)),
-            ?assertEqual(UpSeq2, LastSeq2),
-            ?assertEqual([#row{seq = 11,
-                               id = <<"_design/foo">>,
-                               deleted = true}],
-                          Rows2)
-        end).
-
-should_receive_heartbeats(_) ->
-    {timeout, ?TEST_TIMEOUT div 1000,
-     ?_test(
-         begin
-             DbName = ?tempdb(),
-             Timeout = 100,
-             {ok, Db} = create_db(DbName),
-
-             {ok, _} = save_doc(Db, {[
-                 {<<"_id">>, <<"_design/filtered">>},
-                 {<<"language">>, <<"javascript">>},
-                 {<<"filters">>, {[
-                     {<<"foo">>, <<"function(doc) {
-                         return ['doc10', 'doc11', 'doc12'].indexOf(doc._id) != -1;}">>
-                 }]}}
-             ]}),
-
-             ChangesArgs = #changes_args{
-                 filter = "filtered/foo",
-                 feed = "continuous",
-                 timeout = 10000,
-                 heartbeat = 1000
-             },
-             Consumer = spawn_consumer(DbName, ChangesArgs, {json_req, null}),
-
-             {ok, _Rev1} = save_doc(Db, {[{<<"_id">>, <<"doc1">>}]}),
-             timer:sleep(Timeout),
-             {ok, _Rev2} = save_doc(Db, {[{<<"_id">>, <<"doc2">>}]}),
-             timer:sleep(Timeout),
-             {ok, _Rev3} = save_doc(Db, {[{<<"_id">>, <<"doc3">>}]}),
-             timer:sleep(Timeout),
-             {ok, _Rev4} = save_doc(Db, {[{<<"_id">>, <<"doc4">>}]}),
-             timer:sleep(Timeout),
-             {ok, _Rev5} = save_doc(Db, {[{<<"_id">>, <<"doc5">>}]}),
-             timer:sleep(Timeout),
-             {ok, _Rev6} = save_doc(Db, {[{<<"_id">>, <<"doc6">>}]}),
-             timer:sleep(Timeout),
-             {ok, _Rev7} = save_doc(Db, {[{<<"_id">>, <<"doc7">>}]}),
-             timer:sleep(Timeout),
-             {ok, _Rev8} = save_doc(Db, {[{<<"_id">>, <<"doc8">>}]}),
-             timer:sleep(Timeout),
-             {ok, _Rev9} = save_doc(Db, {[{<<"_id">>, <<"doc9">>}]}),
-
-             Heartbeats = get_heartbeats(Consumer),
-             ?assert(Heartbeats > 0),
-
-             {ok, _Rev10} = save_doc(Db, {[{<<"_id">>, <<"doc10">>}]}),
-             timer:sleep(Timeout),
-             {ok, _Rev11} = save_doc(Db, {[{<<"_id">>, <<"doc11">>}]}),
-             timer:sleep(Timeout),
-             {ok, _Rev12} = save_doc(Db, {[{<<"_id">>, <<"doc12">>}]}),
-
-             Heartbeats2 = get_heartbeats(Consumer),
-             ?assert(Heartbeats2 > Heartbeats),
-
-             Rows = get_rows(Consumer),
-             ?assertEqual(3, length(Rows)),
-
-             {ok, _Rev13} = save_doc(Db, {[{<<"_id">>, <<"doc13">>}]}),
-             timer:sleep(Timeout),
-             {ok, _Rev14} = save_doc(Db, {[{<<"_id">>, <<"doc14">>}]}),
-             timer:sleep(Timeout),
-
-             Heartbeats3 = get_heartbeats(Consumer),
-             ?assert(Heartbeats3 > Heartbeats2)
-        end)}.
-
-
-save_doc(Db, Json) ->
-    Doc = couch_doc:from_json_obj(Json),
-    {ok, Rev} = couch_db:update_doc(Db, Doc, []),
-    {ok, couch_doc:rev_to_str(Rev)}.
-
-get_rows(Consumer) ->
-    Ref = make_ref(),
-    Consumer ! {get_rows, Ref},
-    Resp = receive
-        {rows, Ref, Rows} ->
-            Rows
-    after ?TIMEOUT ->
-        timeout
-    end,
-    ?assertNotEqual(timeout, Resp),
-    Resp.
-
-get_heartbeats(Consumer) ->
-    Ref = make_ref(),
-    Consumer ! {get_heartbeats, Ref},
-    Resp = receive
-        {heartbeats, Ref, HeartBeats} ->
-            HeartBeats
-    after ?TIMEOUT ->
-        timeout
-    end,
-    ?assertNotEqual(timeout, Resp),
-    Resp.
-
-clear_rows(Consumer) ->
-    Ref = make_ref(),
-    Consumer ! {reset, Ref},
-    Resp = receive
-        {ok, Ref} ->
-            ok
-    after ?TIMEOUT ->
-        timeout
-    end,
-    ?assertNotEqual(timeout, Resp),
-    Resp.
-
-stop_consumer(Consumer) ->
-    Ref = make_ref(),
-    Consumer ! {stop, Ref},
-    Resp = receive
-        {ok, Ref} ->
-            ok
-    after ?TIMEOUT ->
-        timeout
-    end,
-    ?assertNotEqual(timeout, Resp),
-    Resp.
-
-pause(Consumer) ->
-    Ref = make_ref(),
-    Consumer ! {pause, Ref},
-    Resp = receive
-        {paused, Ref} ->
-            ok
-    after ?TIMEOUT ->
-        timeout
-    end,
-    ?assertNotEqual(timeout, Resp),
-    Resp.
-
-unpause(Consumer) ->
-    Ref = make_ref(),
-    Consumer ! {continue, Ref},
-    Resp = receive
-        {ok, Ref} ->
-            ok
-    after ?TIMEOUT ->
-        timeout
-    end,
-    ?assertNotEqual(timeout, Resp),
-    Resp.
-
-wait_finished(_Consumer) ->
-    Resp = receive
-        {consumer_finished, Rows, LastSeq} ->
-            {Rows, LastSeq}
-    after ?TIMEOUT ->
-        timeout
-    end,
-    ?assertNotEqual(timeout, Resp),
-    Resp.
-
-spawn_consumer(DbName, ChangesArgs0, Req) ->
-    Parent = self(),
-    spawn(fun() ->
-        put(heartbeat_count, 0),
-        Callback = fun
-            ({change, {Change}, _}, _, Acc) ->
-                Id = couch_util:get_value(<<"id">>, Change),
-                Seq = couch_util:get_value(<<"seq">>, Change),
-                Del = couch_util:get_value(<<"deleted">>, Change, false),
-                [#row{id = Id, seq = Seq, deleted = Del} | Acc];
-            ({stop, LastSeq}, _, Acc) ->
-                Parent ! {consumer_finished, lists:reverse(Acc), LastSeq},
-                stop_loop(Parent, Acc);
-            (timeout, _, Acc) ->
-                put(heartbeat_count, get(heartbeat_count) + 1),
-                maybe_pause(Parent, Acc);
-            (_, _, Acc) ->
-                maybe_pause(Parent, Acc)
-        end,
-        {ok, Db} = couch_db:open_int(DbName, []),
-        ChangesArgs = case (ChangesArgs0#changes_args.timeout =:= undefined)
-            andalso (ChangesArgs0#changes_args.heartbeat =:= undefined) of
-            true ->
-                ChangesArgs0#changes_args{timeout = 10, heartbeat = 10};
-            false ->
-                ChangesArgs0
-        end,
-        FeedFun = couch_changes:handle_changes(ChangesArgs, Req, Db),
-        try
-            FeedFun({Callback, []})
-        catch throw:{stop, _} ->
-            ok
-        end,
-        catch couch_db:close(Db)
-    end).
-
-maybe_pause(Parent, Acc) ->
-    receive
-        {get_rows, Ref} ->
-            Parent ! {rows, Ref, lists:reverse(Acc)},
-            maybe_pause(Parent, Acc);
-        {get_heartbeats, Ref} ->
-            Parent ! {heartbeats, Ref, get(heartbeat_count)},
-            maybe_pause(Parent, Acc);
-        {reset, Ref} ->
-            Parent ! {ok, Ref},
-            maybe_pause(Parent, []);
-        {pause, Ref} ->
-            Parent ! {paused, Ref},
-            pause_loop(Parent, Acc);
-        {stop, Ref} ->
-            Parent ! {ok, Ref},
-            throw({stop, Acc});
-        V ->
-            erlang:error({assertion_failed,
-                      [{module, ?MODULE},
-                       {line, ?LINE},
-                       {value, V},
-                       {reason, "Received unexpected message"}]})
-    after 0 ->
-        Acc
-    end.
-
-pause_loop(Parent, Acc) ->
-    receive
-        {stop, Ref} ->
-            Parent ! {ok, Ref},
-            throw({stop, Acc});
-        {reset, Ref} ->
-            Parent ! {ok, Ref},
-            pause_loop(Parent, []);
-        {continue, Ref} ->
-            Parent ! {ok, Ref},
-            Acc;
-        {get_rows, Ref} ->
-            Parent ! {rows, Ref, lists:reverse(Acc)},
-            pause_loop(Parent, Acc)
-    end.
-
-stop_loop(Parent, Acc) ->
-    receive
-        {get_rows, Ref} ->
-            Parent ! {rows, Ref, lists:reverse(Acc)},
-            stop_loop(Parent, Acc);
-        {stop, Ref} ->
-            Parent ! {ok, Ref},
-            Acc
-    end.
-
-create_db(DbName) ->
-    couch_db:create(DbName, [?ADMIN_USER, overwrite]).
-
-delete_db(DbName) ->
-    ok = couch_server:delete(DbName, [?ADMIN_USER]).

http://git-wip-us.apache.org/repos/asf/couchdb-couch/blob/0ce84d8e/test/couchdb/couch_db_tests.erl
----------------------------------------------------------------------
diff --git a/test/couchdb/couch_db_tests.erl b/test/couchdb/couch_db_tests.erl
deleted file mode 100644
index 3089714..0000000
--- a/test/couchdb/couch_db_tests.erl
+++ /dev/null
@@ -1,114 +0,0 @@
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
--module(couch_db_tests).
-
--include("couch_eunit.hrl").
-
--define(TIMEOUT, 120).
-
-
-setup() ->
-    {ok, _} = couch_server_sup:start_link(?CONFIG_CHAIN),
-    couch_config:set("log", "include_sasl", "false", false),
-    ok.
-
-teardown(_) ->
-    couch_server_sup:stop().
-
-
-create_delete_db_test_() ->
-    {
-        "Database create/delete tests",
-        {
-            setup,
-            fun setup/0, fun teardown/1,
-            fun(_) ->
-                [should_create_db(),
-                 should_delete_db(),
-                 should_create_multiple_dbs(),
-                 should_delete_multiple_dbs(),
-                 should_create_delete_database_continuously()]
-            end
-        }
-    }.
-
-
-should_create_db() ->
-    DbName = ?tempdb(),
-    {ok, Db} = couch_db:create(DbName, []),
-    ok = couch_db:close(Db),
-    {ok, AllDbs} = couch_server:all_databases(),
-    ?_assert(lists:member(DbName, AllDbs)).
-
-should_delete_db() ->
-    DbName = ?tempdb(),
-    couch_db:create(DbName, []),
-    couch_server:delete(DbName, []),
-    {ok, AllDbs} = couch_server:all_databases(),
-    ?_assertNot(lists:member(DbName, AllDbs)).
-
-should_create_multiple_dbs() ->
-    gen_server:call(couch_server, {set_max_dbs_open, 3}),
-
-    DbNames = [?tempdb() || _ <- lists:seq(1, 6)],
-    lists:foreach(fun(DbName) ->
-        {ok, Db} = couch_db:create(DbName, []),
-        ok = couch_db:close(Db)
-    end, DbNames),
-
-    {ok, AllDbs} = couch_server:all_databases(),
-    NumCreated = lists:foldl(fun(DbName, Acc) ->
-        ?assert(lists:member(DbName, AllDbs)),
-        Acc+1
-    end, 0, DbNames),
-
-    ?_assertEqual(NumCreated, 6).
-
-should_delete_multiple_dbs() ->
-    DbNames = [?tempdb() || _ <- lists:seq(1, 6)],
-    lists:foreach(fun(DbName) ->
-        {ok, Db} = couch_db:create(DbName, []),
-        ok = couch_db:close(Db)
-    end, DbNames),
-
-    lists:foreach(fun(DbName) ->
-        ok = couch_server:delete(DbName, [])
-    end, DbNames),
-
-    {ok, AllDbs} = couch_server:all_databases(),
-    NumDeleted = lists:foldl(fun(DbName, Acc) ->
-        ?assertNot(lists:member(DbName, AllDbs)),
-        Acc + 1
-    end, 0, DbNames),
-
-    ?_assertEqual(NumDeleted, 6).
-
-should_create_delete_database_continuously() ->
-    DbName = ?tempdb(),
-    {ok, Db} = couch_db:create(DbName, []),
-    couch_db:close(Db),
-    [{timeout, ?TIMEOUT, {integer_to_list(N) ++ " times",
-                           ?_assert(loop(DbName, N))}}
-     || N <- [10, 100, 1000]].
-
-loop(_, 0) ->
-    true;
-loop(DbName, N) ->
-    ok = cycle(DbName),
-    loop(DbName, N - 1).
-
-cycle(DbName) ->
-    ok = couch_server:delete(DbName, []),
-    {ok, Db} = couch_db:create(DbName, []),
-    couch_db:close(Db),
-    ok.

http://git-wip-us.apache.org/repos/asf/couchdb-couch/blob/0ce84d8e/test/couchdb/couch_doc_json_tests.erl
----------------------------------------------------------------------
diff --git a/test/couchdb/couch_doc_json_tests.erl b/test/couchdb/couch_doc_json_tests.erl
deleted file mode 100644
index 1592b6b..0000000
--- a/test/couchdb/couch_doc_json_tests.erl
+++ /dev/null
@@ -1,391 +0,0 @@
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
--module(couch_doc_json_tests).
-
--include("couch_eunit.hrl").
--include_lib("couchdb/couch_db.hrl").
-
-
-setup() ->
-    couch_config:start_link(?CONFIG_CHAIN),
-    couch_config:set("attachments", "compression_level", "0", false),
-    ok.
-
-teardown(_) ->
-    couch_config:stop().
-
-
-json_doc_test_() ->
-    {
-        setup,
-        fun setup/0, fun teardown/1,
-        [
-            {
-                "Document from JSON",
-                [
-                    from_json_success_cases(),
-                    from_json_error_cases()
-                ]
-            },
-            {
-                "Document to JSON",
-                [
-                    to_json_success_cases()
-                ]
-            }
-        ]
-    }.
-
-from_json_success_cases() ->
-    Cases = [
-        {
-            {[]},
-            #doc{},
-            "Return an empty document for an empty JSON object."
-        },
-        {
-            {[{<<"_id">>, <<"zing!">>}]},
-            #doc{id = <<"zing!">>},
-            "Parses document ids."
-        },
-        {
-            {[{<<"_id">>, <<"_design/foo">>}]},
-            #doc{id = <<"_design/foo">>},
-            "_design/document ids."
-        },
-        {
-            {[{<<"_id">>, <<"_local/bam">>}]},
-            #doc{id = <<"_local/bam">>},
-            "_local/document ids."
-        },
-        {
-            {[{<<"_rev">>, <<"4-230234">>}]},
-            #doc{revs = {4, [<<"230234">>]}},
-            "_rev stored in revs."
-        },
-        {
-            {[{<<"soap">>, 35}]},
-            #doc{body = {[{<<"soap">>, 35}]}},
-            "Non underscore prefixed fields stored in body."
-        },
-        {
-            {[{<<"_attachments">>, {[
-                {<<"my_attachment.fu">>, {[
-                    {<<"stub">>, true},
-                    {<<"content_type">>, <<"application/awesome">>},
-                    {<<"length">>, 45}
-                ]}},
-                {<<"noahs_private_key.gpg">>, {[
-                    {<<"data">>, <<"SSBoYXZlIGEgcGV0IGZpc2gh">>},
-                    {<<"content_type">>, <<"application/pgp-signature">>}
-                ]}}
-            ]}}]},
-            #doc{atts = [
-                #att{
-                    name = <<"my_attachment.fu">>,
-                    data = stub,
-                    type = <<"application/awesome">>,
-                    att_len = 45,
-                    disk_len = 45,
-                    revpos = nil
-                },
-                #att{
-                    name = <<"noahs_private_key.gpg">>,
-                    data = <<"I have a pet fish!">>,
-                    type = <<"application/pgp-signature">>,
-                    att_len = 18,
-                    disk_len = 18,
-                    revpos = 0
-                }
-            ]},
-            "Attachments are parsed correctly."
-        },
-        {
-            {[{<<"_deleted">>, true}]},
-            #doc{deleted = true},
-            "_deleted controls the deleted field."
-        },
-        {
-            {[{<<"_deleted">>, false}]},
-            #doc{},
-            "{\"_deleted\": false} is ok."
-        },
-        {
-            {[
-                 {<<"_revisions">>,
-                  {[{<<"start">>, 4},
-                    {<<"ids">>, [<<"foo1">>, <<"phi3">>, <<"omega">>]}]}},
-                 {<<"_rev">>, <<"6-something">>}
-             ]},
-            #doc{revs = {4, [<<"foo1">>, <<"phi3">>, <<"omega">>]}},
-            "_revisions attribute are preferred to _rev."
-        },
-        {
-            {[{<<"_revs_info">>, dropping}]},
-            #doc{},
-            "Drops _revs_info."
-        },
-        {
-            {[{<<"_local_seq">>, dropping}]},
-            #doc{},
-            "Drops _local_seq."
-        },
-        {
-            {[{<<"_conflicts">>, dropping}]},
-            #doc{},
-            "Drops _conflicts."
-        },
-        {
-            {[{<<"_deleted_conflicts">>, dropping}]},
-            #doc{},
-            "Drops _deleted_conflicts."
-        }
-    ],
-    lists:map(
-        fun({EJson, Expect, Msg}) ->
-            {Msg, ?_assertMatch(Expect, couch_doc:from_json_obj(EJson))}
-        end,
-        Cases).
-
-from_json_error_cases() ->
-    Cases = [
-        {
-            [],
-            {bad_request, "Document must be a JSON object"},
-            "arrays are invalid"
-        },
-        {
-            4,
-            {bad_request, "Document must be a JSON object"},
-            "integers are invalid"
-        },
-        {
-            true,
-            {bad_request, "Document must be a JSON object"},
-            "literals are invalid"
-        },
-        {
-            {[{<<"_id">>, {[{<<"foo">>, 5}]}}]},
-            {bad_request, <<"Document id must be a string">>},
-            "Document id must be a string."
-        },
-        {
-            {[{<<"_id">>, <<"_random">>}]},
-            {bad_request,
-             <<"Only reserved document ids may start with underscore.">>},
-            "Disallow arbitrary underscore prefixed docids."
-        },
-        {
-            {[{<<"_rev">>, 5}]},
-            {bad_request, <<"Invalid rev format">>},
-            "_rev must be a string"
-        },
-        {
-            {[{<<"_rev">>, "foobar"}]},
-            {bad_request, <<"Invalid rev format">>},
-            "_rev must be %d-%s"
-        },
-        {
-            {[{<<"_rev">>, "foo-bar"}]},
-            "Error if _rev's integer expection is broken."
-        },
-        {
-            {[{<<"_revisions">>, {[{<<"start">>, true}]}}]},
-            {doc_validation, "_revisions.start isn't an integer."},
-            "_revisions.start must be an integer."
-        },
-        {
-            {[{<<"_revisions">>, {[{<<"start">>, 0}, {<<"ids">>, 5}]}}]},
-            {doc_validation, "_revisions.ids isn't a array."},
-            "_revions.ids must be a list."
-        },
-        {
-            {[{<<"_revisions">>, {[{<<"start">>, 0}, {<<"ids">>, [5]}]}}]},
-            {doc_validation, "RevId isn't a string"},
-            "Revision ids must be strings."
-        },
-        {
-            {[{<<"_something">>, 5}]},
-            {doc_validation, <<"Bad special document member: _something">>},
-            "Underscore prefix fields are reserved."
-        }
-    ],
-
-    lists:map(fun
-        ({EJson, Expect, Msg}) ->
-            Error = (catch couch_doc:from_json_obj(EJson)),
-            {Msg, ?_assertMatch(Expect, Error)};
-        ({EJson, Msg}) ->
-            try
-                couch_doc:from_json_obj(EJson),
-                {"Conversion failed to raise an exception", ?_assert(false)}
-            catch
-                _:_ -> {Msg, ?_assert(true)}
-            end
-    end, Cases).
-
-to_json_success_cases() ->
-    Cases = [
-        {
-            #doc{},
-            {[{<<"_id">>, <<"">>}]},
-            "Empty docs are {\"_id\": \"\"}"
-        },
-        {
-            #doc{id = <<"foo">>},
-            {[{<<"_id">>, <<"foo">>}]},
-            "_id is added."
-        },
-        {
-            #doc{revs = {5, ["foo"]}},
-            {[{<<"_id">>, <<>>}, {<<"_rev">>, <<"5-foo">>}]},
-            "_rev is added."
-        },
-        {
-            [revs],
-            #doc{revs = {5, [<<"first">>, <<"second">>]}},
-            {[
-                 {<<"_id">>, <<>>},
-                 {<<"_rev">>, <<"5-first">>},
-                 {<<"_revisions">>, {[
-                     {<<"start">>, 5},
-                     {<<"ids">>, [<<"first">>, <<"second">>]}
-                 ]}}
-             ]},
-            "_revisions include with revs option"
-        },
-        {
-            #doc{body = {[{<<"foo">>, <<"bar">>}]}},
-            {[{<<"_id">>, <<>>}, {<<"foo">>, <<"bar">>}]},
-            "Arbitrary fields are added."
-        },
-        {
-            #doc{deleted = true, body = {[{<<"foo">>, <<"bar">>}]}},
-            {[{<<"_id">>, <<>>}, {<<"foo">>, <<"bar">>}, {<<"_deleted">>, true}]},
-            "Deleted docs no longer drop body members."
-        },
-        {
-            #doc{meta = [
-                {revs_info, 4, [{<<"fin">>, deleted}, {<<"zim">>, missing}]}
-            ]},
-            {[
-                 {<<"_id">>, <<>>},
-                 {<<"_revs_info">>, [
-                     {[{<<"rev">>, <<"4-fin">>}, {<<"status">>, <<"deleted">>}]},
-                     {[{<<"rev">>, <<"3-zim">>}, {<<"status">>, <<"missing">>}]}
-                 ]}
-             ]},
-            "_revs_info field is added correctly."
-        },
-        {
-            #doc{meta = [{local_seq, 5}]},
-            {[{<<"_id">>, <<>>}, {<<"_local_seq">>, 5}]},
-            "_local_seq is added as an integer."
-        },
-        {
-            #doc{meta = [{conflicts, [{3, <<"yep">>}, {1, <<"snow">>}]}]},
-            {[
-                {<<"_id">>, <<>>},
-                {<<"_conflicts">>, [<<"3-yep">>, <<"1-snow">>]}
-            ]},
-            "_conflicts is added as an array of strings."
-        },
-        {
-            #doc{meta = [{deleted_conflicts, [{10923, <<"big_cowboy_hat">>}]}]},
-            {[
-                 {<<"_id">>, <<>>},
-                 {<<"_deleted_conflicts">>, [<<"10923-big_cowboy_hat">>]}
-             ]},
-            "_deleted_conflicsts is added as an array of strings."
-        },
-        {
-            #doc{atts = [
-                #att{
-                    name = <<"big.xml">>,
-                    type = <<"xml/sucks">>,
-                    data = fun() -> ok end,
-                    revpos = 1,
-                    att_len = 400,
-                    disk_len = 400
-                },
-                #att{
-                    name = <<"fast.json">>,
-                    type = <<"json/ftw">>,
-                    data = <<"{\"so\": \"there!\"}">>,
-                    revpos = 1,
-                    att_len = 16,
-                    disk_len = 16
-                }
-            ]},
-            {[
-                 {<<"_id">>, <<>>},
-                 {<<"_attachments">>, {[
-                       {<<"big.xml">>, {[
-                           {<<"content_type">>, <<"xml/sucks">>},
-                           {<<"revpos">>, 1},
-                           {<<"length">>, 400},
-                           {<<"stub">>, true}
-                       ]}},
-                       {<<"fast.json">>, {[
-                           {<<"content_type">>, <<"json/ftw">>},
-                           {<<"revpos">>, 1},
-                           {<<"length">>, 16},
-                           {<<"stub">>, true}
-                       ]}}
-                ]}}
-            ]},
-            "Attachments attached as stubs only include a length."
-        },
-        {
-            [attachments],
-            #doc{atts = [
-                #att{
-                    name = <<"stuff.txt">>,
-                    type = <<"text/plain">>,
-                    data = fun() -> <<"diet pepsi">> end,
-                    revpos = 1,
-                    att_len = 10,
-                    disk_len = 10
-                },
-                #att{
-                    name = <<"food.now">>,
-                    type = <<"application/food">>,
-                    revpos = 1,
-                    data = <<"sammich">>
-                }
-            ]},
-            {[
-                {<<"_id">>, <<>>},
-                {<<"_attachments">>, {[
-                   {<<"stuff.txt">>, {[
-                       {<<"content_type">>, <<"text/plain">>},
-                       {<<"revpos">>, 1},
-                       {<<"data">>, <<"ZGlldCBwZXBzaQ==">>}
-                   ]}},
-                   {<<"food.now">>, {[
-                       {<<"content_type">>, <<"application/food">>},
-                       {<<"revpos">>, 1},
-                       {<<"data">>, <<"c2FtbWljaA==">>}
-                   ]}}
-                ]}}
-            ]},
-            "Attachments included inline with attachments option."
-        }
-    ],
-
-    lists:map(fun
-        ({Doc, EJson, Msg}) ->
-            {Msg, ?_assertMatch(EJson, couch_doc:to_json_obj(Doc, []))};
-        ({Options, Doc, EJson, Msg}) ->
-            {Msg, ?_assertMatch(EJson, couch_doc:to_json_obj(Doc, Options))}
-    end, Cases).
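
A quick aside on the inline-attachment case at the end of that list: the expected "data" values are nothing more than the base64 encoding of the raw attachment bodies, which can be confirmed from any Erlang shell (illustrative only):

    1> base64:encode(<<"diet pepsi">>).
    <<"ZGlldCBwZXBzaQ==">>
    2> base64:encode(<<"sammich">>).
    <<"c2FtbWljaA==">>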

http://git-wip-us.apache.org/repos/asf/couchdb-couch/blob/0ce84d8e/test/couchdb/couch_file_tests.erl
----------------------------------------------------------------------
diff --git a/test/couchdb/couch_file_tests.erl b/test/couchdb/couch_file_tests.erl
deleted file mode 100644
index ad13383..0000000
--- a/test/couchdb/couch_file_tests.erl
+++ /dev/null
@@ -1,265 +0,0 @@
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
--module(couch_file_tests).
-
--include("couch_eunit.hrl").
-
--define(BLOCK_SIZE, 4096).
--define(setup(F), {setup, fun setup/0, fun teardown/1, F}).
--define(foreach(Fs), {foreach, fun setup/0, fun teardown/1, Fs}).
-
-
-setup() ->
-    {ok, Fd} = couch_file:open(?tempfile(), [create, overwrite]),
-    Fd.
-
-teardown(Fd) ->
-    ok = couch_file:close(Fd).
-
-
-open_close_test_() ->
-    {
-        "Test for proper file open and close",
-        [
-            should_return_enoent_if_missed(),
-            should_ignore_invalid_flags_with_open(),
-            ?setup(fun should_return_pid_on_file_open/1),
-            should_close_file_properly(),
-            ?setup(fun should_create_empty_new_files/1)
-        ]
-    }.
-
-should_return_enoent_if_missed() ->
-    ?_assertEqual({error, enoent}, couch_file:open("not a real file")).
-
-should_ignore_invalid_flags_with_open() ->
-    ?_assertMatch({ok, _},
-                  couch_file:open(?tempfile(), [create, invalid_option])).
-
-should_return_pid_on_file_open(Fd) ->
-    ?_assert(is_pid(Fd)).
-
-should_close_file_properly() ->
-    {ok, Fd} = couch_file:open(?tempfile(), [create, overwrite]),
-    ok = couch_file:close(Fd),
-    ?_assert(true).
-
-should_create_empty_new_files(Fd) ->
-    ?_assertMatch({ok, 0}, couch_file:bytes(Fd)).
-
-
-read_write_test_() ->
-    {
-        "Common file read/write tests",
-        ?foreach([
-            fun should_increase_file_size_on_write/1,
-            fun should_return_current_file_size_on_write/1,
-            fun should_write_and_read_term/1,
-            fun should_write_and_read_binary/1,
-            fun should_write_and_read_large_binary/1,
-            fun should_return_term_as_binary_for_reading_binary/1,
-            fun should_read_term_written_as_binary/1,
-            fun should_read_iolist/1,
-            fun should_fsync/1,
-            fun should_not_read_beyond_eof/1,
-            fun should_truncate/1
-        ])
-    }.
-
-
-should_increase_file_size_on_write(Fd) ->
-    {ok, 0, _} = couch_file:append_term(Fd, foo),
-    {ok, Size} = couch_file:bytes(Fd),
-    ?_assert(Size > 0).
-
-should_return_current_file_size_on_write(Fd) ->
-    {ok, 0, _} = couch_file:append_term(Fd, foo),
-    {ok, Size} = couch_file:bytes(Fd),
-    ?_assertMatch({ok, Size, _}, couch_file:append_term(Fd, bar)).
-
-should_write_and_read_term(Fd) ->
-    {ok, Pos, _} = couch_file:append_term(Fd, foo),
-    ?_assertMatch({ok, foo}, couch_file:pread_term(Fd, Pos)).
-
-should_write_and_read_binary(Fd) ->
-    {ok, Pos, _} = couch_file:append_binary(Fd, <<"fancy!">>),
-    ?_assertMatch({ok, <<"fancy!">>}, couch_file:pread_binary(Fd, Pos)).
-
-should_return_term_as_binary_for_reading_binary(Fd) ->
-    {ok, Pos, _} = couch_file:append_term(Fd, foo),
-    Foo = couch_compress:compress(foo, snappy),
-    ?_assertMatch({ok, Foo}, couch_file:pread_binary(Fd, Pos)).
-
-should_read_term_written_as_binary(Fd) ->
-    {ok, Pos, _} = couch_file:append_binary(Fd, <<131,100,0,3,102,111,111>>),
-    ?_assertMatch({ok, foo}, couch_file:pread_term(Fd, Pos)).
-
-should_write_and_read_large_binary(Fd) ->
-    BigBin = list_to_binary(lists:duplicate(100000, 0)),
-    {ok, Pos, _} = couch_file:append_binary(Fd, BigBin),
-    ?_assertMatch({ok, BigBin}, couch_file:pread_binary(Fd, Pos)).
-
-should_read_iolist(Fd) ->
-    %% append_binary == append_iolist?
-    %% Possible bug in pread_iolist or iolist() -> append_binary
-    {ok, Pos, _} = couch_file:append_binary(Fd, ["foo", $m, <<"bam">>]),
-    {ok, IoList} = couch_file:pread_iolist(Fd, Pos),
-    ?_assertMatch(<<"foombam">>, iolist_to_binary(IoList)).
-
-should_fsync(Fd) ->
-    {"How does on test fsync?", ?_assertMatch(ok, couch_file:sync(Fd))}.
-
-should_not_read_beyond_eof(_) ->
-    {"No idea how to test reading beyond EOF", ?_assert(true)}.
-
-should_truncate(Fd) ->
-    {ok, 0, _} = couch_file:append_term(Fd, foo),
-    {ok, Size} = couch_file:bytes(Fd),
-    BigBin = list_to_binary(lists:duplicate(100000, 0)),
-    {ok, _, _} = couch_file:append_binary(Fd, BigBin),
-    ok = couch_file:truncate(Fd, Size),
-    ?_assertMatch({ok, foo}, couch_file:pread_term(Fd, 0)).
-
-
-header_test_() ->
-    {
-        "File header read/write tests",
-        [
-            ?foreach([
-                fun should_write_and_read_atom_header/1,
-                fun should_write_and_read_tuple_header/1,
-                fun should_write_and_read_second_header/1,
-                fun should_truncate_second_header/1,
-                fun should_produce_same_file_size_on_rewrite/1,
-                fun should_save_headers_larger_than_block_size/1
-            ]),
-            should_recover_header_marker_corruption(),
-            should_recover_header_size_corruption(),
-            should_recover_header_md5sig_corruption(),
-            should_recover_header_data_corruption()
-        ]
-    }.
-
-
-should_write_and_read_atom_header(Fd) ->
-    ok = couch_file:write_header(Fd, hello),
-    ?_assertMatch({ok, hello}, couch_file:read_header(Fd)).
-
-should_write_and_read_tuple_header(Fd) ->
-    ok = couch_file:write_header(Fd, {<<"some_data">>, 32}),
-    ?_assertMatch({ok, {<<"some_data">>, 32}}, couch_file:read_header(Fd)).
-
-should_write_and_read_second_header(Fd) ->
-    ok = couch_file:write_header(Fd, {<<"some_data">>, 32}),
-    ok = couch_file:write_header(Fd, [foo, <<"more">>]),
-    ?_assertMatch({ok, [foo, <<"more">>]}, couch_file:read_header(Fd)).
-
-should_truncate_second_header(Fd) ->
-    ok = couch_file:write_header(Fd, {<<"some_data">>, 32}),
-    {ok, Size} = couch_file:bytes(Fd),
-    ok = couch_file:write_header(Fd, [foo, <<"more">>]),
-    ok = couch_file:truncate(Fd, Size),
-    ?_assertMatch({ok, {<<"some_data">>, 32}}, couch_file:read_header(Fd)).
-
-should_produce_same_file_size_on_rewrite(Fd) ->
-    ok = couch_file:write_header(Fd, {<<"some_data">>, 32}),
-    {ok, Size1} = couch_file:bytes(Fd),
-    ok = couch_file:write_header(Fd, [foo, <<"more">>]),
-    {ok, Size2} = couch_file:bytes(Fd),
-    ok = couch_file:truncate(Fd, Size1),
-    ok = couch_file:write_header(Fd, [foo, <<"more">>]),
-    ?_assertMatch({ok, Size2}, couch_file:bytes(Fd)).
-
-should_save_headers_larger_than_block_size(Fd) ->
-    Header = erlang:make_tuple(5000, <<"CouchDB">>),
-    couch_file:write_header(Fd, Header),
-    {"COUCHDB-1319", ?_assertMatch({ok, Header}, couch_file:read_header(Fd))}.
-
-
-should_recover_header_marker_corruption() ->
-    ?_assertMatch(
-        ok,
-        check_header_recovery(
-            fun(CouchFd, RawFd, Expect, HeaderPos) ->
-                ?assertNotMatch(Expect, couch_file:read_header(CouchFd)),
-                file:pwrite(RawFd, HeaderPos, <<0>>),
-                ?assertMatch(Expect, couch_file:read_header(CouchFd))
-            end)
-    ).
-
-should_recover_header_size_corruption() ->
-    ?_assertMatch(
-        ok,
-        check_header_recovery(
-            fun(CouchFd, RawFd, Expect, HeaderPos) ->
-                ?assertNotMatch(Expect, couch_file:read_header(CouchFd)),
-                % +1 for 0x1 byte marker
-                file:pwrite(RawFd, HeaderPos + 1, <<10/integer>>),
-                ?assertMatch(Expect, couch_file:read_header(CouchFd))
-            end)
-    ).
-
-should_recover_header_md5sig_corruption() ->
-    ?_assertMatch(
-        ok,
-        check_header_recovery(
-            fun(CouchFd, RawFd, Expect, HeaderPos) ->
-                ?assertNotMatch(Expect, couch_file:read_header(CouchFd)),
-                % +5 = +1 for 0x1 byte and +4 for term size.
-                file:pwrite(RawFd, HeaderPos + 5, <<"F01034F88D320B22">>),
-                ?assertMatch(Expect, couch_file:read_header(CouchFd))
-            end)
-    ).
-
-should_recover_header_data_corruption() ->
-    ?_assertMatch(
-        ok,
-        check_header_recovery(
-            fun(CouchFd, RawFd, Expect, HeaderPos) ->
-                ?assertNotMatch(Expect, couch_file:read_header(CouchFd)),
-                % +21 = +1 for 0x1 byte, +4 for term size and +16 for MD5 sig
-                file:pwrite(RawFd, HeaderPos + 21, <<"some data goes here!">>),
-                ?assertMatch(Expect, couch_file:read_header(CouchFd))
-            end)
-    ).
-
-
-check_header_recovery(CheckFun) ->
-    Path = ?tempfile(),
-    {ok, Fd} = couch_file:open(Path, [create, overwrite]),
-    {ok, RawFd} = file:open(Path, [read, write, raw, binary]),
-
-    {ok, _} = write_random_data(Fd),
-    ExpectHeader = {some_atom, <<"a binary">>, 756},
-    ok = couch_file:write_header(Fd, ExpectHeader),
-
-    {ok, HeaderPos} = write_random_data(Fd),
-    ok = couch_file:write_header(Fd, {2342, <<"corruption! greed!">>}),
-
-    CheckFun(Fd, RawFd, {ok, ExpectHeader}, HeaderPos),
-
-    ok = file:close(RawFd),
-    ok = couch_file:close(Fd),
-    ok.
-
-write_random_data(Fd) ->
-    write_random_data(Fd, 100 + random:uniform(1000)).
-
-write_random_data(Fd, 0) ->
-    {ok, Bytes} = couch_file:bytes(Fd),
-    {ok, (1 + Bytes div ?BLOCK_SIZE) * ?BLOCK_SIZE};
-write_random_data(Fd, N) ->
-    Choices = [foo, bar, <<"bizzingle">>, "bank", ["rough", stuff]],
-    Term = lists:nth(random:uniform(length(Choices)), Choices),
-    {ok, _, _} = couch_file:append_term(Fd, Term),
-    write_random_data(Fd, N - 1).
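
The offsets used by the header-corruption tests above imply a particular on-disk prefix at a block-aligned header position: one 0x1 marker byte, a 4-byte term size, a 16-byte MD5 signature, then the serialized header term. A minimal sketch of that assumption, inferred purely from the test comments rather than from couch_file itself:

    %% Assumed layout only; field widths taken from the +1/+5/+21 offsets above.
    parse_header_prefix(<<1:8, TermSize:32/integer, Md5Sig:16/binary, Rest/binary>>) ->
        {TermSize, Md5Sig, Rest}.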

http://git-wip-us.apache.org/repos/asf/couchdb-couch/blob/0ce84d8e/test/couchdb/couch_key_tree_tests.erl
----------------------------------------------------------------------
diff --git a/test/couchdb/couch_key_tree_tests.erl b/test/couchdb/couch_key_tree_tests.erl
deleted file mode 100644
index 753ecc4..0000000
--- a/test/couchdb/couch_key_tree_tests.erl
+++ /dev/null
@@ -1,380 +0,0 @@
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
--module(couch_key_tree_tests).
-
--include("couch_eunit.hrl").
-
--define(DEPTH, 10).
-
-
-key_tree_merge_test_()->
-    {
-        "Key tree merge",
-        [
-            should_merge_with_empty_tree(),
-            should_merge_reflexive(),
-            should_merge_prefix_of_a_tree_with_tree(),
-            should_produce_conflict_on_merge_with_unrelated_branch(),
-            should_merge_reflexive_for_child_nodes(),
-            should_merge_tree_to_itself(),
-            should_merge_tree_of_odd_length(),
-            should_merge_tree_with_stem(),
-            should_merge_with_stem_at_deeper_level(),
-            should_merge_with_stem_at_deeper_level_with_deeper_paths(),
-            should_merge_single_tree_with_deeper_stem(),
-            should_merge_tree_with_large_stem(),
-            should_merge_stems(),
-            should_create_conflicts_on_merge(),
-            should_create_no_conflicts_on_merge(),
-            should_ignore_conflicting_branch()
-        ]
-    }.
-
-key_tree_missing_leaves_test_()->
-    {
-        "Missing tree leaves",
-        [
-            should_not_find_missing_leaves(),
-            should_find_missing_leaves()
-        ]
-    }.
-
-key_tree_remove_leaves_test_()->
-    {
-        "Remove tree leaves",
-        [
-            should_have_no_effect_on_removing_no_leaves(),
-            should_have_no_effect_on_removing_non_existent_branch(),
-            should_remove_leaf(),
-            should_produce_empty_tree_on_removing_all_leaves(),
-            should_have_no_effect_on_removing_non_existent_node(),
-            should_produce_empty_tree_on_removing_last_leaf()
-        ]
-    }.
-
-key_tree_get_leaves_test_()->
-    {
-        "Leaves retrieving",
-        [
-            should_extract_subtree(),
-            should_extract_subsubtree(),
-            should_gather_non_existent_leaf(),
-            should_gather_leaf(),
-            should_gather_multiple_leaves(),
-            should_retrieve_full_key_path(),
-            should_retrieve_full_key_path_for_node(),
-            should_retrieve_leaves_with_parent_node(),
-            should_retrieve_all_leaves()
-        ]
-    }.
-
-key_tree_leaf_counting_test_()->
-    {
-        "Leaf counting",
-        [
-            should_have_no_leaves_for_empty_tree(),
-            should_have_single_leaf_for_tree_with_single_node(),
-            should_have_two_leaves_for_tree_with_child_siblings(),
-            should_not_affect_on_leaf_counting_for_stemmed_tree()
-        ]
-    }.
-
-key_tree_stemming_test_()->
-    {
-        "Stemming",
-        [
-            should_have_no_effect_for_stemming_more_levels_than_exists(),
-            should_return_one_deepest_node(),
-            should_return_two_deepest_nodes()
-        ]
-    }.
-
-
-should_merge_with_empty_tree()->
-    One = {1, {"1","foo",[]}},
-    ?_assertEqual({[One], no_conflicts},
-                  couch_key_tree:merge([], One, ?DEPTH)).
-
-should_merge_reflexive()->
-    One = {1, {"1","foo",[]}},
-    ?_assertEqual({[One], no_conflicts},
-                  couch_key_tree:merge([One], One, ?DEPTH)).
-
-should_merge_prefix_of_a_tree_with_tree()->
-    One = {1, {"1","foo",[]}},
-    TwoSibs = [{1, {"1","foo",[]}},
-               {1, {"2","foo",[]}}],
-    ?_assertEqual({TwoSibs, no_conflicts},
-                  couch_key_tree:merge(TwoSibs, One, ?DEPTH)).
-
-should_produce_conflict_on_merge_with_unrelated_branch()->
-    TwoSibs = [{1, {"1","foo",[]}},
-               {1, {"2","foo",[]}}],
-    Three = {1, {"3","foo",[]}},
-    ThreeSibs = [{1, {"1","foo",[]}},
-                 {1, {"2","foo",[]}},
-                 {1, {"3","foo",[]}}],
-    ?_assertEqual({ThreeSibs, conflicts},
-                  couch_key_tree:merge(TwoSibs, Three, ?DEPTH)).
-
-should_merge_reflexive_for_child_nodes()->
-    TwoChild = {1, {"1","foo", [{"1a", "bar", [{"1aa", "bar", []}]}]}},
-    ?_assertEqual({[TwoChild], no_conflicts},
-                  couch_key_tree:merge([TwoChild], TwoChild, ?DEPTH)).
-
-should_merge_tree_to_itself()->
-    TwoChildSibs = {1, {"1","foo", [{"1a", "bar", []},
-                                    {"1b", "bar", []}]}},
-    ?_assertEqual({[TwoChildSibs], no_conflicts},
-                  couch_key_tree:merge([TwoChildSibs], TwoChildSibs, ?DEPTH)).
-
-should_merge_tree_of_odd_length()->
-    TwoChild = {1, {"1","foo", [{"1a", "bar", [{"1aa", "bar", []}]}]}},
-    TwoChildSibs = {1, {"1","foo", [{"1a", "bar", []},
-                                    {"1b", "bar", []}]}},
-    TwoChildPlusSibs = {1, {"1","foo", [{"1a", "bar", [{"1aa", "bar", []}]},
-                                        {"1b", "bar", []}]}},
-
-    ?_assertEqual({[TwoChildPlusSibs], no_conflicts},
-                  couch_key_tree:merge([TwoChild], TwoChildSibs, ?DEPTH)).
-
-should_merge_tree_with_stem()->
-    Stemmed = {2, {"1a", "bar", []}},
-    TwoChildSibs = {1, {"1","foo", [{"1a", "bar", []},
-                                    {"1b", "bar", []}]}},
-
-    ?_assertEqual({[TwoChildSibs], no_conflicts},
-                  couch_key_tree:merge([TwoChildSibs], Stemmed, ?DEPTH)).
-
-should_merge_with_stem_at_deeper_level()->
-    Stemmed = {3, {"1bb", "boo", []}},
-    TwoChildSibs = {1, {"1","foo", [{"1a", "bar", []},
-                                    {"1b", "bar", [{"1bb", "boo", []}]}]}},
-    ?_assertEqual({[TwoChildSibs], no_conflicts},
-                  couch_key_tree:merge([TwoChildSibs], Stemmed, ?DEPTH)).
-
-should_merge_with_stem_at_deeper_level_with_deeper_paths()->
-    Stemmed = {3, {"1bb", "boo", []}},
-    StemmedTwoChildSibs = [{2,{"1a", "bar", []}},
-                           {2,{"1b", "bar", [{"1bb", "boo", []}]}}],
-    ?_assertEqual({StemmedTwoChildSibs, no_conflicts},
-                  couch_key_tree:merge(StemmedTwoChildSibs, Stemmed, ?DEPTH)).
-
-should_merge_single_tree_with_deeper_stem()->
-    Stemmed = {3, {"1aa", "bar", []}},
-    TwoChild = {1, {"1","foo", [{"1a", "bar", [{"1aa", "bar", []}]}]}},
-    ?_assertEqual({[TwoChild], no_conflicts},
-                  couch_key_tree:merge([TwoChild], Stemmed, ?DEPTH)).
-
-should_merge_tree_with_large_stem()->
-    Stemmed = {2, {"1a", "bar", [{"1aa", "bar", []}]}},
-    TwoChild = {1, {"1","foo", [{"1a", "bar", [{"1aa", "bar", []}]}]}},
-    ?_assertEqual({[TwoChild], no_conflicts},
-                  couch_key_tree:merge([TwoChild], Stemmed, ?DEPTH)).
-
-should_merge_stems()->
-    StemmedA = {2, {"1a", "bar", [{"1aa", "bar", []}]}},
-    StemmedB = {3, {"1aa", "bar", []}},
-    ?_assertEqual({[StemmedA], no_conflicts},
-                  couch_key_tree:merge([StemmedA], StemmedB, ?DEPTH)).
-
-should_create_conflicts_on_merge()->
-    OneChild = {1, {"1","foo",[{"1a", "bar", []}]}},
-    Stemmed = {3, {"1aa", "bar", []}},
-    ?_assertEqual({[OneChild, Stemmed], conflicts},
-                  couch_key_tree:merge([OneChild], Stemmed, ?DEPTH)).
-
-should_create_no_conflicts_on_merge()->
-    OneChild = {1, {"1","foo",[{"1a", "bar", []}]}},
-    Stemmed = {3, {"1aa", "bar", []}},
-    TwoChild = {1, {"1","foo", [{"1a", "bar", [{"1aa", "bar", []}]}]}},
-    ?_assertEqual({[TwoChild], no_conflicts},
-                  couch_key_tree:merge([OneChild, Stemmed], TwoChild, ?DEPTH)).
-
-should_ignore_conflicting_branch()->
-    %% this test is based on couch-902-test-case2.py
-    %% foo has conflicts from replication at depth two
-    %% foo3 is the current value
-    Foo = {1, {"foo",
-               "val1",
-               [{"foo2","val2",[]},
-                {"foo3", "val3", []}
-               ]}},
-    %% foo now has an attachment added, which leads to foo4 and val4
-    %% off foo3
-    Bar = {1, {"foo",
-               [],
-               [{"foo3",
-                 [],
-                 [{"foo4","val4",[]}
-                  ]}]}},
-    %% this is what the merge returns
-    %% note that it ignores the conflicting branch as there's no match
-    FooBar = {1, {"foo",
-               "val1",
-               [{"foo2","val2",[]},
-                {"foo3", "val3", [{"foo4","val4",[]}]}
-               ]}},
-    {
-        "COUCHDB-902",
-        ?_assertEqual({[FooBar], no_conflicts},
-                      couch_key_tree:merge([Foo], Bar, ?DEPTH))
-    }.
-
-should_not_find_missing_leaves()->
-    TwoChildSibs = [{0, {"1","foo", [{"1a", "bar", []}, {"1b", "bar", []}]}}],
-    ?_assertEqual([],
-                  couch_key_tree:find_missing(TwoChildSibs,
-                                              [{0,"1"}, {1,"1a"}])).
-
-should_find_missing_leaves()->
-    Stemmed1 = [{1, {"1a", "bar", [{"1aa", "bar", []}]}}],
-    Stemmed2 = [{2, {"1aa", "bar", []}}],
-    TwoChildSibs = [{0, {"1","foo", [{"1a", "bar", []}, {"1b", "bar", []}]}}],
-    [
-        ?_assertEqual(
-            [{0, "10"}, {100, "x"}],
-            couch_key_tree:find_missing(
-                TwoChildSibs,
-                [{0,"1"}, {0, "10"}, {1,"1a"}, {100, "x"}])),
-        ?_assertEqual(
-            [{0, "1"}, {100, "x"}],
-            couch_key_tree:find_missing(
-                Stemmed1,
-                [{0,"1"}, {1,"1a"}, {100, "x"}])),
-        ?_assertEqual(
-            [{0, "1"}, {1,"1a"}, {100, "x"}],
-            couch_key_tree:find_missing(
-                Stemmed2,
-                [{0,"1"}, {1,"1a"}, {100, "x"}]))
-    ].
-
-should_have_no_effect_on_removing_no_leaves()->
-    TwoChildSibs = [{0, {"1","foo", [{"1a", "bar", []}, {"1b", "bar", []}]}}],
-    ?_assertEqual({TwoChildSibs, []},
-                  couch_key_tree:remove_leafs(TwoChildSibs,
-                                              [])).
-
-should_have_no_effect_on_removing_non_existent_branch()->
-    TwoChildSibs = [{0, {"1","foo", [{"1a", "bar", []}, {"1b", "bar", []}]}}],
-    ?_assertEqual({TwoChildSibs, []},
-                  couch_key_tree:remove_leafs(TwoChildSibs,
-                                              [{0, "1"}])).
-
-should_remove_leaf()->
-    OneChild = [{0, {"1","foo",[{"1a", "bar", []}]}}],
-    TwoChildSibs = [{0, {"1","foo", [{"1a", "bar", []}, {"1b", "bar", []}]}}],
-    ?_assertEqual({OneChild, [{1, "1b"}]},
-                  couch_key_tree:remove_leafs(TwoChildSibs,
-                                              [{1, "1b"}])).
-
-should_produce_empty_tree_on_removing_all_leaves()->
-    TwoChildSibs = [{0, {"1","foo", [{"1a", "bar", []}, {"1b", "bar", []}]}}],
-    ?_assertEqual({[], [{1, "1b"}, {1, "1a"}]},
-                  couch_key_tree:remove_leafs(TwoChildSibs,
-                                              [{1, "1b"}, {1, "1a"}])).
-
-should_have_no_effect_on_removing_non_existent_node()->
-    Stemmed = [{1, {"1a", "bar", [{"1aa", "bar", []}]}}],
-    ?_assertEqual({Stemmed, []},
-                  couch_key_tree:remove_leafs(Stemmed,
-                                              [{1, "1a"}])).
-
-should_produce_empty_tree_on_removing_last_leaf()->
-    Stemmed = [{1, {"1a", "bar", [{"1aa", "bar", []}]}}],
-    ?_assertEqual({[], [{2, "1aa"}]},
-                  couch_key_tree:remove_leafs(Stemmed,
-                                              [{2, "1aa"}])).
-
-should_extract_subtree()->
-    TwoChildSibs = [{0, {"1","foo", [{"1a", "bar", []}, {"1b", "bar", []}]}}],
-    ?_assertEqual({[{"foo", {0, ["1"]}}],[]},
-                  couch_key_tree:get(TwoChildSibs, [{0, "1"}])).
-
-should_extract_subsubtree()->
-    TwoChildSibs = [{0, {"1","foo", [{"1a", "bar", []}, {"1b", "bar", []}]}}],
-    ?_assertEqual({[{"bar", {1, ["1a", "1"]}}],[]},
-                  couch_key_tree:get(TwoChildSibs, [{1, "1a"}])).
-
-should_gather_non_existent_leaf()->
-    TwoChildSibs = [{0, {"1","foo", [{"1a", "bar", []}, {"1b", "bar", []}]}}],
-    ?_assertEqual({[],[{0, "x"}]},
-                  couch_key_tree:get_key_leafs(TwoChildSibs, [{0, "x"}])).
-
-should_gather_leaf()->
-    TwoChildSibs = [{0, {"1","foo", [{"1a", "bar", []}, {"1b", "bar", []}]}}],
-    ?_assertEqual({[{"bar", {1, ["1a","1"]}}],[]},
-                  couch_key_tree:get_key_leafs(TwoChildSibs, [{1, "1a"}])).
-
-should_gather_multiple_leaves()->
-    TwoChildSibs = [{0, {"1","foo", [{"1a", "bar", []}, {"1b", "bar", []}]}}],
-    ?_assertEqual({[{"bar", {1, ["1a","1"]}},{"bar",{1, ["1b","1"]}}],[]},
-                  couch_key_tree:get_key_leafs(TwoChildSibs, [{0, "1"}])).
-
-should_retrieve_full_key_path()->
-    TwoChildSibs = [{0, {"1","foo", [{"1a", "bar", []}, {"1b", "bar", []}]}}],
-    ?_assertEqual({[{0,[{"1", "foo"}]}],[]},
-                  couch_key_tree:get_full_key_paths(TwoChildSibs, [{0, "1"}])).
-
-should_retrieve_full_key_path_for_node()->
-    TwoChildSibs = [{0, {"1","foo", [{"1a", "bar", []}, {"1b", "bar", []}]}}],
-    ?_assertEqual({[{1,[{"1a", "bar"},{"1", "foo"}]}],[]},
-                  couch_key_tree:get_full_key_paths(TwoChildSibs, [{1, "1a"}])).
-
-should_retrieve_leaves_with_parent_node()->
-    Stemmed = [{1, {"1a", "bar", [{"1aa", "bar", []}]}}],
-    TwoChildSibs = [{0, {"1","foo", [{"1a", "bar", []}, {"1b", "bar", []}]}}],
-    [
-        ?_assertEqual([{2, [{"1aa", "bar"},{"1a", "bar"}]}],
-                      couch_key_tree:get_all_leafs_full(Stemmed)),
-        ?_assertEqual([{1, [{"1a", "bar"},{"1", "foo"}]},
-                       {1, [{"1b", "bar"},{"1", "foo"}]}],
-                      couch_key_tree:get_all_leafs_full(TwoChildSibs))
-    ].
-
-should_retrieve_all_leaves()->
-    Stemmed = [{1, {"1a", "bar", [{"1aa", "bar", []}]}}],
-    TwoChildSibs = [{0, {"1","foo", [{"1a", "bar", []}, {"1b", "bar", []}]}}],
-    [
-        ?_assertEqual([{"bar", {2, ["1aa","1a"]}}],
-                      couch_key_tree:get_all_leafs(Stemmed)),
-        ?_assertEqual([{"bar", {1, ["1a", "1"]}}, {"bar", {1, ["1b","1"]}}],
-                      couch_key_tree:get_all_leafs(TwoChildSibs))
-    ].
-
-should_have_no_leaves_for_empty_tree()->
-    ?_assertEqual(0, couch_key_tree:count_leafs([])).
-
-should_have_single_leaf_for_tree_with_single_node()->
-    ?_assertEqual(1, couch_key_tree:count_leafs([{0, {"1","foo",[]}}])).
-
-should_have_two_leaves_for_tree_with_child_siblings()->
-    TwoChildSibs = [{0, {"1","foo", [{"1a", "bar", []}, {"1b", "bar", []}]}}],
-    ?_assertEqual(2, couch_key_tree:count_leafs(TwoChildSibs)).
-
-should_not_affect_on_leaf_counting_for_stemmed_tree()->
-    ?_assertEqual(1, couch_key_tree:count_leafs([{2, {"1bb", "boo", []}}])).
-
-should_have_no_effect_for_stemming_more_levels_than_exists()->
-    TwoChild = [{0, {"1","foo", [{"1a", "bar", [{"1aa", "bar", []}]}]}}],
-    ?_assertEqual(TwoChild, couch_key_tree:stem(TwoChild, 3)).
-
-should_return_one_deepest_node()->
-    TwoChild = [{0, {"1","foo", [{"1a", "bar", [{"1aa", "bar", []}]}]}}],
-    Stemmed = [{2, {"1aa", "bar", []}}],
-    ?_assertEqual(Stemmed, couch_key_tree:stem(TwoChild, 1)).
-
-should_return_two_deepest_nodes()->
-    TwoChild = [{0, {"1","foo", [{"1a", "bar", [{"1aa", "bar", []}]}]}}],
-    Stemmed = [{1, {"1a", "bar", [{"1aa", "bar", []}]}}],
-    ?_assertEqual(Stemmed, couch_key_tree:stem(TwoChild, 2)).
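
Every tree in these tests is a {StartDepth, {Key, Value, ChildTrees}} tuple, and merge/3 returns the merged forest plus a conflicts | no_conflicts flag. The smallest case above, replayed in a shell (same call as should_merge_with_empty_tree, with the module's ?DEPTH of 10 written out):

    1> One = {1, {"1", "foo", []}}.
    {1,{"1","foo",[]}}
    2> couch_key_tree:merge([], One, 10).
    {[{1,{"1","foo",[]}}],no_conflicts}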

http://git-wip-us.apache.org/repos/asf/couchdb-couch/blob/0ce84d8e/test/couchdb/couch_passwords_tests.erl
----------------------------------------------------------------------
diff --git a/test/couchdb/couch_passwords_tests.erl b/test/couchdb/couch_passwords_tests.erl
deleted file mode 100644
index 116265c..0000000
--- a/test/couchdb/couch_passwords_tests.erl
+++ /dev/null
@@ -1,54 +0,0 @@
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
--module(couch_passwords_tests).
-
--include("couch_eunit.hrl").
-
-
-pbkdf2_test_()->
-    {"PBKDF2",
-     [
-         {"Iterations: 1, length: 20",
-          ?_assertEqual(
-              {ok, <<"0c60c80f961f0e71f3a9b524af6012062fe037a6">>},
-              couch_passwords:pbkdf2(<<"password">>, <<"salt">>, 1, 20))},
-
-         {"Iterations: 2, length: 20",
-          ?_assertEqual(
-              {ok, <<"ea6c014dc72d6f8ccd1ed92ace1d41f0d8de8957">>},
-              couch_passwords:pbkdf2(<<"password">>, <<"salt">>, 2, 20))},
-
-         {"Iterations: 4096, length: 20",
-          ?_assertEqual(
-              {ok, <<"4b007901b765489abead49d926f721d065a429c1">>},
-              couch_passwords:pbkdf2(<<"password">>, <<"salt">>, 4096, 20))},
-
-         {"Iterations: 4096, length: 25",
-          ?_assertEqual(
-              {ok, <<"3d2eec4fe41c849b80c8d83662c0e44a8b291a964cf2f07038">>},
-              couch_passwords:pbkdf2(<<"passwordPASSWORDpassword">>,
-                                     <<"saltSALTsaltSALTsaltSALTsaltSALTsalt">>,
-                                     4096, 25))},
-         {"Null byte",
-          ?_assertEqual(
-              {ok, <<"56fa6aa75548099dcc37d7f03425e0c3">>},
-              couch_passwords:pbkdf2(<<"pass\0word">>,
-                                     <<"sa\0lt">>,
-                                     4096, 16))},
-
-         {timeout, 180,  %% this may run too long on slow hosts
-          {"Iterations: 16777216 - this may take some time",
-           ?_assertEqual(
-               {ok, <<"eefe3d61cd4da4e4e9945b3d6ba2158c2634e984">>},
-               couch_passwords:pbkdf2(<<"password">>, <<"salt">>, 16777216, 20)
-           )}}]}.
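
These inputs and digests are the PBKDF2-HMAC-SHA1 test vectors from RFC 6070, so they can be cross-checked independently. On OTP releases that provide crypto:pbkdf2_hmac/5 (an assumption; older releases do not have it), the first vector can be reproduced as sketched below, keeping in mind that couch_passwords returns the result hex-encoded while crypto returns raw bytes:

    %% Sketch only; assumes crypto:pbkdf2_hmac/5 exists on this OTP release.
    Raw = crypto:pbkdf2_hmac(sha, <<"password">>, <<"salt">>, 1, 20),
    20 = byte_size(Raw).  %% the same 20 bytes that "0c60c80f..." encodes in hex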

http://git-wip-us.apache.org/repos/asf/couchdb-couch/blob/0ce84d8e/test/couchdb/couch_ref_counter_tests.erl
----------------------------------------------------------------------
diff --git a/test/couchdb/couch_ref_counter_tests.erl b/test/couchdb/couch_ref_counter_tests.erl
deleted file mode 100644
index b7e97b4..0000000
--- a/test/couchdb/couch_ref_counter_tests.erl
+++ /dev/null
@@ -1,107 +0,0 @@
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
--module(couch_ref_counter_tests).
-
--include("couch_eunit.hrl").
--include_lib("couchdb/couch_db.hrl").
-
--define(TIMEOUT, 1000).
-
-
-setup() ->
-    {ok, RefCtr} = couch_ref_counter:start([]),
-    ChildPid = spawn(fun() -> loop() end),
-    {RefCtr, ChildPid}.
-
-teardown({_, ChildPid}) ->
-    erlang:monitor(process, ChildPid),
-    ChildPid ! close,
-    wait().
-
-
-couch_ref_counter_test_() ->
-    {
-        "CouchDB reference counter tests",
-        {
-            foreach,
-            fun setup/0, fun teardown/1,
-            [
-                fun should_initialize_with_calling_process_as_referrer/1,
-                fun should_ignore_unknown_pid/1,
-                fun should_increment_counter_on_pid_add/1,
-                fun should_not_increase_counter_on_readding_same_pid/1,
-                fun should_drop_ref_for_double_added_pid/1,
-                fun should_decrement_counter_on_pid_drop/1,
-                fun should_add_after_drop/1,
-                fun should_decrement_counter_on_process_exit/1
-
-            ]
-        }
-    }.
-
-
-should_initialize_with_calling_process_as_referrer({RefCtr, _}) ->
-    ?_assertEqual(1, couch_ref_counter:count(RefCtr)).
-
-should_ignore_unknown_pid({RefCtr, ChildPid}) ->
-    ?_assertEqual(ok, couch_ref_counter:drop(RefCtr, ChildPid)).
-
-should_increment_counter_on_pid_add({RefCtr, ChildPid}) ->
-    couch_ref_counter:add(RefCtr, ChildPid),
-    ?_assertEqual(2, couch_ref_counter:count(RefCtr)).
-
-should_not_increase_counter_on_readding_same_pid({RefCtr, ChildPid}) ->
-    couch_ref_counter:add(RefCtr, ChildPid),
-    couch_ref_counter:add(RefCtr, ChildPid),
-    ?_assertEqual(2, couch_ref_counter:count(RefCtr)).
-
-should_drop_ref_for_double_added_pid({RefCtr, ChildPid}) ->
-    couch_ref_counter:add(RefCtr, ChildPid),
-    couch_ref_counter:add(RefCtr, ChildPid),
-    couch_ref_counter:drop(RefCtr, ChildPid),
-    ?_assertEqual(2, couch_ref_counter:count(RefCtr)).
-
-should_decrement_counter_on_pid_drop({RefCtr, ChildPid}) ->
-    couch_ref_counter:add(RefCtr, ChildPid),
-    couch_ref_counter:drop(RefCtr, ChildPid),
-    ?_assertEqual(1, couch_ref_counter:count(RefCtr)).
-
-should_add_after_drop({RefCtr, ChildPid}) ->
-    couch_ref_counter:add(RefCtr, ChildPid),
-    couch_ref_counter:drop(RefCtr, ChildPid),
-    couch_ref_counter:add(RefCtr, ChildPid),
-    ?_assertEqual(2, couch_ref_counter:count(RefCtr)).
-
-should_decrement_counter_on_process_exit({RefCtr, ChildPid}) ->
-    ?_assertEqual(1,
-        begin
-            couch_ref_counter:add(RefCtr, ChildPid),
-            erlang:monitor(process, ChildPid),
-            ChildPid ! close,
-            wait(),
-            couch_ref_counter:count(RefCtr)
-        end).
-
-
-loop() ->
-    receive
-        close -> ok
-    end.
-
-wait() ->
-    receive
-        {'DOWN', _, _, _, _} ->
-            ok
-    after ?TIMEOUT ->
-        throw(timeout_error)
-    end.