Posted to commits@couchdb.apache.org by da...@apache.org on 2019/07/23 20:13:26 UTC

[couchdb] 17/25: Fix code and tests to pass eunit

This is an automated email from the ASF dual-hosted git repository.

davisp pushed a commit to branch prototype/views
in repository https://gitbox.apache.org/repos/asf/couchdb.git

commit 55214bb1ca7742b1b7bd50d032b9c311fd29c675
Author: Paul J. Davis <pa...@gmail.com>
AuthorDate: Mon Jul 22 16:23:44 2019 -0500

    Fix code and tests to pass eunit
    
    The earlier code was just a rough pass at moving logic around and
    simplifying a bit here and there. Now things compile and most tests
    pass. I've still got two tests to fix for map query arguments.
---
 src/couch_views/src/couch_views.erl                |  37 +-
 src/couch_views/src/couch_views_encoding.erl       |   2 +-
 src/couch_views/src/couch_views_fdb.erl            |  40 +-
 src/couch_views/src/couch_views_indexer.erl        |  37 +-
 src/couch_views/src/couch_views_jobs.erl           |  19 +-
 src/couch_views/src/couch_views_reader.erl         |  65 ++-
 src/couch_views/src/couch_views_sup.erl            |   4 +-
 src/couch_views/test/couch_views_encoding_test.erl |  19 +-
 src/couch_views/test/couch_views_indexer_test.erl  | 580 ++++++++++++++-------
 src/couch_views/test/couch_views_map_test.erl      |  56 +-
 10 files changed, 570 insertions(+), 289 deletions(-)

diff --git a/src/couch_views/src/couch_views.erl b/src/couch_views/src/couch_views.erl
index e10675b..e619a67 100644
--- a/src/couch_views/src/couch_views.erl
+++ b/src/couch_views/src/couch_views.erl
@@ -19,7 +19,7 @@
 -include("couch_mrview/include/couch_mrview.hrl").
 
 
-query(Db, DDoc, ViewName, Callback, Acc0, QueryArgs0) ->
+query(Db, DDoc, ViewName, Callback, Acc0, Args0) ->
     case fabric2_db:is_users_db(Db) of
         true ->
             fabric2_users_db:after_doc_read(DDoc, Db);
@@ -34,20 +34,20 @@ query(Db, DDoc, ViewName, Callback, Acc0, QueryArgs0) ->
         views = Views
     } = Mrst,
 
-    View = get_view(ViewName, Views),
-    QueryArgs1 = couch_mrview_util:set_view_type(QueryArgs0, View, Views),
-    QueryArgs2 = couch_mrview_util:validate_args(QueryArgs1),
-    case is_reduce_view(QueryArgs2) of
+    Args1 = to_mrargs(Args0),
+    Args2 = couch_mrview_util:set_view_type(Args1, ViewName, Views),
+    Args3 = couch_mrview_util:validate_args(Args2),
+    case is_reduce_view(Args3) of
         true -> throw({not_implemented});
         false -> ok
     end,
 
-    ok = maybe_update_view(Db, Mrst, QueryArgs2),
+    ok = maybe_update_view(Db, Mrst, Args3),
 
     try
-        couch_views_reader:read(Db, Mrst, ViewName, Callback, Acc0, QueryArgs2)
+        couch_views_reader:read(Db, Mrst, ViewName, Callback, Acc0, Args3)
     after
-        UpdateAfter = QueryArgs2#mrargs.update == lazy,
+        UpdateAfter = Args3#mrargs.update == lazy,
         if UpdateAfter == false -> ok; true ->
             couch_views_jobs:build_view_async(Db, Mrst)
         end
@@ -75,14 +75,21 @@ maybe_update_view(Db, Mrst, _Args) ->
     end.
 
 
-get_view(ViewName, Views) ->
-    {value, View} = lists:search(fun(View) ->
-        lists:member(ViewName, View#mrview.map_names)
-    end, Views),
-    View.
-
-
 is_reduce_view(#mrargs{view_type = ViewType}) ->
     ViewType =:= red;
 is_reduce_view({Reduce, _, _}) ->
     Reduce =:= red.
+
+
+to_mrargs(#mrargs{} = Args) ->
+    Args;
+
+to_mrargs(#{} = Args) ->
+    Fields = record_info(fields, mrargs),
+    Indexes = lists:seq(2, record_info(size, mrargs)),
+    LU = lists:zip(Fields, Indexes),
+
+    maps:fold(fun(Key, Value, Acc) ->
+        Index = fabric2_util:get_value(couch_util:to_existing_atom(Key), LU),
+        setelement(Index, Acc, Value)
+    end, #mrargs{}, Args).
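
Note on the to_mrargs/1 helper added above: it leans on record_info/2 to translate map keys into record tuple positions. Below is a minimal, self-contained sketch of that technique using a made-up #opts{} record; the module, record, and field names are illustrative, not part of couch_views.

    -module(map_to_record_sketch).
    -export([to_opts/1]).

    -record(opts, {limit = 10, skip = 0, descending = false}).

    %% Convert a map such as #{limit => 5} into an #opts{} record.
    %% record_info(fields, opts) lists the field names in declaration
    %% order, and element 1 of a record tuple is the record tag, so
    %% field N lives at tuple position N + 1.
    to_opts(#opts{} = Opts) ->
        Opts;
    to_opts(#{} = Map) ->
        Fields = record_info(fields, opts),
        Indexes = lists:seq(2, record_info(size, opts)),
        LU = lists:zip(Fields, Indexes),
        maps:fold(fun(Key, Value, Acc) ->
            %% Assumes atom keys; an unknown key fails the match below.
            {Key, Index} = lists:keyfind(Key, 1, LU),
            setelement(Index, Acc, Value)
        end, #opts{}, Map).

For example, to_opts(#{limit => 5, descending => true}) yields #opts{limit = 5, skip = 0, descending = true}. The real to_mrargs/1 additionally runs keys through couch_util:to_existing_atom/1 so binary keys are accepted as well.
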
diff --git a/src/couch_views/src/couch_views_encoding.erl b/src/couch_views/src/couch_views_encoding.erl
index 9f76ea6..f80ddfe 100644
--- a/src/couch_views/src/couch_views_encoding.erl
+++ b/src/couch_views/src/couch_views_encoding.erl
@@ -30,7 +30,7 @@
 
 
 encode(X) ->
-    encode_int(X, value).
+    encode(X, value).
 
 
 encode(X, Type) when Type == key; Type == value ->
diff --git a/src/couch_views/src/couch_views_fdb.erl b/src/couch_views/src/couch_views_fdb.erl
index dc1840d..16da0fc 100644
--- a/src/couch_views/src/couch_views_fdb.erl
+++ b/src/couch_views/src/couch_views_fdb.erl
@@ -21,6 +21,10 @@
     write_doc/4
 ]).
 
+-ifdef(TEST).
+-compile(export_all).
+-compile(nowarn_export_all).
+-endif.
 
 -define(LIST_VALUE, 0).
 -define(JSON_VALUE, 1).
@@ -88,10 +92,14 @@ fold_map_idx(TxDb, Sig, ViewId, Options, Callback, Acc0) ->
             {fun fold_rev/2, RevAcc}
     end,
 
-    fabric2_db:fold_range(TxDb, MapIdxPrefix, Fun, Acc, Options).
+    #{
+        acc := Acc1
+    } = fabric2_fdb:fold_range(TxDb, MapIdxPrefix, Fun, Acc, Options),
+
+    Acc1.
 
 
-write_doc(TxDb, Sig, #{deleted := true} = Doc, _ViewIds) ->
+write_doc(TxDb, Sig, _ViewIds, #{deleted := true} = Doc) ->
     #{
         id := DocId
     } = Doc,
@@ -103,7 +111,7 @@ write_doc(TxDb, Sig, #{deleted := true} = Doc, _ViewIds) ->
         clear_map_idx(TxDb, Sig, ViewId, DocId, ViewKeys)
     end, ExistingViewKeys);
 
-write_doc(TxDb, Sig, Doc, ViewIds) ->
+write_doc(TxDb, Sig, ViewIds, Doc) ->
     #{
         id := DocId,
         results := Results
@@ -201,7 +209,7 @@ fold_rev({RowKey, EncodedOriginalKey}, #{next := key} = Acc) ->
     UserAcc1 = UserCallback(DocId, Key, Value, UserAcc0),
 
     Acc#{
-        next := val,
+        next := value,
         value := undefined,
         sort_key := undefined,
         docid := undefined,
@@ -209,6 +217,7 @@ fold_rev({RowKey, EncodedOriginalKey}, #{next := key} = Acc) ->
         acc := UserAcc1
     }.
 
+
 clear_id_idx(TxDb, Sig, DocId) ->
     #{
         tx := Tx,
@@ -239,7 +248,9 @@ update_id_idx(TxDb, Sig, ViewId, DocId, NewRows) ->
 
     Unique = lists:usort([K || {K, _V} <- NewRows]),
 
-    Key = id_idx_key(DbPrefix, Sig, ViewId, DocId),
+    couch_log:error("Updating ID index: ~p ~p ~p ~p", [ViewId, DocId, NewRows, Unique]),
+
+    Key = id_idx_key(DbPrefix, Sig, DocId, ViewId),
     Val = couch_views_encoding:encode(Unique),
     ok = erlfdb:set(Tx, Key, Val).
 
@@ -264,8 +275,8 @@ update_map_idx(TxDb, Sig, ViewId, DocId, ExistingKeys, NewRows) ->
     lists:foreach(fun({DupeId, Key1, Key2, Val}) ->
         KK = map_idx_key(MapIdxPrefix, {Key1, DocId}, DupeId, ?VIEW_ROW_KEY),
         VK = map_idx_key(MapIdxPrefix, {Key1, DocId}, DupeId, ?VIEW_ROW_VALUE),
-        ok = erlfdn:store(Tx, KK, Key2),
-        ok = erlfdb:store(Tx, VK, Val)
+        ok = erlfdb:set(Tx, KK, Key2),
+        ok = erlfdb:set(Tx, VK, Val)
     end, KVsToAdd).
 
 
@@ -305,12 +316,12 @@ map_idx_prefix(DbPrefix, Sig, ViewId) ->
 
 map_idx_key(MapIdxPrefix, MapKey, DupeId, Type) ->
     Key = {MapKey, DupeId, Type},
-    erldb_tuple:encode(Key, MapIdxPrefix).
+    erlfdb_tuple:pack(Key, MapIdxPrefix).
 
 
 map_idx_range(DbPrefix, Sig, ViewId, MapKey, DocId) ->
     Encoded = couch_views_encoding:encode(MapKey, key),
-    Key = {?DB_VIEWS, Sig, ?VIEW_MAP_RANGE, ViewId, Encoded, DocId},
+    Key = {?DB_VIEWS, Sig, ?VIEW_MAP_RANGE, ViewId, {Encoded, DocId}},
     erlfdb_tuple:range(Key, DbPrefix).
 
 
@@ -326,10 +337,9 @@ process_rows(Rows) ->
         dict:append(K1, {K2, V}, Acc)
     end, dict:new(), Encoded),
 
-    {_, Labeled} = dict:fold(fun(K1, Vals) ->
-        lists:foldl(fun({K2, V}, {Count, Acc}) ->
+    dict:fold(fun(K1, Vals, DAcc) ->
+        {_, Labeled} = lists:foldl(fun({K2, V}, {Count, Acc}) ->
             {Count + 1, [{Count, K1, K2, V} | Acc]}
-        end, {0, []}, Vals)
-    end, [], Grouped),
-
-    Labeled.
+        end, {0, []}, Vals),
+        Labeled ++ DAcc
+    end, [], Grouped).
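
The rewritten tail of process_rows/1 above groups emitted rows by their encoded key and labels repeats with a counter, so identical keys emitted from one document still produce distinct index entries. A self-contained sketch of that grouping-and-labeling step, using plain terms instead of encoded view keys (module and function names are illustrative):

    -module(dupe_label_sketch).
    -export([label_dupes/1]).

    %% Turn [{Key, Value}] rows into {DupeId, Key, Value} triples where
    %% DupeId counts repeats of the same key, starting at 0. For example,
    %% [{a, 1}, {a, 2}, {b, 3}] becomes, in some order,
    %% [{0, a, 1}, {1, a, 2}, {0, b, 3}].
    label_dupes(Rows) ->
        Grouped = lists:foldl(fun({K, V}, Acc) ->
            dict:append(K, V, Acc)
        end, dict:new(), Rows),
        dict:fold(fun(K, Vals, DAcc) ->
            {_, Labeled} = lists:foldl(fun(V, {Count, Acc}) ->
                {Count + 1, [{Count, K, V} | Acc]}
            end, {0, []}, Vals),
            Labeled ++ DAcc
        end, [], Grouped).
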
diff --git a/src/couch_views/src/couch_views_indexer.erl b/src/couch_views/src/couch_views_indexer.erl
index 91072a1..decec42 100644
--- a/src/couch_views/src/couch_views_indexer.erl
+++ b/src/couch_views/src/couch_views_indexer.erl
@@ -36,19 +36,24 @@ spawn_link() ->
 
 init() ->
     {ok, Job, Data} = couch_jobs:accept(?INDEX_JOB_TYPE, #{}),
+    couch_log:error("XKCD: GOT JOB: ~p~n", [Data]),
 
     #{
         <<"db_name">> := DbName,
         <<"ddoc_id">> := DDocId,
-        <<"sig">> := Sig
+        <<"sig">> := JobSig
     } = Data,
 
     {ok, Db} = fabric2_db:open(DbName, []),
     {ok, DDoc} = fabric2_db:open_doc(Db, DDocId),
     {ok, Mrst} = couch_views_util:ddoc_to_mrst(DbName, DDoc),
+    HexSig = fabric2_util:to_hex(Mrst#mrst.sig),
 
-    if Mrst#mrst.sig == Sig -> ok; true ->
-        couch_jobs:finish(Job, Data#{error => mismatched_signature}),
+    if  HexSig == JobSig -> ok; true ->
+        couch_jobs:finish(undefined, Job, Data#{
+            error => sig_changed,
+            reason => <<"Design document was modified">>
+        }),
         exit(normal)
     end,
 
@@ -74,10 +79,12 @@ update(#{} = Db, Mrst0, State0) ->
         % to populate our db and view sequences
         State1 = case State0 of
             #{db_seq := undefined} ->
+                ViewSeq = couch_views_fdb:get_update_seq(TxDb, Mrst0),
                 State0#{
                     tx_db := TxDb,
                     db_seq := fabric2_db:get_update_seq(TxDb),
-                    view_seq := couch_views_fdb:get_update_seq(TxDb, Mrst0)
+                    view_seq := ViewSeq,
+                    last_seq := ViewSeq
                 };
             _ ->
                 State0#{
@@ -95,7 +102,7 @@ update(#{} = Db, Mrst0, State0) ->
         } = State2,
 
         {Mrst1, MappedDocs} = map_docs(Mrst0, DocAcc),
-        write_docs(Db, Mrst1, MappedDocs, State2),
+        write_docs(TxDb, Mrst1, MappedDocs, State2),
 
         case Count < Limit of
             true ->
@@ -107,8 +114,7 @@ update(#{} = Db, Mrst0, State0) ->
                     tx_db := undefined,
                     count := 0,
                     doc_acc := [],
-                    view_seq := LastSeq,
-                    last_seq := undefined
+                    view_seq := LastSeq
                 }}
         end
     end),
@@ -208,7 +214,7 @@ write_docs(TxDb, Mrst, Docs, State) ->
         couch_views_fdb:write_doc(TxDb, Sig, ViewIds, Doc)
     end, Docs),
 
-    couch_views_fdb:update_view_seq(TxDb, Sig, LastSeq).
+    couch_views_fdb:set_update_seq(TxDb, Sig, LastSeq).
 
 
 start_query_server(#mrst{} = Mrst) ->
@@ -231,7 +237,20 @@ report_progress(State, UpdateType) ->
         last_seq := LastSeq
     } = State,
 
-    NewData = JobData#{view_seq => LastSeq},
+    #{
+        <<"db_name">> := DbName,
+        <<"ddoc_id">> := DDocId,
+        <<"sig">> := Sig
+    } = JobData,
+
+    % Reconstruct from scratch to remove any
+    % possible existing error state.
+    NewData = #{
+        <<"db_name">> => DbName,
+        <<"ddoc_id">> => DDocId,
+        <<"sig">> => Sig,
+        <<"view_seq">> => LastSeq
+    },
 
     case UpdateType of
         update ->
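
The signature check added to init/0 above compares a hex string from fabric2_util:to_hex/1 against the job's "sig" field, so both sides of the comparison are plain printable binaries. As a rough illustration of the kind of transformation involved (a stand-in sketch, not the fabric2 implementation):

    -module(hex_sketch).
    -export([to_hex_sketch/1]).

    %% Encode a binary, e.g. a view signature, as lowercase hex:
    %% to_hex_sketch(<<16#0A, 16#FF>>) -> <<"0aff">>.
    to_hex_sketch(Bin) when is_binary(Bin) ->
        << <<(hex_digit(N))>> || <<N:4>> <= Bin >>.

    hex_digit(N) when N < 10 -> $0 + N;
    hex_digit(N) -> $a + N - 10.

Comparing hex strings sidesteps any question of how the raw signature binary would survive being stored in and read back out of the job data.
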
diff --git a/src/couch_views/src/couch_views_jobs.erl b/src/couch_views/src/couch_views_jobs.erl
index 9e299af..85264c6 100644
--- a/src/couch_views/src/couch_views_jobs.erl
+++ b/src/couch_views/src/couch_views_jobs.erl
@@ -49,7 +49,7 @@ wait_for_job(JobId, UpdateSeq) ->
             wait_for_job(JobId, Subscription, UpdateSeq);
         {ok, finished, Data} ->
             case Data of
-                #{view_seq := ViewSeq} when ViewSeq >= UpdateSeq ->
+                #{<<"view_sig">> := ViewSeq} when ViewSeq >= UpdateSeq ->
                     ok;
                 _ ->
                     retry
@@ -61,11 +61,13 @@ wait_for_job(JobId, Subscription, UpdateSeq) ->
     case wait(Subscription) of
         {error, Error} ->
             erlang:error(Error);
-        {finished, #{view_seq := ViewSeq}} when ViewSeq >= UpdateSeq ->
+        {finished, #{<<"error">> := Error, <<"reason">> := Reason}} ->
+            erlang:error({binary_to_atom(Error, latin1), Reason});
+        {finished, #{<<"view_seq">> := ViewSeq}} when ViewSeq >= UpdateSeq ->
             ok;
         {finished, _} ->
             wait_for_job(JobId, UpdateSeq);
-        {_State, #{view_seq := ViewSeq}} when ViewSeq >= UpdateSeq ->
+        {_State, #{<<"view_seq">> := ViewSeq}} when ViewSeq >= UpdateSeq ->
             couch_jobs:unsubscribe(Subscription),
             ok;
         {_, _} ->
@@ -77,7 +79,8 @@ job_id(#{name := DbName}, #mrst{sig = Sig}) ->
     job_id(DbName, Sig);
 
 job_id(DbName, Sig) ->
-    <<DbName/binary, Sig/binary>>.
+    HexSig = fabric2_util:to_hex(Sig),
+    <<DbName/binary, "-", HexSig/binary>>.
 
 
 job_data(Db, Mrst) ->
@@ -89,12 +92,14 @@ job_data(Db, Mrst) ->
     #{
         db_name => fabric2_db:name(Db),
         ddoc_id => DDocId,
-        sig => Sig
+        sig => fabric2_util:to_hex(Sig)
     }.
 
 
 wait(Subscription) ->
     case couch_jobs:wait(Subscription, infinity) of
-        {?INDEX_JOB_TYPE, _JobId, JobState, JobData} -> {JobState, JobData};
-        timeout -> {error, timeout}
+        {?INDEX_JOB_TYPE, _JobId, JobState, JobData} ->
+            {JobState, JobData};
+        timeout ->
+            {error, timeout}
     end.
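
One detail worth spelling out in the wait logic above: the job data is submitted with atom keys (see job_data/2) but matched with binary keys such as <<"view_seq">> once it comes back from couch_jobs, which is consistent with the data going through a JSON round trip while persisted. A hedged sketch of the effect, assuming jiffy as the serializer (module and function names are illustrative):

    -module(json_keys_sketch).
    -export([round_trip_sketch/0]).

    %% Atom map keys do not survive a JSON round trip; they come back
    %% as binaries, which is why the subscriber matches <<"view_seq">>.
    round_trip_sketch() ->
        Data0 = #{view_seq => 42},
        Json = jiffy:encode(Data0),
        #{<<"view_seq">> := Seq} = jiffy:decode(Json, [return_maps]),
        Seq.  % 42
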
diff --git a/src/couch_views/src/couch_views_reader.erl b/src/couch_views/src/couch_views_reader.erl
index 8d2bf5a..41a37eb 100644
--- a/src/couch_views/src/couch_views_reader.erl
+++ b/src/couch_views/src/couch_views_reader.erl
@@ -25,11 +25,12 @@
 
 read(Db, Mrst, ViewName, UserCallback, UserAcc0, Args) ->
     #mrst{
+        language = Lang,
         sig = Sig,
         views = Views
     } = Mrst,
 
-    ViewId = get_view_id(ViewName, Views),
+    ViewId = get_view_id(Lang, Args, ViewName, Views),
     Opts = mrargs_to_fdb_options(Args),
     Fun = fun handle_row/4,
 
@@ -59,8 +60,7 @@ read(Db, Mrst, ViewName, UserCallback, UserAcc0, Args) ->
             #{
                 acc := UserAcc2
             } = Acc1,
-
-            maybe_stop(UserCallback(complete, UserAcc2))
+            {ok, maybe_stop(UserCallback(complete, UserAcc2))}
         end)
     catch throw:{done, Out} ->
         {ok, Out}
@@ -68,7 +68,7 @@ read(Db, Mrst, ViewName, UserCallback, UserAcc0, Args) ->
 
 
 handle_row(_DocId, _Key, _Value, #{skip := Skip} = Acc) when Skip > 0 ->
-    {ok, Acc#{skip := Skip - 1}};
+    Acc#{skip := Skip - 1};
 
 handle_row(DocId, Key, Value, Acc) ->
     #{
@@ -88,7 +88,7 @@ handle_row(DocId, Key, Value, Acc) ->
         DocOpts0 = Args#mrargs.doc_options,
         DocOpts1 = DocOpts0 ++ case Args#mrargs.conflicts of
             true -> [conflicts];
-            false -> []
+            _ -> []
         end,
         DocObj = case fabric2_db:open_doc(TxDb, DocId, DocOpts1) of
             {ok, Doc} -> couch_doc:to_json_obj(Doc, DocOpts1);
@@ -101,49 +101,60 @@ handle_row(DocId, Key, Value, Acc) ->
     Acc#{acc := UserAcc1}.
 
 
-get_view_id(ViewName, Views) ->
-    {value, View} = lists:search(fun(View) ->
-        lists:member(ViewName, View#mrview.map_names)
-    end, Views),
-    View#mrview.id_num.
+get_view_id(Lang, Args, ViewName, Views) ->
+    case couch_mrview_util:extract_view(Lang, Args, ViewName, Views) of
+        {map, View, _Args} -> View#mrview.id_num;
+        {red, {_Idx, _Lang, View}} -> View#mrview.id_num
+    end.
 
 
 mrargs_to_fdb_options(Args) ->
     #mrargs{
-        start_key = StartKey,
+        start_key = StartKey0,
         start_key_docid = StartKeyDocId,
-        end_key = EndKey,
+        end_key = EndKey0,
         end_key_docid = EndKeyDocId,
         direction = Direction,
         limit = Limit,
+        skip = Skip,
         inclusive_end = InclusiveEnd
     } = Args,
 
-    StartKeyOpts = case {StartKey, StartKeyDocId} of
+    StartKey1 = if StartKey0 == undefined -> undefined; true ->
+        couch_views_encoding:encode(StartKey0)
+    end,
+
+    StartKeyOpts = case {StartKey1, StartKeyDocId} of
         {undefined, _} ->
             [];
-        {StartKey, undefined} ->
-            [{start_key, {StartKey}}];
-        {_, _} ->
-            [{start_key, {StartKey, StartKeyDocId}}]
+        {StartKey1, StartKeyDocId} ->
+            [{start_key, {StartKey1, StartKeyDocId}}]
+    end,
+
+    EndKey1 = if EndKey0 == undefined -> undefined; true ->
+        couch_views_encoding:encode(EndKey0)
     end,
 
-    EndKeyOpts = case {EndKey, EndKeyDocId} of
+    EndKeyOpts = case {EndKey1, EndKeyDocId} of
         {undefined, _} ->
             [];
-        {EndKey, undefined} when InclusiveEnd ->
-            [{end_key, {EndKey}}];
-        {EndKey, undefined} ->
-            [{end_key_gt, {EndKey}}];
-        {EndKey, EndKeyDocId} when InclusiveEnd ->
-            [{end_key, {EndKey, EndKeyDocId}}];
-        {EndKey, EndKeyDocId} ->
-            [{end_key_gt, {EndKey, EndKeyDocId}}]
+        {EndKey1, <<255>>} when not InclusiveEnd ->
+            % When inclusive_end=false we need to
+            % elide the default end_key_docid so as
+            % to not sort past the docids with the
+            % given end key.
+            [{end_key_gt, {EndKey1}}];
+        {EndKey1, EndKeyDocId} when not InclusiveEnd ->
+            [{end_key_gt, {EndKey1, EndKeyDocId}}];
+        {EndKey1, EndKeyDocId} when InclusiveEnd ->
+            [{end_key, {EndKey1, EndKeyDocId}}];
+        {EndKey1, EndKeyDocId} when InclusiveEnd ->
+            [{end_key_gt, {EndKey1, EndKeyDocId}}]
     end,
 
     [
         {dir, Direction},
-        {limit, Limit * 2},
+        {limit, Limit * 2 + Skip * 2},
         {streaming_mode, want_all}
     ] ++ StartKeyOpts ++ EndKeyOpts.
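
A note on the {limit, Limit * 2 + Skip * 2} option above: each logical view row is stored as two FDB key-value pairs (one for the row key and one for the row value, as written by couch_views_fdb), so a range read has to fetch twice as many raw KVs as the rows it wants, and skipped rows still have to be read before they can be dropped. For example, limit = 10 with skip = 2 sizes the range at 10 * 2 + 2 * 2 = 24 KVs, enough for the 12 rows the reader will walk.
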
 
diff --git a/src/couch_views/src/couch_views_sup.erl b/src/couch_views/src/couch_views_sup.erl
index da7d796..7650fdf 100644
--- a/src/couch_views/src/couch_views_sup.erl
+++ b/src/couch_views/src/couch_views_sup.erl
@@ -39,8 +39,8 @@ init([]) ->
     },
     Children = [
         #{
-            id => couch_views_worker_server,
-            start => {couch_views_worker_server, start_link, []}
+            id => couch_views_server,
+            start => {couch_views_server, start_link, []}
         }
     ],
     {ok, {Flags, Children}}.
diff --git a/src/couch_views/test/couch_views_encoding_test.erl b/src/couch_views/test/couch_views_encoding_test.erl
index a73cb42..9282265 100644
--- a/src/couch_views/test/couch_views_encoding_test.erl
+++ b/src/couch_views/test/couch_views_encoding_test.erl
@@ -35,12 +35,7 @@ correct_ordering_test() ->
         false,
         true,
 
-        %  Then numbers
-        % 1,
-        % 2,
-        % 3.0,
-        % 4,
-
+        % Then numbers
         1.0,
         2.0,
         3.0,
@@ -63,11 +58,11 @@ correct_ordering_test() ->
         % Member order does matter for collation
         {[{<<"b">>, 2.0}, {<<"a">>, 1.0}]},
         {[{<<"b">>, 2.0}, {<<"c">>, 2.0}]}
-
     ],
 
-    BinList = lists:map(fun couch_views_encoding:encode/1, Ordered),
-    SortedBinList = lists:sort(BinList),
-    DecodedBinList = lists:map(fun couch_views_encoding:decode/1,
-        SortedBinList),
-    ?assertEqual(Ordered, DecodedBinList).
+    BinList = [couch_views_encoding:encode(O, key) || O <- Ordered],
+    Random = [{rand:uniform(), Bin} || Bin <- BinList],
+    {_, Unsorted} = lists:unzip(lists:sort(Random)),
+    Sorted = lists:sort(Unsorted),
+    Decoded = [couch_views_encoding:decode(O) || O <- Sorted],
+    ?assertEqual(Ordered, Decoded).
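
The reworked test shuffles the encoded keys before sorting so the final assertion genuinely checks that the encoding's byte order matches the intended collation, rather than echoing the input order. The tag-sort-untag shuffle idiom used there, as a standalone sketch (module name is illustrative):

    -module(shuffle_sketch).
    -export([shuffle/1]).

    %% Shuffle a list by pairing each element with a random float,
    %% sorting on the float, and dropping the tags again.
    shuffle(List) ->
        Tagged = [{rand:uniform(), Item} || Item <- List],
        {_, Shuffled} = lists:unzip(lists:sort(Tagged)),
        Shuffled.
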
diff --git a/src/couch_views/test/couch_views_indexer_test.erl b/src/couch_views/test/couch_views_indexer_test.erl
index 2d192a6..e6dfdc4 100644
--- a/src/couch_views/test/couch_views_indexer_test.erl
+++ b/src/couch_views/test/couch_views_indexer_test.erl
@@ -12,232 +12,396 @@
 
 -module(couch_views_indexer_test).
 
--include_lib("couch/include/couch_eunit.hrl").
 -include_lib("eunit/include/eunit.hrl").
+-include_lib("couch/include/couch_db.hrl").
+-include_lib("couch/include/couch_eunit.hrl").
 -include_lib("couch_mrview/include/couch_mrview.hrl").
 
 
--define(TDEF(A), {atom_to_list(A), fun A/0}).
-
-setup() ->
-    test_util:start_couch([fabric]).
-
-
-teardown(State) ->
-    test_util:stop_couch(State).
-
-
-foreach_setup() ->
-    ok.
+-define(I_HEART_EUNIT(Tests), [{with, [T]} || T <- Tests]).
 
 
-foreach_teardown(_) ->
-    meck:unload().
-
-
-index_server_test_() ->
+indexer_test_() ->
     {
-        "Test Couch Views indexer",
+        "Test view indexing",
         {
             setup,
             fun setup/0,
-            fun teardown/1,
+            fun cleanup/1,
             {
                 foreach,
-                fun foreach_setup/0, fun foreach_teardown/1,
-                [
-                    ?TDEF(map_docs_no_results_for_deleted),
-                    ?TDEF(map_docs_returns_sorted_results),
-                    ?TDEF(write_doc_clears_for_deleted_doc),
-                    ?TDEF(write_doc_adds_for_new_doc),
-                    ?TDEF(write_doc_clears_and_sets_for_update),
-                    ?TDEF(write_doc_clears_for_no_new_update),
-                    ?TDEF(write_doc_clears_and_updates_duplicates)
-                ]
+                fun foreach_setup/0,
+                fun foreach_teardown/1,
+                ?I_HEART_EUNIT([
+                    fun indexed_empty_db/1,
+                    fun indexed_single_doc/1,
+                    fun updated_docs_are_reindexed/1,
+                    fun updated_docs_without_changes_are_reindexed/1,
+                    fun deleted_docs_not_indexed/1,
+                    fun deleted_docs_are_unindexed/1,
+                    fun multipe_docs_with_same_key/1,
+                    fun multipe_keys_from_same_doc/1,
+                    fun multipe_identical_keys_from_same_doc/1
+                ])
             }
-
         }
     }.
 
 
-map_docs_no_results_for_deleted() ->
-    DbName = ?tempdb,
+setup() ->
+    Ctx = test_util:start_couch([
+            fabric,
+            couch_jobs,
+            couch_views
+        ]),
+    Ctx.
+
+
+cleanup(Ctx) ->
+    test_util:stop_couch(Ctx).
 
+
+foreach_setup() ->
+    {ok, Db} = fabric2_db:create(?tempdb(), [{user_ctx, ?ADMIN_USER}]),
+    Db.
+
+
+foreach_teardown(Db) ->
+    ok = fabric2_db:delete(fabric2_db:name(Db), []).
+
+
+indexed_empty_db(Db) ->
     DDoc = create_ddoc(),
-    {ok, Mrst} = couch_views_util:ddoc_to_mrst(DbName, DDoc),
 
-    Doc = #{
-        id => <<"id">>,
-        sequence => <<1111>>,
-        rev_id => <<"1-123">>,
-        deleted => true
-    },
+    {ok, _} = fabric2_db:update_doc(Db, DDoc, []),
+    {ok, Out} = couch_views:query(
+            Db,
+            DDoc,
+            <<"map_fun1">>,
+            fun fold_fun/2,
+            [],
+            #mrargs{}
+        ),
 
-    meck:expect(couch_query_servers, start_doc_map, fun(_, _, _) ->
-        {ok, fake}
-    end),
+    ?assertEqual([], Out).
 
-    {Results, _} = couch_views_indexer:map_docs(Mrst, [Doc]),
 
-    [#{results := DocResult}] = Results,
-    ?assertEqual([], DocResult).
+indexed_single_doc(Db) ->
+    DDoc = create_ddoc(),
+    Doc1 = doc(0),
 
+    {ok, _} = fabric2_db:update_doc(Db, DDoc, []),
+    {ok, _} = fabric2_db:update_doc(Db, Doc1, []),
 
-map_docs_returns_sorted_results() ->
-    DbName = ?tempdb,
-    Doc = #{
-        id => <<"id">>,
-        sequence => <<1111>>,
-        rev_id => <<"1-123">>,
-        doc => doc(1)
-    },
+    {ok, Out} = couch_views:query(
+            Db,
+            DDoc,
+            <<"map_fun1">>,
+            fun fold_fun/2,
+            [],
+            #mrargs{}
+        ),
 
-    CompleteResult = [[{1, 1}], []],
+    ?assertEqual([{row, [
+            {id, <<"0">>},
+            {key, 0},
+            {value, 0}
+        ]}], Out).
 
+
+updated_docs_are_reindexed(Db) ->
     DDoc = create_ddoc(),
+    Doc1 = doc(0),
+
+    {ok, _} = fabric2_db:update_doc(Db, DDoc, []),
+    {ok, {Pos, Rev}} = fabric2_db:update_doc(Db, Doc1, []),
+
+    {ok, Out1} = couch_views:query(
+            Db,
+            DDoc,
+            <<"map_fun1">>,
+            fun fold_fun/2,
+            [],
+            #mrargs{}
+        ),
+
+    ?assertEqual([{row, [
+            {id, <<"0">>},
+            {key, 0},
+            {value, 0}
+        ]}], Out1),
+
+    Doc2 = Doc1#doc{
+        revs = {Pos, [Rev]},
+        body = {[{<<"val">>, 1}]}
+    },
+    {ok, _} = fabric2_db:update_doc(Db, Doc2, []),
+
+    {ok, Out2} = couch_views:query(
+            Db,
+            DDoc,
+            <<"map_fun1">>,
+            fun fold_fun/2,
+            [],
+            #mrargs{}
+        ),
+
+    ?assertEqual([{row, [
+            {id, <<"0">>},
+            {key, 1},
+            {value, 1}
+        ]}], Out2),
+
+    % Check that our id index is updated properly
+    % as well.
+    DbName = fabric2_db:name(Db),
     {ok, Mrst} = couch_views_util:ddoc_to_mrst(DbName, DDoc),
+    Sig = Mrst#mrst.sig,
+    Expect = [{0, [1]}, {1, []}],
+    fabric2_fdb:transactional(Db, fun(TxDb) ->
+        ?assertEqual(
+                Expect,
+                couch_views_fdb:get_view_keys(TxDb, Sig, <<"0">>)
+            )
+    end).
 
 
-    {Results, _} = couch_views_indexer:map_docs(Mrst, [Doc]),
-    [#{results := DocResult}] = Results,
-    ?assertEqual(CompleteResult, DocResult).
-
-
-write_doc_clears_for_deleted_doc() ->
-    TxDb = #{},
-    Sig = <<123>>,
-    Doc = #{deleted => true, id => 1},
-    ViewIds = [1],
-    OldIdxKey = old_key,
-
-    meck:expect(couch_views_fdb, get_id_index, 4, old_key),
-    meck:expect(couch_views_fdb, clear_id_index, 4, ok),
-    meck:expect(couch_views_fdb, clear_map_index, 5, ok),
-
-    couch_views_indexer:write_doc(TxDb, Sig, Doc, ViewIds),
-    ?assert(meck:called(couch_views_fdb, get_id_index, [TxDb, Sig, 1, 1])),
-    ?assert(meck:called(couch_views_fdb, clear_id_index, [TxDb, Sig, 1, 1])),
-    ?assert(meck:called(couch_views_fdb, clear_map_index,
-        [TxDb, Sig, 1, 1, OldIdxKey])),
-    ?assertEqual(length(meck:history(couch_views_fdb)), 3).
-
-
-write_doc_adds_for_new_doc() ->
-    TxDb = #{},
-    Sig = <<123>>,
-    Key = <<"key">>,
-    Value = 1,
-    Results = [{Key, Value}],
-    Doc = #{
-        deleted => false,
-        id => 1,
-        results => [Results]
-    },
-    ViewIds = [1],
-
-    meck:expect(couch_views_fdb, get_id_index, 4, not_found),
-    meck:expect(couch_views_fdb, set_id_index, 5, ok),
-    meck:expect(couch_views_fdb, set_map_index_results, 5, ok),
-
-    couch_views_indexer:write_doc(TxDb, Sig, Doc, ViewIds),
-    ?assert(meck:called(couch_views_fdb, get_id_index, [TxDb, Sig, 1, 1])),
-    ?assert(meck:called(couch_views_fdb, set_id_index,
-        [TxDb, Sig, 1, 1, Key])),
-    ?assert(meck:called(couch_views_fdb, set_map_index_results,
-        [TxDb, Sig, 1, 1, Results])),
-    ?assertEqual(length(meck:history(couch_views_fdb)), 3).
-
-
-write_doc_clears_and_sets_for_update() ->
-    TxDb = #{},
-    Sig = <<123>>,
-    Key = <<"key">>,
-    Value = 1,
-    Results = [{Key, Value}],
-    Doc = #{
-        deleted => false,
-        id => 1,
-        results => [Results]
+updated_docs_without_changes_are_reindexed(Db) ->
+    DDoc = create_ddoc(),
+    Doc1 = doc(0),
+
+    {ok, _} = fabric2_db:update_doc(Db, DDoc, []),
+    {ok, {Pos, Rev}} = fabric2_db:update_doc(Db, Doc1, []),
+
+    {ok, Out1} = couch_views:query(
+            Db,
+            DDoc,
+            <<"map_fun1">>,
+            fun fold_fun/2,
+            [],
+            #mrargs{}
+        ),
+
+    ?assertEqual([{row, [
+            {id, <<"0">>},
+            {key, 0},
+            {value, 0}
+        ]}], Out1),
+
+    Doc2 = Doc1#doc{
+        revs = {Pos, [Rev]},
+        body = {[{<<"val">>, 0}]}
     },
-    ViewIds = [1],
-    OldKey = oldkey,
-
-    meck:expect(couch_views_fdb, get_id_index, 4, OldKey),
-    meck:expect(couch_views_fdb, clear_id_index, 4, ok),
-    meck:expect(couch_views_fdb, clear_map_index, 5, ok),
-    meck:expect(couch_views_fdb, set_id_index, 5, ok),
-    meck:expect(couch_views_fdb, set_map_index_results, 5, ok),
-
-    couch_views_indexer:write_doc(TxDb, Sig, Doc, ViewIds),
-    ?assert(meck:called(couch_views_fdb, get_id_index, [TxDb, Sig, 1, 1])),
-    ?assert(meck:called(couch_views_fdb, clear_id_index, [TxDb, Sig, 1, 1])),
-    ?assert(meck:called(couch_views_fdb, clear_map_index,
-        [TxDb, Sig, 1, 1, OldKey])),
-    ?assert(meck:called(couch_views_fdb, set_id_index,
-        [TxDb, Sig, 1, 1, Key])),
-    ?assert(meck:called(couch_views_fdb, set_map_index_results,
-        [TxDb, Sig, 1, 1, Results])),
-    ?assertEqual(length(meck:history(couch_views_fdb)), 5).
-
-
-write_doc_clears_for_no_new_update() ->
-    TxDb = #{},
-    Sig = <<123>>,
-    Results = [],
-    Doc = #{
-        deleted => false,
-        id => 1,
-        results => [Results]
+    {ok, _} = fabric2_db:update_doc(Db, Doc2, []),
+
+    {ok, Out2} = couch_views:query(
+            Db,
+            DDoc,
+            <<"map_fun1">>,
+            fun fold_fun/2,
+            [],
+            #mrargs{}
+        ),
+
+    ?assertEqual([{row, [
+            {id, <<"0">>},
+            {key, 0},
+            {value, 0}
+        ]}], Out2),
+
+    % Check fdb directly to make sure we've also
+    % removed the id idx keys properly.
+    DbName = fabric2_db:name(Db),
+    {ok, Mrst} = couch_views_util:ddoc_to_mrst(DbName, DDoc),
+    Sig = Mrst#mrst.sig,
+    Expect = [{0, [0]}, {1, []}],
+    fabric2_fdb:transactional(Db, fun(TxDb) ->
+        ?assertEqual(
+                Expect,
+                couch_views_fdb:get_view_keys(TxDb, Sig, <<"0">>)
+            )
+    end).
+
+
+deleted_docs_not_indexed(Db) ->
+    DDoc = create_ddoc(),
+    Doc1 = doc(0),
+
+    {ok, _} = fabric2_db:update_doc(Db, DDoc, []),
+    {ok, {Pos, Rev}} = fabric2_db:update_doc(Db, Doc1, []),
+    Doc2 = Doc1#doc{
+        revs = {Pos, [Rev]},
+        deleted = true,
+        body = {[{<<"val">>, 1}]}
     },
-    ViewIds = [1],
-    OldKey = oldkey,
-
-    meck:expect(couch_views_fdb, get_id_index, 4, OldKey),
-    meck:expect(couch_views_fdb, clear_id_index, 4, ok),
-    meck:expect(couch_views_fdb, clear_map_index, 5, ok),
-
-    couch_views_indexer:write_doc(TxDb, Sig, Doc, ViewIds),
-    ?assert(meck:called(couch_views_fdb, get_id_index, [TxDb, Sig, 1, 1])),
-    ?assert(meck:called(couch_views_fdb, clear_id_index, [TxDb, Sig, 1, 1])),
-    ?assert(meck:called(couch_views_fdb, clear_map_index,
-        [TxDb, Sig, 1, 1, OldKey])),
-    ?assertEqual(length(meck:history(couch_views_fdb)), 3).
-
-
-write_doc_clears_and_updates_duplicates() ->
-    TxDb = #{},
-    Sig = <<123>>,
-    Key = <<"key">>,
-    Results = [{Key, 1}, {Key, 2}],
-    Doc = #{
-        deleted => false,
-        id => 1,
-        results => [Results]
+    {ok, _} = fabric2_db:update_doc(Db, Doc2, []),
+
+    {ok, Out} = couch_views:query(
+            Db,
+            DDoc,
+            <<"map_fun1">>,
+            fun fold_fun/2,
+            [],
+            #mrargs{}
+        ),
+
+    ?assertEqual([], Out).
+
+
+deleted_docs_are_unindexed(Db) ->
+    DDoc = create_ddoc(),
+    Doc1 = doc(0),
+
+    {ok, _} = fabric2_db:update_doc(Db, DDoc, []),
+    {ok, {Pos, Rev}} = fabric2_db:update_doc(Db, Doc1, []),
+
+    {ok, Out1} = couch_views:query(
+            Db,
+            DDoc,
+            <<"map_fun1">>,
+            fun fold_fun/2,
+            [],
+            #mrargs{}
+        ),
+
+    ?assertEqual([{row, [
+            {id, <<"0">>},
+            {key, 0},
+            {value, 0}
+        ]}], Out1),
+
+    Doc2 = Doc1#doc{
+        revs = {Pos, [Rev]},
+        deleted = true,
+        body = {[{<<"val">>, 1}]}
     },
-    ViewIds = [1],
-    OldKey = oldkey,
-
-    meck:expect(couch_views_fdb, get_id_index, 4, OldKey),
-    meck:expect(couch_views_fdb, clear_id_index, 4, ok),
-    meck:expect(couch_views_fdb, clear_map_index, 5, ok),
-    meck:expect(couch_views_fdb, set_id_index, 5, ok),
-    meck:expect(couch_views_fdb, set_map_index_results, 5, ok),
-
-    couch_views_indexer:write_doc(TxDb, Sig, Doc, ViewIds),
-    ?assertEqual(meck:num_calls(couch_views_fdb, get_id_index,
-        [TxDb, Sig, 1, 1]), 2),
-    ?assertEqual(meck:num_calls(couch_views_fdb, clear_id_index,
-        [TxDb, Sig, 1, 1]), 1),
-    ?assertEqual(meck:num_calls(couch_views_fdb, set_id_index,
-        [TxDb, Sig, 1, 1, Key]), 2),
-    ?assertEqual(meck:num_calls(couch_views_fdb, clear_map_index,
-        [TxDb, Sig, 1, 1, OldKey]), 1),
-    ?assertEqual(meck:num_calls(couch_views_fdb, set_map_index_results,
-        [TxDb, Sig, 1, 1, Results]), 2),
-    ?assertEqual(length(meck:history(couch_views_fdb)), 8).
+    {ok, _} = fabric2_db:update_doc(Db, Doc2, []),
+
+    {ok, Out2} = couch_views:query(
+            Db,
+            DDoc,
+            <<"map_fun1">>,
+            fun fold_fun/2,
+            [],
+            #mrargs{}
+        ),
+
+    ?assertEqual([], Out2),
+
+    % Check fdb directly to make sure we've also
+    % removed the id idx keys properly.
+    DbName = fabric2_db:name(Db),
+    {ok, Mrst} = couch_views_util:ddoc_to_mrst(DbName, DDoc),
+    Sig = Mrst#mrst.sig,
+    fabric2_fdb:transactional(Db, fun(TxDb) ->
+        ?assertEqual([], couch_views_fdb:get_view_keys(TxDb, Sig, <<"0">>))
+    end).
+
+
+multipe_docs_with_same_key(Db) ->
+    DDoc = create_ddoc(),
+    Doc1 = doc(0, 1),
+    Doc2 = doc(1, 1),
+
+    {ok, _} = fabric2_db:update_doc(Db, DDoc, []),
+    {ok, _} = fabric2_db:update_docs(Db, [Doc1, Doc2], []),
+
+    {ok, Out} = couch_views:query(
+            Db,
+            DDoc,
+            <<"map_fun1">>,
+            fun fold_fun/2,
+            [],
+            #mrargs{}
+        ),
+
+    ?assertEqual([
+            {row, [
+                {id, <<"0">>},
+                {key, 1},
+                {value, 1}
+            ]},
+            {row, [
+                {id, <<"1">>},
+                {key, 1},
+                {value, 1}
+            ]}
+        ], Out).
+
+
+multipe_keys_from_same_doc(Db) ->
+    DDoc = create_ddoc(multi_emit_different),
+    Doc = doc(0, 1),
+
+    {ok, _} = fabric2_db:update_doc(Db, DDoc, []),
+    {ok, _} = fabric2_db:update_doc(Db, Doc, []),
+
+    {ok, Out} = couch_views:query(
+            Db,
+            DDoc,
+            <<"map_fun1">>,
+            fun fold_fun/2,
+            [],
+            #mrargs{}
+        ),
+
+    ?assertEqual([
+            {row, [
+                {id, <<"0">>},
+                {key, 1},
+                {value, 1}
+            ]},
+            {row, [
+                {id, <<"0">>},
+                {key, <<"0">>},
+                {value, <<"0">>}
+            ]}
+        ], Out).
+
+
+multipe_identical_keys_from_same_doc(Db) ->
+    DDoc = create_ddoc(multi_emit_same),
+    Doc = doc(0, 1),
+
+    {ok, _} = fabric2_db:update_doc(Db, DDoc, []),
+    {ok, _} = fabric2_db:update_doc(Db, Doc, []),
+
+    {ok, Out} = couch_views:query(
+            Db,
+            DDoc,
+            <<"map_fun1">>,
+            fun fold_fun/2,
+            [],
+            #mrargs{}
+        ),
+
+    ?assertEqual([
+            {row, [
+                {id, <<"0">>},
+                {key, 1},
+                {value, 2}
+            ]},
+            {row, [
+                {id, <<"0">>},
+                {key, 1},
+                {value, 1}
+            ]}
+        ], Out).
+
+
+fold_fun({meta, _Meta}, Acc) ->
+    {ok, Acc};
+fold_fun({row, _} = Row, Acc) ->
+    {ok, [Row | Acc]};
+fold_fun(complete, Acc) ->
+    {ok, lists:reverse(Acc)}.
 
 
 create_ddoc() ->
+    create_ddoc(simple).
+
+
+create_ddoc(simple) ->
     couch_doc:from_json_obj({[
         {<<"_id">>, <<"_design/bar">>},
         {<<"views">>, {[
@@ -248,11 +412,47 @@ create_ddoc() ->
                 {<<"map">>, <<"function(doc) {}">>}
             ]}}
         ]}}
+    ]});
+
+create_ddoc(multi_emit_different) ->
+    couch_doc:from_json_obj({[
+        {<<"_id">>, <<"_design/bar">>},
+        {<<"views">>, {[
+            {<<"map_fun1">>, {[
+                {<<"map">>, <<"function(doc) { "
+                    "emit(doc._id, doc._id); "
+                    "emit(doc.val, doc.val); "
+                "}">>}
+            ]}},
+            {<<"map_fun2">>, {[
+                {<<"map">>, <<"function(doc) {}">>}
+            ]}}
+        ]}}
+    ]});
+
+create_ddoc(multi_emit_same) ->
+    couch_doc:from_json_obj({[
+        {<<"_id">>, <<"_design/bar">>},
+        {<<"views">>, {[
+            {<<"map_fun1">>, {[
+                {<<"map">>, <<"function(doc) { "
+                    "emit(doc.val, doc.val * 2); "
+                    "emit(doc.val, doc.val); "
+                "}">>}
+            ]}},
+            {<<"map_fun2">>, {[
+                {<<"map">>, <<"function(doc) {}">>}
+            ]}}
+        ]}}
     ]}).
 
 
 doc(Id) ->
+    doc(Id, Id).
+
+
+doc(Id, Val) ->
     couch_doc:from_json_obj({[
         {<<"_id">>, list_to_binary(integer_to_list(Id))},
-        {<<"val">>, Id}
+        {<<"val">>, Val}
     ]}).
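
The ?I_HEART_EUNIT macro near the top of this file wraps each test fun as {with, [T]}, which is eunit's way of handing the foreach fixture's setup result (here the Db handle from foreach_setup/0) to a one-arity test function. A minimal sketch of the same fixture shape, with hypothetical module and test names:

    -module(with_fixture_sketch).
    -include_lib("eunit/include/eunit.hrl").

    %% Each test receives whatever the setup fun returned; {with, [Fun]}
    %% threads that value into Fun/1.
    sketch_test_() ->
        {
            foreach,
            fun() -> make_ref() end,      % per-test setup
            fun(_Ref) -> ok end,          % per-test teardown
            [{with, [T]} || T <- [
                fun ref_is_a_ref/1,
                fun ref_is_not_a_pid/1
            ]]
        }.

    ref_is_a_ref(Ref) ->
        ?assert(is_reference(Ref)).

    ref_is_not_a_pid(Ref) ->
        ?assertNot(is_pid(Ref)).
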
diff --git a/src/couch_views/test/couch_views_map_test.erl b/src/couch_views/test/couch_views_map_test.erl
index e7be521..ab3000e 100644
--- a/src/couch_views/test/couch_views_map_test.erl
+++ b/src/couch_views/test/couch_views_map_test.erl
@@ -76,7 +76,7 @@ should_map() ->
 
 
 should_map_with_startkey() ->
-    Result = run_query(<<"baz">>, #{start_key => 4}),
+    Result = run_query(<<"baz">>, #{start_key => 4}, true),
     Expect = {ok, [
         {row, [{id, <<"4">>}, {key, 4}, {value, 4}]},
         {row, [{id, <<"5">>}, {key, 5}, {value, 5}]},
@@ -233,7 +233,7 @@ should_map_with_startkey_with_key_array() ->
 
 
 should_map_with_startkey_and_endkey_with_key_array() ->
-    Rows = [
+    Rows1 = [
         {row, [{id, <<"4">>}, {key, [<<"4">>, 4]}, {value, 4}]},
         {row, [{id, <<"5">>}, {key, [<<"5">>, 5]}, {value, 5}]},
         {row, [{id, <<"6">>}, {key, [<<"6">>, 6]}, {value, 6}]},
@@ -241,12 +241,21 @@ should_map_with_startkey_and_endkey_with_key_array() ->
         {row, [{id, <<"8">>}, {key, [<<"8">>, 8]}, {value, 8}]}
     ],
 
+    Rows2 = [
+        {row, [{id, <<"4">>}, {key, [<<"4">>, 4]}, {value, 4}]},
+        {row, [{id, <<"5">>}, {key, [<<"5">>, 5]}, {value, 5}]},
+        {row, [{id, <<"6">>}, {key, [<<"6">>, 6]}, {value, 6}]},
+        {row, [{id, <<"7">>}, {key, [<<"7">>, 7]}, {value, 7}]},
+        {row, [{id, <<"8">>}, {key, [<<"8">>, 8]}, {value, 8}]},
+        {row, [{id, <<"9">>}, {key, [<<"9">>, 9]}, {value, 9}]}
+    ],
+
     Result = run_query(<<"boom">>, #{
         start_key => [<<"4">>],
         end_key => [<<"8">>, []]
     }),
 
-    ?assertEqual({ok, Rows}, Result),
+    ?assertEqual({ok, Rows1}, Result),
 
     ResultRev = run_query(<<"boom">>, #{
         start_key => [<<"8">>, []],
@@ -254,7 +263,7 @@ should_map_with_startkey_and_endkey_with_key_array() ->
         direction => rev
     }),
 
-    ?assertEqual({ok, lists:reverse(Rows)}, ResultRev),
+    ?assertEqual({ok, lists:reverse(Rows1)}, ResultRev),
 
     ResultRev2 = run_query(<<"boom">>, #{
         start_key => [<<"9">>, 9],
@@ -263,7 +272,21 @@ should_map_with_startkey_and_endkey_with_key_array() ->
         inclusive_end => false
     }),
 
-    ?assertEqual({ok, lists:reverse(Rows)}, ResultRev2).
+    % Here, [<<"4">>] is less than [<<"4">>, 4] so we
+    % expect rows 9-4
+    ?assertEqual({ok, lists:reverse(Rows2)}, ResultRev2),
+
+    ResultRev2 = run_query(<<"boom">>, #{
+        start_key => [<<"9">>, 9],
+        end_key => [<<"4">>, 4],
+        direction => rev,
+        inclusive_end => false
+    }),
+
+    % Here, specifying [<<"4">>, 4] as the key will prevent
+    % us from including that row which leaves rows 9-5
+    ?assertEqual({ok, lists:reverse(lists:nthtail(1, Rows2))}, ResultRev2).
+
 
 
 should_map_empty_views() ->
@@ -330,7 +353,7 @@ should_map_update_is_false() ->
         start_key => 8
     },
 
-    Result1 = couch_views:map_query(Db, DDoc, Idx, fun default_cb/2,
+    Result1 = couch_views:query(Db, DDoc, Idx, fun default_cb/2,
         [], Args1),
     ?assertEqual(Expect, Result1),
 
@@ -342,11 +365,11 @@ should_map_update_is_false() ->
         update => false
     },
 
-    Result2 = couch_views:map_query(Db, DDoc, Idx, fun default_cb/2,
+    Result2 = couch_views:query(Db, DDoc, Idx, fun default_cb/2,
         [], Args2),
     ?assertEqual(Expect, Result2),
 
-    Result3 = couch_views:map_query(Db, DDoc, Idx, fun default_cb/2,
+    Result3 = couch_views:query(Db, DDoc, Idx, fun default_cb/2,
         [], Args1),
     ?assertEqual(Expect1, Result3).
 
@@ -373,7 +396,7 @@ should_map_update_is_lazy() ->
         update => lazy
     },
 
-    Result1 = couch_views:map_query(Db, DDoc, Idx, fun default_cb/2,
+    Result1 = couch_views:query(Db, DDoc, Idx, fun default_cb/2,
         [], Args1),
     ?assertEqual({ok, []}, Result1),
 
@@ -386,7 +409,7 @@ should_map_update_is_lazy() ->
         update => false
     },
 
-    Result2 = couch_views:map_query(Db, DDoc, Idx, fun default_cb/2,
+    Result2 = couch_views:query(Db, DDoc, Idx, fun default_cb/2,
         [], Args2),
     ?assertEqual(Expect, Result2).
 
@@ -413,12 +436,21 @@ should_map_update_is_lazy() ->
 
 
 run_query(Idx, Args) ->
+    run_query(Idx, Args, false).
+
+
+run_query(Idx, Args, DebugCluster) ->
     DbName = ?tempdb(),
     {ok, Db} = fabric2_db:create(DbName, [{user_ctx, ?ADMIN_USER}]),
     DDoc = create_ddoc(),
     Docs = make_docs(10),
     fabric2_db:update_docs(Db, [DDoc | Docs]),
-    couch_views:map_query(Db, DDoc, Idx, fun default_cb/2, [], Args).
+    if not DebugCluster -> ok; true ->
+        %% couch_views:query(Db, DDoc, Idx, fun default_cb/2, [], #{}),
+        %% fabric2_fdb:debug_cluster()
+        ok
+    end,
+    couch_views:query(Db, DDoc, Idx, fun default_cb/2, [], Args).
 
 
 default_cb(complete, Acc) ->
@@ -427,6 +459,8 @@ default_cb({final, Info}, []) ->
     {ok, [Info]};
 default_cb({final, _}, Acc) ->
     {ok, Acc};
+default_cb({meta, _}, Acc) ->
+    {ok, Acc};
 default_cb(ok, ddoc_updated) ->
     {ok, ddoc_updated};
 default_cb(Row, Acc) ->
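
The reverse-query assertions added to should_map_with_startkey_and_endkey_with_key_array/0 rest on the collation rule that a shorter array sorts before a longer array sharing the same prefix, so an exclusive end_key of [<<"4">>] still admits the row keyed [<<"4">>, 4], while an exclusive end_key of [<<"4">>, 4] drops it. A hedged check of that assumption (not from this patch; it presumes the key encoding preserves that array ordering byte-for-byte, which is what the reader relies on):

    %% Could sit alongside the tests above; verifies the shorter-prefix-
    %% sorts-first assumption behind the two inclusive_end=false cases.
    array_prefix_order_sketch() ->
        Short = couch_views_encoding:encode([<<"4">>], key),
        Long = couch_views_encoding:encode([<<"4">>, 4], key),
        ?assert(Short < Long).
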