You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@couchdb.apache.org by da...@apache.org on 2014/02/05 00:44:19 UTC
[06/44] Remove src/couch
http://git-wip-us.apache.org/repos/asf/couchdb/blob/ed98610c/src/couch/src/couch_query_servers.erl
----------------------------------------------------------------------
diff --git a/src/couch/src/couch_query_servers.erl b/src/couch/src/couch_query_servers.erl
deleted file mode 100644
index 4fef028..0000000
--- a/src/couch/src/couch_query_servers.erl
+++ /dev/null
@@ -1,479 +0,0 @@
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-% http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
--module(couch_query_servers).
-
--export([try_compile/4]).
--export([start_doc_map/3, map_docs/2, map_docs_raw/2, stop_doc_map/1, raw_to_ejson/1]).
--export([reduce/3, rereduce/3,validate_doc_update/5]).
--export([filter_docs/5]).
--export([filter_view/3]).
-
--export([with_ddoc_proc/2, proc_prompt/2, ddoc_prompt/3, ddoc_proc_prompt/3, json_doc/1]).
-
-% For 210-os-proc-pool.t
--export([get_os_process/1, ret_os_process/1]).
-
--include_lib("couch/include/couch_db.hrl").
-
--define(SUMERROR, <<"The _sum function requires that map values be numbers, "
- "arrays of numbers, or objects, not '~p'. Objects cannot be mixed with other "
- "data structures. Objects can be arbitrarily nested, provided that the values "
- "for all fields are themselves numbers, arrays of numbers, or objects.">>).
-
--define(STATERROR, <<"The _stats function requires that map values be numbers "
- "or arrays of numbers, not '~p'">>).
-
-% https://gist.github.com/df10284c76d85f988c3f
--define(SUMREGEX, {re_pattern,3,0,<<69,82,67,80,194,0,0,0,8,0,0,0,5,0,0,0,3,0,
-2,0,0,0,125,2,48,0,9,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,118,97,108,
-117,101,115,0,93,0,130,65,9,27,102,27,117,27,110,27,99,27,116,27,105,27,111,27,
-110,102,94,0,9,0,1,66,9,58,11,84,0,9,65,9,27,40,65,9,58,11,65,9,27,44,56,9,94,
-0,7,0,2,58,11,84,0,7,102,94,0,15,0,3,65,9,27,44,65,9,58,11,56,9,84,0,15,65,9,
-27,41,65,9,27,123,65,9,27,114,27,101,27,116,27,117,27,114,27,110,66,9,27,115,
-27,117,27,109,65,9,27,40,56,9,80,0,2,65,9,27,41,56,9,34,59,65,9,27,125,56,9,84,
-0,130,0,0,0,0>>}).
-
-% https://gist.github.com/cbd73238b671325f5a6f
--define(COUNTREGEX, {re_pattern,8,0,<<69,82,67,80,30,2,0,0,8,0,0,0,5,0,0,0,8,0,
-4,0,0,0,125,2,48,0,11,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4,114,101,
-114,101,100,117,99,101,0,0,2,118,97,108,117,101,115,0,101,0,93,1,206,65,9,27,
-102,27,117,27,110,27,99,27,116,27,105,27,111,27,110,102,94,0,9,0,1,66,9,58,11,
-84,0,9,65,9,27,40,65,9,58,11,65,9,27,44,56,9,94,0,7,0,2,58,11,84,0,7,102,94,0,
-23,0,3,65,9,27,44,56,9,94,0,7,0,4,58,11,84,0,7,56,9,84,0,23,65,9,27,41,65,9,27,
-123,56,9,94,0,136,0,5,94,0,128,0,6,27,105,27,102,65,9,27,40,56,9,80,0,4,65,9,
-27,41,56,9,34,123,65,9,27,114,27,101,27,116,27,117,27,114,27,110,66,9,27,115,
-27,117,27,109,65,9,27,40,56,9,80,0,2,65,9,27,41,56,9,34,59,56,9,34,125,65,9,27,
-101,27,108,27,115,27,101,56,9,34,123,65,9,27,114,27,101,27,116,27,117,27,114,
-27,110,58,9,80,0,2,65,9,27,46,65,9,27,108,27,101,27,110,27,103,27,116,27,104,
-56,9,34,59,56,9,34,125,84,0,128,83,0,138,94,0,132,0,7,27,105,27,102,65,9,27,40,
-65,9,27,33,56,9,80,0,4,65,9,27,41,56,9,34,123,65,9,27,114,27,101,27,116,27,117,
-27,114,27,110,58,9,80,0,2,65,9,27,46,65,9,27,108,27,101,27,110,27,103,27,116,
-27,104,56,9,34,59,56,9,34,125,65,9,27,101,27,108,27,115,27,101,56,9,34,123,65,
-9,27,114,27,101,27,116,27,117,27,114,27,110,66,9,27,115,27,117,27,109,65,9,27,
-40,56,9,80,0,2,65,9,27,41,56,9,34,59,56,9,34,125,84,0,132,83,0,84,94,0,78,0,8,
-27,114,27,101,27,116,27,117,27,114,27,110,58,9,80,0,4,65,9,27,63,65,9,27,115,
-27,117,27,109,65,9,27,40,56,9,80,0,2,65,9,27,41,65,9,27,58,56,9,80,0,2,65,9,27,
-46,65,9,27,108,27,101,27,110,27,103,27,116,27,104,56,9,34,59,84,0,78,84,1,102,
-65,9,27,125,56,9,84,1,206,0,0,0,0,0,0,0>>}).
-
-
-try_compile(Proc, FunctionType, FunctionName, FunctionSource) ->
- try
- proc_prompt(Proc, [<<"add_fun">>, FunctionSource]),
- ok
- catch {compilation_error, E} ->
- Fmt = "Compilation of the ~s function in the '~s' view failed: ~s",
- Msg = io_lib:format(Fmt, [FunctionType, FunctionName, E]),
- throw({compilation_error, Msg})
- end.
-
-start_doc_map(Lang, Functions, Lib) ->
- Proc = get_os_process(Lang),
- case Lib of
- {[]} -> ok;
- Lib ->
- true = proc_prompt(Proc, [<<"add_lib">>, Lib])
- end,
- lists:foreach(fun(FunctionSource) ->
- true = proc_prompt(Proc, [<<"add_fun">>, FunctionSource])
- end, Functions),
- {ok, Proc}.
-
-map_docs(Proc, Docs) ->
- % send the documents
- Results = lists:map(
- fun(Doc) ->
- Json = couch_doc:to_json_obj(Doc, []),
-
- FunsResults = proc_prompt(Proc, [<<"map_doc">>, Json]),
- % the results are a json array of function map yields like this:
- % [FunResults1, FunResults2 ...]
- % where funresults is are json arrays of key value pairs:
- % [[Key1, Value1], [Key2, Value2]]
- % Convert the key, value pairs to tuples like
- % [{Key1, Value1}, {Key2, Value2}]
- lists:map(
- fun(FunRs) ->
- [list_to_tuple(FunResult) || FunResult <- FunRs]
- end,
- FunsResults)
- end,
- Docs),
- {ok, Results}.
-
-map_docs_raw(Proc, DocList) ->
- {Mod, Fun} = Proc#proc.prompt_many_fun,
- CommandList = lists:map(
- fun(Doc) ->
- EJson = couch_doc:to_json_obj(Doc, []),
- [<<"map_doc">>, EJson]
- end,
- DocList),
- Mod:Fun(Proc#proc.pid, CommandList).
-
-stop_doc_map(nil) ->
- ok;
-stop_doc_map(Proc) ->
- ok = ret_os_process(Proc).
-
-group_reductions_results([]) ->
- [];
-group_reductions_results(List) ->
- {Heads, Tails} = lists:foldl(
- fun([H|T], {HAcc,TAcc}) ->
- {[H|HAcc], [T|TAcc]}
- end, {[], []}, List),
- case Tails of
- [[]|_] -> % no tails left
- [Heads];
- _ ->
- [Heads | group_reductions_results(Tails)]
- end.
-
-rereduce(_Lang, [], _ReducedValues) ->
- {ok, []};
-rereduce(Lang, RedSrcs, ReducedValues) ->
- Grouped = group_reductions_results(ReducedValues),
- Results = lists:zipwith(
- fun
- (<<"_", _/binary>> = FunSrc, Values) ->
- {ok, [Result]} = builtin_reduce(rereduce, [FunSrc], [[[], V] || V <- Values], []),
- Result;
- (FunSrc, Values) ->
- os_rereduce(Lang, [FunSrc], Values)
- end, replace_builtin_equivalents(RedSrcs), Grouped),
- {ok, Results}.
-
-reduce(_Lang, [], _KVs) ->
- {ok, []};
-reduce(Lang, RedSrcs0, KVs) ->
- RedSrcs = replace_builtin_equivalents(RedSrcs0),
- {OsRedSrcs, BuiltinReds} = lists:partition(fun
- (<<"_", _/binary>>) -> false;
- (_OsFun) -> true
- end, RedSrcs),
- {ok, OsResults} = os_reduce(Lang, OsRedSrcs, KVs),
- {ok, BuiltinResults} = builtin_reduce(reduce, BuiltinReds, KVs, []),
- recombine_reduce_results(RedSrcs, OsResults, BuiltinResults, []).
-
-replace_builtin_equivalents([<<"_", _/binary>> = R | Rest]) ->
- [R | replace_builtin_equivalents(Rest)];
-replace_builtin_equivalents([OsFun | Rest]) ->
- case re:run(OsFun, ?SUMREGEX) of nomatch ->
- case re:run(OsFun, ?COUNTREGEX) of nomatch ->
- [OsFun | replace_builtin_equivalents(Rest)];
- {match, _} ->
- [<<"_count">> | replace_builtin_equivalents(Rest)]
- end;
- {match, _} ->
- [<<"_sum">> | replace_builtin_equivalents(Rest)]
- end;
-replace_builtin_equivalents([]) ->
- [].
-
-recombine_reduce_results([], [], [], Acc) ->
- {ok, lists:reverse(Acc)};
-recombine_reduce_results([<<"_", _/binary>>|RedSrcs], OsResults, [BRes|BuiltinResults], Acc) ->
- recombine_reduce_results(RedSrcs, OsResults, BuiltinResults, [BRes|Acc]);
-recombine_reduce_results([_OsFun|RedSrcs], [OsR|OsResults], BuiltinResults, Acc) ->
- recombine_reduce_results(RedSrcs, OsResults, BuiltinResults, [OsR|Acc]).
-
-os_reduce(_Lang, [], _KVs) ->
- {ok, []};
-os_reduce(Lang, OsRedSrcs, KVs) ->
- Proc = get_os_process(Lang),
- OsResults = try proc_prompt(Proc, [<<"reduce">>, OsRedSrcs, KVs]) of
- [true, Reductions] -> Reductions
- after
- ok = ret_os_process(Proc)
- end,
- {ok, OsResults}.
-
-os_rereduce(Lang, OsRedSrcs, KVs) ->
- Proc = get_os_process(Lang),
- try proc_prompt(Proc, [<<"rereduce">>, OsRedSrcs, KVs]) of
- [true, [Reduction]] -> Reduction
- after
- ok = ret_os_process(Proc)
- end.
-
-
-builtin_reduce(_Re, [], _KVs, Acc) ->
- {ok, lists:reverse(Acc)};
-builtin_reduce(Re, [<<"_sum",_/binary>>|BuiltinReds], KVs, Acc) ->
- Sum = builtin_sum_rows(KVs),
- builtin_reduce(Re, BuiltinReds, KVs, [Sum|Acc]);
-builtin_reduce(reduce, [<<"_count",_/binary>>|BuiltinReds], KVs, Acc) ->
- Count = length(KVs),
- builtin_reduce(reduce, BuiltinReds, KVs, [Count|Acc]);
-builtin_reduce(rereduce, [<<"_count",_/binary>>|BuiltinReds], KVs, Acc) ->
- Count = builtin_sum_rows(KVs),
- builtin_reduce(rereduce, BuiltinReds, KVs, [Count|Acc]);
-builtin_reduce(Re, [<<"_stats",_/binary>>|BuiltinReds], KVs, Acc) ->
- Stats = builtin_stats(Re, KVs),
- builtin_reduce(Re, BuiltinReds, KVs, [Stats|Acc]).
-
-builtin_sum_rows(KVs) ->
- lists:foldl(fun([_Key, Value], Acc) -> sum_values(Value, Acc) end, 0, KVs).
-
-sum_values({Props}, 0) ->
- {Props};
-sum_values({Props}, {AccProps}) ->
- {sum_objects(lists:sort(Props), lists:sort(AccProps))};
-sum_values(Value, Acc) when is_number(Value), is_number(Acc) ->
- Acc + Value;
-sum_values(Value, Acc) when is_list(Value), is_list(Acc) ->
- sum_arrays(Acc, Value);
-sum_values(Value, Acc) when is_number(Value), is_list(Acc) ->
- sum_arrays(Acc, [Value]);
-sum_values(Value, Acc) when is_list(Value), is_number(Acc) ->
- sum_arrays([Acc], Value);
-sum_values(Else, _Acc) ->
- throw_sum_error(Else).
-
-sum_objects([{K1, V1} | Rest1], [{K1, V2} | Rest2]) ->
- [{K1, sum_values(V1, V2)} | sum_objects(Rest1, Rest2)];
-sum_objects([{K1, V1} | Rest1], [{K2, V2} | Rest2]) when K1 < K2 ->
- [{K1, V1} | sum_objects(Rest1, [{K2, V2} | Rest2])];
-sum_objects([{K1, V1} | Rest1], [{K2, V2} | Rest2]) when K1 > K2 ->
- [{K2, V2} | sum_objects([{K1, V1} | Rest1], Rest2)];
-sum_objects([], Rest) ->
- Rest;
-sum_objects(Rest, []) ->
- Rest.
-
-sum_arrays([], []) ->
- [];
-sum_arrays([_|_]=Xs, []) ->
- Xs;
-sum_arrays([], [_|_]=Ys) ->
- Ys;
-sum_arrays([X|Xs], [Y|Ys]) when is_number(X), is_number(Y) ->
- [X+Y | sum_arrays(Xs,Ys)];
-sum_arrays(Else, _) ->
- throw_sum_error(Else).
-
-builtin_stats(_, []) ->
- {[{sum,0}, {count,0}, {min,0}, {max,0}, {sumsqr,0}]};
-builtin_stats(_, [[_,First]|Rest]) ->
- Unpacked = lists:foldl(fun([_Key, Value], Acc) -> stat_values(Value, Acc) end,
- build_initial_accumulator(First), Rest),
- pack_stats(Unpacked).
-
-stat_values(Value, Acc) when is_list(Value), is_list(Acc) ->
- lists:zipwith(fun stat_values/2, Value, Acc);
-stat_values({PreRed}, Acc) when is_list(PreRed) ->
- stat_values(unpack_stats({PreRed}), Acc);
-stat_values(Value, Acc) when is_number(Value) ->
- stat_values({Value, 1, Value, Value, Value*Value}, Acc);
-stat_values(Value, Acc) when is_number(Acc) ->
- stat_values(Value, {Acc, 1, Acc, Acc, Acc*Acc});
-stat_values(Value, Acc) when is_tuple(Value), is_tuple(Acc) ->
- {Sum0, Cnt0, Min0, Max0, Sqr0} = Value,
- {Sum1, Cnt1, Min1, Max1, Sqr1} = Acc,
- {
- Sum0 + Sum1,
- Cnt0 + Cnt1,
- erlang:min(Min0, Min1),
- erlang:max(Max0, Max1),
- Sqr0 + Sqr1
- };
-stat_values(Else, _Acc) ->
- throw_stat_error(Else).
-
-build_initial_accumulator(L) when is_list(L) ->
- [build_initial_accumulator(X) || X <- L];
-build_initial_accumulator(X) when is_number(X) ->
- {X, 1, X, X, X*X};
-build_initial_accumulator({Props}) ->
- unpack_stats({Props});
-build_initial_accumulator(Else) ->
- Msg = io_lib:format("non-numeric _stats input: ~w", [Else]),
- throw({invalid_value, iolist_to_binary(Msg)}).
-
-unpack_stats({PreRed}) when is_list(PreRed) ->
- {
- get_number(<<"sum">>, PreRed),
- get_number(<<"count">>, PreRed),
- get_number(<<"min">>, PreRed),
- get_number(<<"max">>, PreRed),
- get_number(<<"sumsqr">>, PreRed)
- }.
-
-pack_stats({Sum, Cnt, Min, Max, Sqr}) ->
- {[{<<"sum">>,Sum}, {<<"count">>,Cnt}, {<<"min">>,Min}, {<<"max">>,Max}, {<<"sumsqr">>,Sqr}]};
-pack_stats(Stats) when is_list(Stats) ->
- lists:map(fun pack_stats/1, Stats).
-
-get_number(Key, Props) ->
- case couch_util:get_value(Key, Props) of
- X when is_number(X) ->
- X;
- undefined when is_binary(Key) ->
- get_number(binary_to_atom(Key, latin1), Props);
- undefined ->
- Msg = io_lib:format("user _stats input missing required field ~s (~p)",
- [Key, Props]),
- throw({invalid_value, iolist_to_binary(Msg)});
- Else ->
- Msg = io_lib:format("non-numeric _stats input received for ~s: ~w",
- [Key, Else]),
- throw({invalid_value, iolist_to_binary(Msg)})
- end.
-
-% use the function stored in ddoc.validate_doc_update to test an update.
-validate_doc_update(DDoc, EditDoc, DiskDoc, Ctx, SecObj) ->
- JsonEditDoc = couch_doc:to_json_obj(EditDoc, [revs]),
- JsonDiskDoc = json_doc(DiskDoc),
- case ddoc_prompt(DDoc, [<<"validate_doc_update">>], [JsonEditDoc, JsonDiskDoc, Ctx, SecObj]) of
- 1 ->
- ok;
- {[{<<"forbidden">>, Message}]} ->
- throw({forbidden, Message});
- {[{<<"unauthorized">>, Message}]} ->
- throw({unauthorized, Message});
- Message when is_binary(Message) ->
- throw({unknown_error, Message})
- end.
-
-json_doc(nil) -> null;
-json_doc(Doc) ->
- couch_doc:to_json_obj(Doc, [revs]).
-
-filter_view(DDoc, VName, Docs) ->
- JsonDocs = [couch_doc:to_json_obj(Doc, [revs]) || Doc <- Docs],
- [true, Passes] = ddoc_prompt(DDoc, [<<"views">>, VName, <<"map">>], [JsonDocs]),
- {ok, Passes}.
-
-filter_docs(Req, Db, DDoc, FName, Docs) ->
- JsonReq = case Req of
- {json_req, JsonObj} ->
- JsonObj;
- #httpd{} = HttpReq ->
- couch_httpd_external:json_req_obj(HttpReq, Db)
- end,
- JsonDocs = [couch_doc:to_json_obj(Doc, [revs]) || Doc <- Docs],
- [true, Passes] = ddoc_prompt(DDoc, [<<"filters">>, FName],
- [JsonDocs, JsonReq]),
- {ok, Passes}.
-
-ddoc_proc_prompt({Proc, DDocId}, FunPath, Args) ->
- proc_prompt(Proc, [<<"ddoc">>, DDocId, FunPath, Args]).
-
-ddoc_prompt(DDoc, FunPath, Args) ->
- with_ddoc_proc(DDoc, fun({Proc, DDocId}) ->
- proc_prompt(Proc, [<<"ddoc">>, DDocId, FunPath, Args])
- end).
-
-with_ddoc_proc(#doc{id=DDocId,revs={Start, [DiskRev|_]}}=DDoc, Fun) ->
- Rev = couch_doc:rev_to_str({Start, DiskRev}),
- DDocKey = {DDocId, Rev},
- Proc = get_ddoc_process(DDoc, DDocKey),
- try Fun({Proc, DDocId})
- after
- ok = ret_os_process(Proc)
- end.
-
-proc_prompt(Proc, Args) ->
- case proc_prompt_raw(Proc, Args) of
- {json, Json} ->
- ?JSON_DECODE(Json);
- EJson ->
- EJson
- end.
-
-proc_prompt_raw(#proc{prompt_fun = {Mod, Func}} = Proc, Args) ->
- apply(Mod, Func, [Proc#proc.pid, Args]).
-
-raw_to_ejson({json, Json}) ->
- ?JSON_DECODE(Json);
-raw_to_ejson(EJson) ->
- EJson.
-
-proc_stop(Proc) ->
- {Mod, Func} = Proc#proc.stop_fun,
- apply(Mod, Func, [Proc#proc.pid]).
-
-proc_set_timeout(Proc, Timeout) ->
- {Mod, Func} = Proc#proc.set_timeout_fun,
- apply(Mod, Func, [Proc#proc.pid, Timeout]).
-
-get_ddoc_process(#doc{} = DDoc, DDocKey) ->
- % remove this case statement
- case gen_server:call(couch_proc_manager, {get_proc, DDoc, DDocKey}, infinity) of
- {ok, Proc, {QueryConfig}} ->
- % process knows the ddoc
- case (catch proc_prompt(Proc, [<<"reset">>, {QueryConfig}])) of
- true ->
- proc_set_timeout(Proc, couch_util:get_value(<<"timeout">>, QueryConfig)),
- Proc;
- _ ->
- catch proc_stop(Proc),
- get_ddoc_process(DDoc, DDocKey)
- end;
- Error ->
- throw(Error)
- end.
-
-get_os_process(Lang) ->
- case gen_server:call(couch_proc_manager, {get_proc, Lang}, infinity) of
- {ok, Proc, {QueryConfig}} ->
- case (catch proc_prompt(Proc, [<<"reset">>, {QueryConfig}])) of
- true ->
- proc_set_timeout(Proc, couch_util:get_value(<<"timeout">>, QueryConfig)),
- Proc;
- _ ->
- catch proc_stop(Proc),
- get_os_process(Lang)
- end;
- Error ->
- throw(Error)
- end.
-
-ret_os_process(Proc) ->
- true = gen_server:call(couch_proc_manager, {ret_proc, Proc}, infinity),
- catch unlink(Proc#proc.pid),
- ok.
-
-throw_sum_error(Else) ->
- throw({invalid_value, iolist_to_binary(io_lib:format(?SUMERROR, [Else]))}).
-
-throw_stat_error(Else) ->
- throw({invalid_value, iolist_to_binary(io_lib:format(?STATERROR, [Else]))}).
-
-
--ifdef(TEST).
--include_lib("eunit/include/eunit.hrl").
-
-sum_values_test() ->
- ?assertEqual(3, sum_values(1, 2)),
- ?assertEqual([2,4,6], sum_values(1, [1,4,6])),
- ?assertEqual([3,5,7], sum_values([3,2,4], [0,3,3])),
- X = {[{<<"a">>,1}, {<<"b">>,[1,2]}, {<<"c">>, {[{<<"d">>,3}]}},
- {<<"g">>,1}]},
- Y = {[{<<"a">>,2}, {<<"b">>,3}, {<<"c">>, {[{<<"e">>, 5}]}},
- {<<"f">>,1}, {<<"g">>,1}]},
- Z = {[{<<"a">>,3}, {<<"b">>,[4,2]}, {<<"c">>, {[{<<"d">>,3},{<<"e">>,5}]}},
- {<<"f">>,1}, {<<"g">>,2}]},
- ?assertEqual(Z, sum_values(X, Y)),
- ?assertEqual(Z, sum_values(Y, X)).
-
-stat_values_test() ->
- ?assertEqual({1, 2, 0, 1, 1}, stat_values(1, 0)),
- ?assertEqual({11, 2, 1, 10, 101}, stat_values(1, 10)),
- ?assertEqual([{9, 2, 2, 7, 53},
- {14, 2, 3, 11, 130},
- {18, 2, 5, 13, 194}
- ], stat_values([2,3,5], [7,11,13])).
-
--endif.
http://git-wip-us.apache.org/repos/asf/couchdb/blob/ed98610c/src/couch/src/couch_secondary_sup.erl
----------------------------------------------------------------------
diff --git a/src/couch/src/couch_secondary_sup.erl b/src/couch/src/couch_secondary_sup.erl
deleted file mode 100644
index d0ed0c2..0000000
--- a/src/couch/src/couch_secondary_sup.erl
+++ /dev/null
@@ -1,42 +0,0 @@
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-% http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
--module(couch_secondary_sup).
--behaviour(supervisor).
--export([init/1, start_link/0]).
-
-start_link() ->
- supervisor:start_link({local,couch_secondary_services}, ?MODULE, []).
-
-init([]) ->
- SecondarySupervisors = [
- {couch_db_update_notifier_sup,
- {couch_db_update_notifier_sup, start_link, []},
- permanent,
- infinity,
- supervisor,
- [couch_db_update_notifier_sup]}
- ],
- Children = SecondarySupervisors ++ [
- begin
- {ok, {Module, Fun, Args}} = couch_util:parse_term(SpecStr),
-
- {list_to_atom(Name),
- {Module, Fun, Args},
- permanent,
- brutal_kill,
- worker,
- [Module]}
- end
- || {Name, SpecStr}
- <- config:get("daemons"), SpecStr /= ""],
- {ok, {{one_for_one, 50, 3600}, Children}}.
http://git-wip-us.apache.org/repos/asf/couchdb/blob/ed98610c/src/couch/src/couch_server.erl
----------------------------------------------------------------------
diff --git a/src/couch/src/couch_server.erl b/src/couch/src/couch_server.erl
deleted file mode 100644
index e4de69e..0000000
--- a/src/couch/src/couch_server.erl
+++ /dev/null
@@ -1,510 +0,0 @@
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-% http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
--module(couch_server).
--behaviour(gen_server).
--behaviour(config_listener).
-
--export([open/2,create/2,delete/2,get_version/0,get_uuid/0]).
--export([all_databases/0, all_databases/2]).
--export([init/1, handle_call/3,sup_start_link/0]).
--export([handle_cast/2,code_change/3,handle_info/2,terminate/2]).
--export([dev_start/0,is_admin/2,has_admins/0,get_stats/0]).
--export([close_lru/0]).
-
-% config_listener api
--export([handle_config_change/5]).
-
--include_lib("couch/include/couch_db.hrl").
-
--record(server,{
- root_dir = [],
- dbname_regexp,
- max_dbs_open=100,
- dbs_open=0,
- start_time="",
- lru = couch_lru:new()
- }).
-
-dev_start() ->
- couch:stop(),
- up_to_date = make:all([load, debug_info]),
- couch:start().
-
-get_version() ->
- Apps = application:loaded_applications(),
- case lists:keysearch(couch, 1, Apps) of
- {value, {_, _, Vsn}} ->
- Vsn;
- false ->
- "0.0.0"
- end.
-
-get_uuid() ->
- case config:get("couchdb", "uuid", nil) of
- nil ->
- UUID = couch_uuids:random(),
- config:set("couchdb", "uuid", ?b2l(UUID)),
- UUID;
- UUID -> ?l2b(UUID)
- end.
-
-get_stats() ->
- {ok, #server{start_time=Time,dbs_open=Open}} =
- gen_server:call(couch_server, get_server),
- [{start_time, ?l2b(Time)}, {dbs_open, Open}].
-
-sup_start_link() ->
- gen_server:start_link({local, couch_server}, couch_server, [], []).
-
-
-open(DbName, Options0) ->
- Options = maybe_add_sys_db_callbacks(DbName, Options0),
- Ctx = couch_util:get_value(user_ctx, Options, #user_ctx{}),
- case ets:lookup(couch_dbs, DbName) of
- [#db{fd=Fd, fd_monitor=Lock} = Db] when Lock =/= locked ->
- update_lru(DbName, Options),
- {ok, Db#db{user_ctx=Ctx, fd_monitor=erlang:monitor(process,Fd)}};
- _ ->
- Timeout = couch_util:get_value(timeout, Options, infinity),
- case gen_server:call(couch_server, {open, DbName, Options}, Timeout) of
- {ok, #db{fd=Fd} = Db} ->
- update_lru(DbName, Options),
- {ok, Db#db{user_ctx=Ctx, fd_monitor=erlang:monitor(process,Fd)}};
- Error ->
- Error
- end
- end.
-
-update_lru(DbName, Options) ->
- case lists:member(sys_db, Options) of
- false -> gen_server:cast(couch_server, {update_lru, DbName});
- true -> ok
- end.
-
-close_lru() ->
- gen_server:call(couch_server, close_lru).
-
-create(DbName, Options0) ->
- Options = maybe_add_sys_db_callbacks(DbName, Options0),
- case gen_server:call(couch_server, {create, DbName, Options}, infinity) of
- {ok, #db{fd=Fd} = Db} ->
- Ctx = couch_util:get_value(user_ctx, Options, #user_ctx{}),
- {ok, Db#db{user_ctx=Ctx, fd_monitor=erlang:monitor(process,Fd)}};
- Error ->
- Error
- end.
-
-delete(DbName, Options) ->
- gen_server:call(couch_server, {delete, DbName, Options}, infinity).
-
-maybe_add_sys_db_callbacks(DbName, Options) when is_binary(DbName) ->
- maybe_add_sys_db_callbacks(?b2l(DbName), Options);
-maybe_add_sys_db_callbacks(DbName, Options) ->
- case config:get("replicator", "db", "_replicator") of
- DbName ->
- [
- {before_doc_update, fun couch_replicator_manager:before_doc_update/2},
- {after_doc_read, fun couch_replicator_manager:after_doc_read/2},
- sys_db | Options
- ];
- _ ->
- case config:get("couch_httpd_auth", "authentication_db", "_users") of
- DbName ->
- [
- {before_doc_update, fun couch_users_db:before_doc_update/2},
- {after_doc_read, fun couch_users_db:after_doc_read/2},
- sys_db | Options
- ];
- _ ->
- case config:get("mem3", "shard_db", "dbs") of
- DbName ->
- [sys_db | Options];
- _ ->
- Options
- end
- end
- end.
-
-check_dbname(#server{dbname_regexp=RegExp}, DbName) ->
- case re:run(DbName, RegExp, [{capture, none}]) of
- nomatch ->
- case DbName of
- "_users" -> ok;
- "_replicator" -> ok;
- _Else ->
- {error, illegal_database_name, DbName}
- end;
- match ->
- ok
- end.
-
-is_admin(User, ClearPwd) ->
- case config:get("admins", User) of
- "-hashed-" ++ HashedPwdAndSalt ->
- [HashedPwd, Salt] = string:tokens(HashedPwdAndSalt, ","),
- couch_util:to_hex(crypto:sha(ClearPwd ++ Salt)) == HashedPwd;
- _Else ->
- false
- end.
-
-has_admins() ->
- config:get("admins") /= [].
-
-get_full_filename(Server, DbName) ->
- filename:join([Server#server.root_dir, "./" ++ DbName ++ ".couch"]).
-
-hash_admin_passwords() ->
- hash_admin_passwords(true).
-
-hash_admin_passwords(Persist) ->
- lists:foreach(
- fun({User, ClearPassword}) ->
- HashedPassword = couch_passwords:hash_admin_password(ClearPassword),
- config:set("admins", User, ?b2l(HashedPassword), Persist)
- end, couch_passwords:get_unhashed_admins()).
-
-init([]) ->
- % read config and register for configuration changes
-
- % just stop if one of the config settings change. couch_server_sup
- % will restart us and then we will pick up the new settings.
-
- RootDir = config:get("couchdb", "database_dir", "."),
- MaxDbsOpen = list_to_integer(
- config:get("couchdb", "max_dbs_open")),
- ok = config:listen_for_changes(?MODULE, nil),
- ok = couch_file:init_delete_dir(RootDir),
- hash_admin_passwords(),
- {ok, RegExp} = re:compile(
- "^[a-z][a-z0-9\\_\\$()\\+\\-\\/]*" % use the stock CouchDB regex
- "(\\.[0-9]{10,})?$" % but allow an optional shard timestamp at the end
- ),
- ets:new(couch_dbs, [set, protected, named_table, {keypos, #db.name}]),
- process_flag(trap_exit, true),
- {ok, #server{root_dir=RootDir,
- dbname_regexp=RegExp,
- max_dbs_open=MaxDbsOpen,
- start_time=couch_util:rfc1123_date()}}.
-
-terminate(_Reason, _Srv) ->
- ets:foldl(fun(#db{main_pid=Pid}, _) -> couch_util:shutdown_sync(Pid) end,
- nil, couch_dbs),
- ok.
-
-handle_config_change("couchdb", "database_dir", _, _, _) ->
- exit(whereis(couch_server), config_change),
- remove_handler;
-handle_config_change("couchdb", "max_dbs_open", Max, _, _) ->
- {ok, gen_server:call(couch_server,{set_max_dbs_open,list_to_integer(Max)})};
-handle_config_change("admins", _, _, Persist, _) ->
- % spawn here so couch event manager doesn't deadlock
- {ok, spawn(fun() -> hash_admin_passwords(Persist) end)};
-handle_config_change("httpd", "authentication_handlers", _, _, _) ->
- {ok, couch_httpd:stop()};
-handle_config_change("httpd", "bind_address", _, _, _) ->
- {ok, couch_httpd:stop()};
-handle_config_change("httpd", "port", _, _, _) ->
- {ok, couch_httpd:stop()};
-handle_config_change("httpd", "max_connections", _, _, _) ->
- {ok, couch_httpd:stop()};
-handle_config_change("httpd", "default_handler", _, _, _) ->
- {ok, couch_httpd:stop()};
-handle_config_change("httpd_global_handlers", _, _, _, _) ->
- {ok, couch_httpd:stop()};
-handle_config_change("httpd_db_handlers", _, _, _, _) ->
- {ok, couch_httpd:stop()};
-handle_config_change(_, _, _, _, _) ->
- {ok, nil}.
-
-
-all_databases() ->
- {ok, DbList} = all_databases(
- fun(DbName, Acc) -> {ok, [DbName | Acc]} end, []),
- {ok, lists:usort(DbList)}.
-
-all_databases(Fun, Acc0) ->
- {ok, #server{root_dir=Root}} = gen_server:call(couch_server, get_server),
- NormRoot = couch_util:normpath(Root),
- FinalAcc = try
- filelib:fold_files(Root,
- "^[a-z0-9\\_\\$()\\+\\-]*" % stock CouchDB name regex
- "(\\.[0-9]{10,})?" % optional shard timestamp
- "\\.couch$", % filename extension
- true,
- fun(Filename, AccIn) ->
- NormFilename = couch_util:normpath(Filename),
- case NormFilename -- NormRoot of
- [$/ | RelativeFilename] -> ok;
- RelativeFilename -> ok
- end,
- case Fun(?l2b(filename:rootname(RelativeFilename, ".couch")), AccIn) of
- {ok, NewAcc} -> NewAcc;
- {stop, NewAcc} -> throw({stop, Fun, NewAcc})
- end
- end, Acc0)
- catch throw:{stop, Fun, Acc1} ->
- Acc1
- end,
- {ok, FinalAcc}.
-
-
-make_room(Server, Options) ->
- case lists:member(sys_db, Options) of
- false -> maybe_close_lru_db(Server);
- true -> {ok, Server}
- end.
-
-maybe_close_lru_db(#server{dbs_open=NumOpen, max_dbs_open=MaxOpen}=Server)
- when NumOpen < MaxOpen ->
- {ok, Server};
-maybe_close_lru_db(#server{lru=Lru}=Server) ->
- try
- {ok, db_closed(Server#server{lru = couch_lru:close(Lru)}, [])}
- catch error:all_dbs_active ->
- {error, all_dbs_active}
- end.
-
-open_async(Server, From, DbName, Filepath, Options) ->
- Parent = self(),
- put({async_open, DbName}, now()),
- Opener = spawn_link(fun() ->
- Res = couch_db:start_link(DbName, Filepath, Options),
- case {Res, lists:member(create, Options)} of
- {{ok, _Db}, true} ->
- couch_db_update_notifier:notify({created, DbName});
- _ ->
- ok
- end,
- gen_server:call(Parent, {open_result, DbName, Res}, infinity),
- unlink(Parent)
- end),
- ReqType = case lists:member(create, Options) of
- true -> create;
- false -> open
- end,
- % icky hack of field values - compactor_pid used to store clients
- % and fd used for opening request info
- true = ets:insert(couch_dbs, #db{
- name = DbName,
- fd = ReqType,
- main_pid = Opener,
- compactor_pid = [From],
- fd_monitor = locked,
- options = Options
- }),
- db_opened(Server, Options).
-
-handle_call(close_lru, _From, #server{lru=Lru} = Server) ->
- try
- {reply, ok, db_closed(Server#server{lru = couch_lru:close(Lru)}, [])}
- catch error:all_dbs_active ->
- {reply, {error, all_dbs_active}, Server}
- end;
-handle_call(open_dbs_count, _From, Server) ->
- {reply, Server#server.dbs_open, Server};
-handle_call({set_max_dbs_open, Max}, _From, Server) ->
- {reply, ok, Server#server{max_dbs_open=Max}};
-handle_call(get_server, _From, Server) ->
- {reply, {ok, Server}, Server};
-handle_call({open_result, DbName, {ok, Db}}, _From, Server) ->
- link(Db#db.main_pid),
- case erase({async_open, DbName}) of undefined -> ok; T0 ->
- ?LOG_INFO("needed ~p ms to open new ~s", [timer:now_diff(now(),T0)/1000,
- DbName])
- end,
- % icky hack of field values - compactor_pid used to store clients
- % and fd used to possibly store a creation request
- [#db{fd=ReqType, compactor_pid=Froms}] = ets:lookup(couch_dbs, DbName),
- [gen_server:reply(From, {ok, Db}) || From <- Froms],
- % Cancel the creation request if it exists.
- case ReqType of
- {create, DbName, _Filepath, _Options, CrFrom} ->
- gen_server:reply(CrFrom, file_exists);
- _ ->
- ok
- end,
- true = ets:insert(couch_dbs, Db),
- Lru = case couch_db:is_system_db(Db) of
- false ->
- Stat = {couchdb, open_databases},
- couch_stats_collector:track_process_count(Db#db.main_pid, Stat),
- couch_lru:insert(DbName, Server#server.lru);
- true ->
- Server#server.lru
- end,
- {reply, ok, Server#server{lru = Lru}};
-handle_call({open_result, DbName, {error, eexist}}, From, Server) ->
- handle_call({open_result, DbName, file_exists}, From, Server);
-handle_call({open_result, DbName, Error}, _From, Server) ->
- % icky hack of field values - compactor_pid used to store clients
- [#db{fd=ReqType, compactor_pid=Froms}=Db] = ets:lookup(couch_dbs, DbName),
- [gen_server:reply(From, Error) || From <- Froms],
- ?LOG_INFO("open_result error ~p for ~s", [Error, DbName]),
- true = ets:delete(couch_dbs, DbName),
- NewServer = case ReqType of
- {create, DbName, Filepath, Options, CrFrom} ->
- open_async(Server, CrFrom, DbName, Filepath, Options);
- _ ->
- Server
- end,
- {reply, ok, db_closed(NewServer, Db#db.options)};
-handle_call({open, DbName, Options}, From, Server) ->
- case ets:lookup(couch_dbs, DbName) of
- [] ->
- DbNameList = binary_to_list(DbName),
- case check_dbname(Server, DbNameList) of
- ok ->
- case make_room(Server, Options) of
- {ok, Server2} ->
- Filepath = get_full_filename(Server, DbNameList),
- {noreply, open_async(Server2, From, DbName, Filepath, Options)};
- CloseError ->
- {reply, CloseError, Server}
- end;
- Error ->
- {reply, Error, Server}
- end;
- [#db{compactor_pid = Froms} = Db] when is_list(Froms) ->
- % icky hack of field values - compactor_pid used to store clients
- true = ets:insert(couch_dbs, Db#db{compactor_pid = [From|Froms]}),
- if length(Froms) =< 10 -> ok; true ->
- Fmt = "~b clients waiting to open db ~s",
- ?LOG_INFO(Fmt, [length(Froms), DbName])
- end,
- {noreply, Server};
- [#db{} = Db] ->
- {reply, {ok, Db}, Server}
- end;
-handle_call({create, DbName, Options}, From, Server) ->
- DbNameList = binary_to_list(DbName),
- Filepath = get_full_filename(Server, DbNameList),
- case check_dbname(Server, DbNameList) of
- ok ->
- case ets:lookup(couch_dbs, DbName) of
- [] ->
- case make_room(Server, Options) of
- {ok, Server2} ->
- {noreply, open_async(Server2, From, DbName, Filepath,
- [create | Options])};
- CloseError ->
- {reply, CloseError, Server}
- end;
- [#db{fd=open}=Db] ->
- % We're trying to create a database while someone is in
- % the middle of trying to open it. We allow one creator
- % to wait while we figure out if it'll succeed.
- % icky hack of field values - fd used to store create request
- CrOptions = [create | Options],
- NewDb = Db#db{fd={create, DbName, Filepath, CrOptions, From}},
- true = ets:insert(couch_dbs, NewDb),
- {noreply, Server};
- [_AlreadyRunningDb] ->
- {reply, file_exists, Server}
- end;
- Error ->
- {reply, Error, Server}
- end;
-handle_call({delete, DbName, Options}, _From, Server) ->
- DbNameList = binary_to_list(DbName),
- case check_dbname(Server, DbNameList) of
- ok ->
- FullFilepath = get_full_filename(Server, DbNameList),
- Server2 =
- case ets:lookup(couch_dbs, DbName) of
- [] -> Server;
- [#db{main_pid=Pid, compactor_pid=Froms} = Db] when is_list(Froms) ->
- % icky hack of field values - compactor_pid used to store clients
- true = ets:delete(couch_dbs, DbName),
- exit(Pid, kill),
- [gen_server:reply(F, not_found) || F <- Froms],
- db_closed(Server, Db#db.options);
- [#db{main_pid=Pid} = Db] ->
- true = ets:delete(couch_dbs, DbName),
- exit(Pid, kill),
- db_closed(Server, Db#db.options)
- end,
-
- %% Delete any leftover compaction files. If we don't do this a
- %% subsequent request for this DB will try to open them to use
- %% as a recovery.
- lists:foreach(fun(Ext) ->
- couch_file:delete(Server#server.root_dir, FullFilepath ++ Ext)
- end, [".compact", ".compact.data", ".compact.meta"]),
- couch_file:delete(Server#server.root_dir, FullFilepath ++ ".compact"),
-
- Async = not lists:member(sync, Options),
-
- case couch_file:delete(Server#server.root_dir, FullFilepath, Async) of
- ok ->
- couch_db_update_notifier:notify({deleted, DbName}),
- {reply, ok, Server2};
- {error, enoent} ->
- {reply, not_found, Server2};
- Else ->
- {reply, Else, Server2}
- end;
- Error ->
- {reply, Error, Server}
- end;
-handle_call({db_updated, #db{name = DbName} = Db}, _From, Server) ->
- true = ets:insert(couch_dbs, Db),
- Lru = case couch_db:is_system_db(Db) of
- false -> couch_lru:update(DbName, Server#server.lru);
- true -> Server#server.lru
- end,
- {reply, ok, Server#server{lru = Lru}}.
-
-handle_cast({update_lru, DbName}, #server{lru = Lru} = Server) ->
- {noreply, Server#server{lru = couch_lru:update(DbName, Lru)}};
-handle_cast(Msg, Server) ->
- {stop, {unknown_cast_message, Msg}, Server}.
-
-code_change(_, State, _) ->
- {ok, State}.
-
-handle_info({'EXIT', _Pid, config_change}, Server) ->
- {stop, config_change, Server};
-handle_info({'EXIT', Pid, Reason}, Server) ->
- case ets:match_object(couch_dbs, #db{main_pid=Pid, _='_'}) of
- [#db{name = DbName, compactor_pid=Froms} = Db] ->
- if Reason /= snappy_nif_not_loaded -> ok; true ->
- Msg = io_lib:format("To open the database `~s`, Apache CouchDB "
- "must be built with Erlang OTP R13B04 or higher.", [DbName]),
- ?LOG_ERROR(Msg, [])
- end,
- ?LOG_INFO("db ~s died with reason ~p", [DbName, Reason]),
- % icky hack of field values - compactor_pid used to store clients
- if is_list(Froms) ->
- [gen_server:reply(From, Reason) || From <- Froms];
- true ->
- ok
- end,
- true = ets:delete(couch_dbs, DbName),
- {noreply, db_closed(Server, Db#db.options)};
- [] ->
- {noreply, Server}
- end;
-handle_info(Info, Server) ->
- {stop, {unknown_message, Info}, Server}.
-
-db_opened(Server, Options) ->
- case lists:member(sys_db, Options) of
- false -> Server#server{dbs_open=Server#server.dbs_open + 1};
- true -> Server
- end.
-
-db_closed(Server, Options) ->
- case lists:member(sys_db, Options) of
- false -> Server#server{dbs_open=Server#server.dbs_open - 1};
- true -> Server
- end.
http://git-wip-us.apache.org/repos/asf/couchdb/blob/ed98610c/src/couch/src/couch_stats_aggregator.erl
----------------------------------------------------------------------
diff --git a/src/couch/src/couch_stats_aggregator.erl b/src/couch/src/couch_stats_aggregator.erl
deleted file mode 100644
index 416c9a0..0000000
--- a/src/couch/src/couch_stats_aggregator.erl
+++ /dev/null
@@ -1,312 +0,0 @@
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-% http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
--module(couch_stats_aggregator).
--behaviour(gen_server).
--behaviour(config_listener).
-
--export([start/0, start/1, stop/0]).
--export([all/0, all/1, get/1, get/2, get_json/1, get_json/2, collect_sample/0]).
-
--export([init/1, terminate/2, code_change/3]).
--export([handle_call/3, handle_cast/2, handle_info/2]).
-
-% config_listener api
--export([handle_config_change/5]).
-
-
--record(aggregate, {
- description = <<"">>,
- seconds = 0,
- count = 0,
- current = null,
- sum = null,
- mean = null,
- variance = null,
- stddev = null,
- min = null,
- max = null,
- samples = []
-}).
-
-
-start() ->
- PrivDir = couch_util:priv_dir(),
- start(filename:join(PrivDir, "stat_descriptions.cfg")).
-
-start(FileName) ->
- gen_server:start_link({local, ?MODULE}, ?MODULE, [FileName], []).
-
-stop() ->
- gen_server:cast(?MODULE, stop).
-
-all() ->
- ?MODULE:all(0).
-all(Time) when is_binary(Time) ->
- ?MODULE:all(list_to_integer(binary_to_list(Time)));
-all(Time) when is_atom(Time) ->
- ?MODULE:all(list_to_integer(atom_to_list(Time)));
-all(Time) when is_integer(Time) ->
- Aggs = ets:match(?MODULE, {{'$1', Time}, '$2'}),
- Stats = lists:map(fun([Key, Agg]) -> {Key, Agg} end, Aggs),
- case Stats of
- [] ->
- {[]};
- _ ->
- Ret = lists:foldl(fun({{Mod, Key}, Agg}, Acc) ->
- CurrKeys = case proplists:lookup(Mod, Acc) of
- none -> [];
- {Mod, {Keys}} -> Keys
- end,
- NewMod = {[{Key, to_json_term(Agg)} | CurrKeys]},
- [{Mod, NewMod} | proplists:delete(Mod, Acc)]
- end, [], Stats),
- {Ret}
- end.
-
-get(Key) ->
- ?MODULE:get(Key, 0).
-get(Key, Time) when is_binary(Time) ->
- ?MODULE:get(Key, list_to_integer(binary_to_list(Time)));
-get(Key, Time) when is_atom(Time) ->
- ?MODULE:get(Key, list_to_integer(atom_to_list(Time)));
-get(Key, Time) when is_integer(Time) ->
- case ets:lookup(?MODULE, {make_key(Key), Time}) of
- [] -> #aggregate{seconds=Time};
- [{_, Agg}] -> Agg
- end.
-
-get_json(Key) ->
- get_json(Key, 0).
-get_json(Key, Time) ->
- to_json_term(?MODULE:get(Key, Time)).
-
-collect_sample() ->
- gen_server:call(?MODULE, collect_sample, infinity).
-
-
-init(StatDescsFileName) ->
- % Create an aggregate entry for each {description, rate} pair.
- ets:new(?MODULE, [named_table, set, protected]),
- SampleStr = config:get("stats", "samples", "[0]"),
- {ok, Samples} = couch_util:parse_term(SampleStr),
- {ok, Descs} = file:consult(StatDescsFileName),
- lists:foreach(fun({Sect, Key, Value}) ->
- lists:foreach(fun(Secs) ->
- Agg = #aggregate{
- description=list_to_binary(Value),
- seconds=Secs
- },
- ets:insert(?MODULE, {{{Sect, Key}, Secs}, Agg})
- end, Samples)
- end, Descs),
-
- ok = config:listen_for_changes(?MODULE, nil),
-
- Rate = list_to_integer(config:get("stats", "rate", "1000")),
- % TODO: Add timer_start to kernel start options.
- {ok, TRef} = timer:apply_after(Rate, ?MODULE, collect_sample, []),
- {ok, {TRef, Rate}}.
-
-terminate(_Reason, {TRef, _Rate}) ->
- timer:cancel(TRef),
- ok.
-
-handle_call(collect_sample, _, {OldTRef, SampleInterval}) ->
- timer:cancel(OldTRef),
- {ok, TRef} = timer:apply_after(SampleInterval, ?MODULE, collect_sample, []),
- % Gather new stats values to add.
- Incs = lists:map(fun({Key, Value}) ->
- {Key, {incremental, Value}}
- end, couch_stats_collector:all(incremental)),
- Abs = lists:map(fun({Key, Values}) ->
- couch_stats_collector:clear(Key),
- Values2 = case Values of
- X when is_list(X) -> X;
- Else -> [Else]
- end,
- {_, Mean} = lists:foldl(fun(Val, {Count, Curr}) ->
- {Count+1, Curr + (Val - Curr) / (Count+1)}
- end, {0, 0}, Values2),
- {Key, {absolute, Mean}}
- end, couch_stats_collector:all(absolute)),
-
- Values = Incs ++ Abs,
- Now = erlang:now(),
- lists:foreach(fun({{Key, Rate}, Agg}) ->
- NewAgg = case proplists:lookup(Key, Values) of
- none ->
- rem_values(Now, Agg);
- {Key, {Type, Value}} ->
- NewValue = new_value(Type, Value, Agg#aggregate.current),
- Agg2 = add_value(Now, NewValue, Agg),
- rem_values(Now, Agg2)
- end,
- ets:insert(?MODULE, {{Key, Rate}, NewAgg})
- end, ets:tab2list(?MODULE)),
- {reply, ok, {TRef, SampleInterval}}.
-
-handle_cast(stop, State) ->
- {stop, normal, State}.
-
-handle_info({gen_event_EXIT, {config_listener, ?MODULE}, _Reason}, State) ->
- erlang:send_after(5000, self(), restart_config_listener),
- {noreply, State};
-handle_info(restart_config_listener, State) ->
- ok = config:listen_for_changes(?MODULE, nil),
- {noreply, State};
-handle_info(_Info, State) ->
- {noreply, State}.
-
-code_change(_OldVersion, State, _Extra) ->
- {ok, State}.
-
-
-handle_config_change("stats", _, _, _, _) ->
- exit(whereis(?MODULE), config_change),
- remove_handler;
-handle_config_change(_, _, _, _, _) ->
- {ok, nil}.
-
-
-new_value(incremental, Value, null) ->
- Value;
-new_value(incremental, Value, Current) ->
- Value - Current;
-new_value(absolute, Value, _Current) ->
- Value.
-
-add_value(Time, Value, #aggregate{count=Count, seconds=Secs}=Agg) when Count < 1 ->
- Samples = case Secs of
- 0 -> [];
- _ -> [{Time, Value}]
- end,
- Agg#aggregate{
- count=1,
- current=Value,
- sum=Value,
- mean=Value,
- variance=0.0,
- stddev=null,
- min=Value,
- max=Value,
- samples=Samples
- };
-add_value(Time, Value, Agg) ->
- #aggregate{
- count=Count,
- current=Current,
- sum=Sum,
- mean=Mean,
- variance=Variance,
- samples=Samples
- } = Agg,
-
- NewCount = Count + 1,
- NewMean = Mean + (Value - Mean) / NewCount,
- NewVariance = Variance + (Value - Mean) * (Value - NewMean),
- StdDev = case NewCount > 1 of
- false -> null;
- _ -> math:sqrt(NewVariance / (NewCount - 1))
- end,
- Agg2 = Agg#aggregate{
- count=NewCount,
- current=Current + Value,
- sum=Sum + Value,
- mean=NewMean,
- variance=NewVariance,
- stddev=StdDev,
- min=lists:min([Agg#aggregate.min, Value]),
- max=lists:max([Agg#aggregate.max, Value])
- },
- case Agg2#aggregate.seconds of
- 0 -> Agg2;
- _ -> Agg2#aggregate{samples=[{Time, Value} | Samples]}
- end.
-
-rem_values(Time, Agg) ->
- Seconds = Agg#aggregate.seconds,
- Samples = Agg#aggregate.samples,
- Pred = fun({When, _Value}) ->
- timer:now_diff(Time, When) =< (Seconds * 1000000)
- end,
- {Keep, Remove} = lists:splitwith(Pred, Samples),
- Agg2 = lists:foldl(fun({_, Value}, Acc) ->
- rem_value(Value, Acc)
- end, Agg, Remove),
- Agg2#aggregate{samples=Keep}.
-
-rem_value(_Value, #aggregate{count=Count, seconds=Secs}) when Count =< 1 ->
- #aggregate{seconds=Secs};
-rem_value(Value, Agg) ->
- #aggregate{
- count=Count,
- sum=Sum,
- mean=Mean,
- variance=Variance
- } = Agg,
-
- OldMean = (Mean * Count - Value) / (Count - 1),
- OldVariance = Variance - (Value - OldMean) * (Value - Mean),
- OldCount = Count - 1,
- StdDev = case OldCount > 1 of
- false -> null;
- _ -> math:sqrt(clamp_value(OldVariance / (OldCount - 1)))
- end,
- Agg#aggregate{
- count=OldCount,
- sum=Sum-Value,
- mean=clamp_value(OldMean),
- variance=clamp_value(OldVariance),
- stddev=StdDev
- }.
-
-to_json_term(Agg) ->
- {Min, Max} = case Agg#aggregate.seconds > 0 of
- false ->
- {Agg#aggregate.min, Agg#aggregate.max};
- _ ->
- case length(Agg#aggregate.samples) > 0 of
- true ->
- Extract = fun({_Time, Value}) -> Value end,
- Samples = lists:map(Extract, Agg#aggregate.samples),
- {lists:min(Samples), lists:max(Samples)};
- _ ->
- {null, null}
- end
- end,
- {[
- {description, Agg#aggregate.description},
- {current, round_value(Agg#aggregate.sum)},
- {sum, round_value(Agg#aggregate.sum)},
- {mean, round_value(Agg#aggregate.mean)},
- {stddev, round_value(Agg#aggregate.stddev)},
- {min, Min},
- {max, Max}
- ]}.
-
-make_key({Mod, Val}) when is_integer(Val) ->
- {Mod, list_to_atom(integer_to_list(Val))};
-make_key(Key) ->
- Key.
-
-round_value(Val) when not is_number(Val) ->
- Val;
-round_value(Val) when Val == 0 ->
- Val;
-round_value(Val) ->
- erlang:round(Val * 1000.0) / 1000.0.
-
-clamp_value(Val) when Val > 0.00000000000001 ->
- Val;
-clamp_value(_) ->
- 0.0.
http://git-wip-us.apache.org/repos/asf/couchdb/blob/ed98610c/src/couch/src/couch_stats_collector.erl
----------------------------------------------------------------------
diff --git a/src/couch/src/couch_stats_collector.erl b/src/couch/src/couch_stats_collector.erl
deleted file mode 100644
index 99814de..0000000
--- a/src/couch/src/couch_stats_collector.erl
+++ /dev/null
@@ -1,134 +0,0 @@
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-% http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
-% todo
-% - remove existance check on increment(), decrement() and record(). have
-% modules initialize counters on startup.
-
--module(couch_stats_collector).
-
--behaviour(gen_server).
-
--export([start/0, stop/0]).
--export([all/0, all/1, get/1, increment/1, decrement/1, record/2, clear/1]).
--export([track_process_count/1, track_process_count/2]).
-
--export([init/1, terminate/2, code_change/3]).
--export([handle_call/3, handle_cast/2, handle_info/2]).
-
--define(HIT_TABLE, stats_hit_table).
--define(ABS_TABLE, stats_abs_table).
-
-start() ->
- gen_server:start_link({local, ?MODULE}, ?MODULE, [], []).
-
-stop() ->
- gen_server:call(?MODULE, stop).
-
-all() ->
- ets:tab2list(?HIT_TABLE) ++ abs_to_list().
-
-all(Type) ->
- case Type of
- incremental -> ets:tab2list(?HIT_TABLE);
- absolute -> abs_to_list()
- end.
-
-get(Key) ->
- case ets:lookup(?HIT_TABLE, Key) of
- [] ->
- case ets:lookup(?ABS_TABLE, Key) of
- [] ->
- nil;
- AbsVals ->
- lists:map(fun({_, Value}) -> Value end, AbsVals)
- end;
- [{_, Counter}] ->
- Counter
- end.
-
-increment(Key) ->
- Key2 = make_key(Key),
- case catch ets:update_counter(?HIT_TABLE, Key2, 1) of
- {'EXIT', {badarg, _}} ->
- catch ets:insert(?HIT_TABLE, {Key2, 1}),
- ok;
- _ ->
- ok
- end.
-
-decrement(Key) ->
- Key2 = make_key(Key),
- case catch ets:update_counter(?HIT_TABLE, Key2, -1) of
- {'EXIT', {badarg, _}} ->
- catch ets:insert(?HIT_TABLE, {Key2, -1}),
- ok;
- _ -> ok
- end.
-
-record(Key, Value) ->
- catch ets:insert(?ABS_TABLE, {make_key(Key), Value}).
-
-clear(Key) ->
- catch ets:delete(?ABS_TABLE, make_key(Key)).
-
-track_process_count(Stat) ->
- track_process_count(self(), Stat).
-
-track_process_count(Pid, Stat) ->
- ok = couch_stats_collector:increment(Stat),
- gen_server:cast(?MODULE, {track_process_count, Pid, Stat}).
-
-
-init(_) ->
- ets:new(?HIT_TABLE, [named_table, set, public]),
- ets:new(?ABS_TABLE, [named_table, duplicate_bag, public]),
- {ok, dict:new()}.
-
-terminate(_Reason, _State) ->
- ok.
-
-handle_call(stop, _, State) ->
- {stop, normal, stopped, State}.
-
-handle_cast({track_process_count, Pid, Stat}, State) ->
- Ref = erlang:monitor(process, Pid),
- {noreply, dict:store(Ref, Stat, State)}.
-
-handle_info({'DOWN', Ref, _, _, _}, State) ->
- Stat = dict:fetch(Ref, State),
- ok = couch_stats_collector:decrement(Stat),
- {noreply, dict:erase(Ref, State)}.
-
-code_change(_OldVersion, State, _Extra) when is_list(State) ->
- {ok, dict:from_list(State)};
-code_change(_OldVersion, State, _Extra) ->
- {ok, State}.
-
-
-make_key({Module, Key}) when is_integer(Key) ->
- {Module, list_to_atom(integer_to_list(Key))};
-make_key(Key) ->
- Key.
-
-abs_to_list() ->
- SortedKVs = lists:sort(ets:tab2list(?ABS_TABLE)),
- lists:foldl(fun({Key, Val}, Acc) ->
- case Acc of
- [] ->
- [{Key, [Val]}];
- [{Key, Prev} | Rest] ->
- [{Key, [Val | Prev]} | Rest];
- Others ->
- [{Key, [Val]} | Others]
- end
- end, [], SortedKVs).
http://git-wip-us.apache.org/repos/asf/couchdb/blob/ed98610c/src/couch/src/couch_stream.erl
----------------------------------------------------------------------
diff --git a/src/couch/src/couch_stream.erl b/src/couch/src/couch_stream.erl
deleted file mode 100644
index 6e7213b..0000000
--- a/src/couch/src/couch_stream.erl
+++ /dev/null
@@ -1,299 +0,0 @@
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-% http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
--module(couch_stream).
--behaviour(gen_server).
-
-% public API
--export([open/1, open/2, close/1]).
--export([foldl/4, foldl/5, foldl_decode/6, range_foldl/6]).
--export([copy_to_new_stream/3, write/2]).
-
-% gen_server callbacks
--export([init/1, terminate/2, code_change/3]).
--export([handle_cast/2, handle_call/3, handle_info/2]).
-
--include_lib("couch/include/couch_db.hrl").
-
--define(DEFAULT_BUFFER_SIZE, 4096).
-
--record(stream,
- {fd = 0,
- written_pointers=[],
- buffer_list = [],
- buffer_len = 0,
- max_buffer,
- written_len = 0,
- md5,
- % md5 of the content without any transformation applied (e.g. compression)
- % needed for the attachment upload integrity check (ticket 558)
- identity_md5,
- identity_len = 0,
- encoding_fun,
- end_encoding_fun
- }).
-
-
-%%% Interface functions %%%
-
-open(Fd) ->
- open(Fd, []).
-
-open(Fd, Options) ->
- gen_server:start_link(couch_stream, {Fd, Options}, []).
-
-close(Pid) ->
- gen_server:call(Pid, close, infinity).
-
-copy_to_new_stream(Fd, PosList, DestFd) ->
- {ok, Dest} = open(DestFd),
- foldl(Fd, PosList,
- fun(Bin, _) ->
- ok = write(Dest, Bin)
- end, ok),
- close(Dest).
-
-foldl(_Fd, [], _Fun, Acc) ->
- Acc;
-foldl(Fd, [Pos|Rest], Fun, Acc) ->
- {ok, Bin} = couch_file:pread_iolist(Fd, Pos),
- foldl(Fd, Rest, Fun, Fun(Bin, Acc)).
-
-foldl(Fd, PosList, <<>>, Fun, Acc) ->
- foldl(Fd, PosList, Fun, Acc);
-foldl(Fd, PosList, Md5, Fun, Acc) ->
- foldl(Fd, PosList, Md5, couch_util:md5_init(), Fun, Acc).
-
-foldl_decode(Fd, PosList, Md5, Enc, Fun, Acc) ->
- {DecDataFun, DecEndFun} = case Enc of
- gzip ->
- ungzip_init();
- identity ->
- identity_enc_dec_funs()
- end,
- Result = foldl_decode(
- DecDataFun, Fd, PosList, Md5, couch_util:md5_init(), Fun, Acc
- ),
- DecEndFun(),
- Result.
-
-foldl(_Fd, [], Md5, Md5Acc, _Fun, Acc) ->
- Md5 = couch_util:md5_final(Md5Acc),
- Acc;
-foldl(Fd, [{Pos, _Size}], Md5, Md5Acc, Fun, Acc) -> % 0110 UPGRADE CODE
- foldl(Fd, [Pos], Md5, Md5Acc, Fun, Acc);
-foldl(Fd, [Pos], Md5, Md5Acc, Fun, Acc) ->
- {ok, Bin} = couch_file:pread_iolist(Fd, Pos),
- Md5 = couch_util:md5_final(couch_util:md5_update(Md5Acc, Bin)),
- Fun(Bin, Acc);
-foldl(Fd, [{Pos, _Size}|Rest], Md5, Md5Acc, Fun, Acc) ->
- foldl(Fd, [Pos|Rest], Md5, Md5Acc, Fun, Acc);
-foldl(Fd, [Pos|Rest], Md5, Md5Acc, Fun, Acc) ->
- {ok, Bin} = couch_file:pread_iolist(Fd, Pos),
- foldl(Fd, Rest, Md5, couch_util:md5_update(Md5Acc, Bin), Fun, Fun(Bin, Acc)).
-
-range_foldl(Fd, PosList, From, To, Fun, Acc) ->
- range_foldl(Fd, PosList, From, To, 0, Fun, Acc).
-
-range_foldl(_Fd, _PosList, _From, To, Off, _Fun, Acc) when Off >= To ->
- Acc;
-range_foldl(Fd, [Pos|Rest], From, To, Off, Fun, Acc) when is_integer(Pos) -> % old-style attachment
- {ok, Bin} = couch_file:pread_iolist(Fd, Pos),
- range_foldl(Fd, [{Pos, iolist_size(Bin)}] ++ Rest, From, To, Off, Fun, Acc);
-range_foldl(Fd, [{_Pos, Size}|Rest], From, To, Off, Fun, Acc) when From > Off + Size ->
- range_foldl(Fd, Rest, From, To, Off + Size, Fun, Acc);
-range_foldl(Fd, [{Pos, Size}|Rest], From, To, Off, Fun, Acc) ->
- {ok, Bin} = couch_file:pread_iolist(Fd, Pos),
- Bin1 = if
- From =< Off andalso To >= Off + Size -> Bin; %% the whole block is covered
- true ->
- PrefixLen = clip(From - Off, 0, Size),
- PostfixLen = clip(Off + Size - To, 0, Size),
- MatchLen = Size - PrefixLen - PostfixLen,
- <<_Prefix:PrefixLen/binary,Match:MatchLen/binary,_Postfix:PostfixLen/binary>> = iolist_to_binary(Bin),
- Match
- end,
- range_foldl(Fd, Rest, From, To, Off + Size, Fun, Fun(Bin1, Acc)).
-
-clip(Value, Lo, Hi) ->
- if
- Value < Lo -> Lo;
- Value > Hi -> Hi;
- true -> Value
- end.
-
-foldl_decode(_DecFun, _Fd, [], Md5, Md5Acc, _Fun, Acc) ->
- Md5 = couch_util:md5_final(Md5Acc),
- Acc;
-foldl_decode(DecFun, Fd, [{Pos, _Size}], Md5, Md5Acc, Fun, Acc) ->
- foldl_decode(DecFun, Fd, [Pos], Md5, Md5Acc, Fun, Acc);
-foldl_decode(DecFun, Fd, [Pos], Md5, Md5Acc, Fun, Acc) ->
- {ok, EncBin} = couch_file:pread_iolist(Fd, Pos),
- Md5 = couch_util:md5_final(couch_util:md5_update(Md5Acc, EncBin)),
- Bin = DecFun(EncBin),
- Fun(Bin, Acc);
-foldl_decode(DecFun, Fd, [{Pos, _Size}|Rest], Md5, Md5Acc, Fun, Acc) ->
- foldl_decode(DecFun, Fd, [Pos|Rest], Md5, Md5Acc, Fun, Acc);
-foldl_decode(DecFun, Fd, [Pos|Rest], Md5, Md5Acc, Fun, Acc) ->
- {ok, EncBin} = couch_file:pread_iolist(Fd, Pos),
- Bin = DecFun(EncBin),
- Md5Acc2 = couch_util:md5_update(Md5Acc, EncBin),
- foldl_decode(DecFun, Fd, Rest, Md5, Md5Acc2, Fun, Fun(Bin, Acc)).
-
-gzip_init(Options) ->
- case couch_util:get_value(compression_level, Options, 0) of
- Lvl when Lvl >= 1 andalso Lvl =< 9 ->
- Z = zlib:open(),
- % 15 = ?MAX_WBITS (defined in the zlib module)
- % the 16 + ?MAX_WBITS formula was obtained by inspecting zlib:gzip/1
- ok = zlib:deflateInit(Z, Lvl, deflated, 16 + 15, 8, default),
- {
- fun(Data) ->
- zlib:deflate(Z, Data)
- end,
- fun() ->
- Last = zlib:deflate(Z, [], finish),
- ok = zlib:deflateEnd(Z),
- ok = zlib:close(Z),
- Last
- end
- };
- _ ->
- identity_enc_dec_funs()
- end.
-
-ungzip_init() ->
- Z = zlib:open(),
- zlib:inflateInit(Z, 16 + 15),
- {
- fun(Data) ->
- zlib:inflate(Z, Data)
- end,
- fun() ->
- ok = zlib:inflateEnd(Z),
- ok = zlib:close(Z)
- end
- }.
-
-identity_enc_dec_funs() ->
- {
- fun(Data) -> Data end,
- fun() -> [] end
- }.
-
-write(_Pid, <<>>) ->
- ok;
-write(Pid, Bin) ->
- gen_server:call(Pid, {write, Bin}, infinity).
-
-
-init({Fd, Options}) ->
- {EncodingFun, EndEncodingFun} =
- case couch_util:get_value(encoding, Options, identity) of
- identity ->
- identity_enc_dec_funs();
- gzip ->
- gzip_init(Options)
- end,
- {ok, #stream{
- fd=Fd,
- md5=couch_util:md5_init(),
- identity_md5=couch_util:md5_init(),
- encoding_fun=EncodingFun,
- end_encoding_fun=EndEncodingFun,
- max_buffer=couch_util:get_value(
- buffer_size, Options, ?DEFAULT_BUFFER_SIZE)
- }
- }.
-
-terminate(_Reason, _Stream) ->
- ok.
-
-handle_call({write, Bin}, _From, Stream) ->
- BinSize = iolist_size(Bin),
- #stream{
- fd = Fd,
- written_len = WrittenLen,
- written_pointers = Written,
- buffer_len = BufferLen,
- buffer_list = Buffer,
- max_buffer = Max,
- md5 = Md5,
- identity_md5 = IdenMd5,
- identity_len = IdenLen,
- encoding_fun = EncodingFun} = Stream,
- if BinSize + BufferLen > Max ->
- WriteBin = lists:reverse(Buffer, [Bin]),
- IdenMd5_2 = couch_util:md5_update(IdenMd5, WriteBin),
- case EncodingFun(WriteBin) of
- [] ->
- % case where the encoder did some internal buffering
- % (zlib does it for example)
- WrittenLen2 = WrittenLen,
- Md5_2 = Md5,
- Written2 = Written;
- WriteBin2 ->
- {ok, Pos, _} = couch_file:append_binary(Fd, WriteBin2),
- WrittenLen2 = WrittenLen + iolist_size(WriteBin2),
- Md5_2 = couch_util:md5_update(Md5, WriteBin2),
- Written2 = [{Pos, iolist_size(WriteBin2)}|Written]
- end,
-
- {reply, ok, Stream#stream{
- written_len=WrittenLen2,
- written_pointers=Written2,
- buffer_list=[],
- buffer_len=0,
- md5=Md5_2,
- identity_md5=IdenMd5_2,
- identity_len=IdenLen + BinSize}};
- true ->
- {reply, ok, Stream#stream{
- buffer_list=[Bin|Buffer],
- buffer_len=BufferLen + BinSize,
- identity_len=IdenLen + BinSize}}
- end;
-handle_call(close, _From, Stream) ->
- #stream{
- fd = Fd,
- written_len = WrittenLen,
- written_pointers = Written,
- buffer_list = Buffer,
- md5 = Md5,
- identity_md5 = IdenMd5,
- identity_len = IdenLen,
- encoding_fun = EncodingFun,
- end_encoding_fun = EndEncodingFun} = Stream,
-
- WriteBin = lists:reverse(Buffer),
- IdenMd5Final = couch_util:md5_final(couch_util:md5_update(IdenMd5, WriteBin)),
- WriteBin2 = EncodingFun(WriteBin) ++ EndEncodingFun(),
- Md5Final = couch_util:md5_final(couch_util:md5_update(Md5, WriteBin2)),
- Result = case WriteBin2 of
- [] ->
- {lists:reverse(Written), WrittenLen, IdenLen, Md5Final, IdenMd5Final};
- _ ->
- {ok, Pos, _} = couch_file:append_binary(Fd, WriteBin2),
- StreamInfo = lists:reverse(Written, [{Pos, iolist_size(WriteBin2)}]),
- StreamLen = WrittenLen + iolist_size(WriteBin2),
- {StreamInfo, StreamLen, IdenLen, Md5Final, IdenMd5Final}
- end,
- {stop, normal, Result, Stream}.
-
-handle_cast(_Msg, State) ->
- {noreply,State}.
-
-code_change(_OldVsn, State, _Extra) ->
- {ok, State}.
-
-handle_info(_Info, State) ->
- {noreply, State}.
http://git-wip-us.apache.org/repos/asf/couchdb/blob/ed98610c/src/couch/src/couch_sup.erl
----------------------------------------------------------------------
diff --git a/src/couch/src/couch_sup.erl b/src/couch/src/couch_sup.erl
deleted file mode 100644
index 3508d4f..0000000
--- a/src/couch/src/couch_sup.erl
+++ /dev/null
@@ -1,159 +0,0 @@
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-% http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
--module(couch_sup).
--behaviour(supervisor).
--behaviour(config_listener).
-
-
--export([
- start_link/0,
- init/1,
- handle_config_change/5
-]).
-
-
--include_lib("couch/include/couch_db.hrl").
-
-
-start_link() ->
- write_pidfile(),
- notify_starting(),
-
- case supervisor:start_link({local, ?MODULE}, ?MODULE, []) of
- {ok, _} = Resp ->
- notify_started(),
- notify_uris(),
- write_uris(),
- ok = config:listen_for_changes(?MODULE, nil),
- Resp;
- Else ->
- notify_error(Else),
- Else
- end.
-
-
-init(_Args) ->
- twig:log(info, "Starting ~s", [?MODULE]),
- {ok, {{one_for_one,10, 60}, [
- {
- couch_primary_services,
- {couch_primary_sup, start_link, []},
- permanent,
- infinity,
- supervisor,
- [couch_primary_sup]
- },
- {
- couch_secondary_services,
- {couch_secondary_sup, start_link, []},
- permanent,
- infinity,
- supervisor,
- [couch_secondary_sup]
- }
- ]}}.
-
-
-handle_config_change("daemons", _, _, _, _) ->
- exit(whereis(couch_server_sup), shutdown),
- remove_handler;
-handle_config_change("couchdb", "util_driver_dir", _, _, _) ->
- [Pid] = [P || {collation_driver, P, _, _}
- <- supervisor:which_children(couch_primary_services)],
- Pid ! reload_driver,
- {ok, nil};
-handle_config_change(_, _, _, _, _) ->
- {ok, nil}.
-
-
-notify_starting() ->
- io:format("Apache CouchDB ~s (LogLevel=~s) is starting.~n", [
- couch_server:get_version(),
- config:get("log", "level", "info")
- ]).
-
-
-notify_started() ->
- io:format("Apache CouchDB has started. Time to relax.~n").
-
-
-notify_error(Error) ->
- io:format("Error starting Apache CouchDB:~n~n ~p~n~n", [Error]).
-
-
-notify_uris() ->
- lists:foreach(fun(Uri) ->
- ?LOG_INFO("Apache CouchDB has started on ~s", [Uri])
- end, get_uris()).
-
-
-write_pidfile() ->
- case init:get_argument(pidfile) of
- {ok, [PidFile]} ->
- write_file(PidFile, os:getpid());
- _ ->
- ok
- end.
-
-
-write_uris() ->
- case config:get("couchdb", "uri_file", null) of
- null ->
- ok;
- UriFile ->
- Lines = [io_lib:format("~s~n", [Uri]) || Uri <- get_uris()],
- write_file(UriFile, Lines)
- end.
-
-
-get_uris() ->
- Ip = config:get("httpd", "bind_address"),
- lists:flatmap(fun(Uri) ->
- case get_uri(Uri, Ip) of
- undefined -> [];
- Else -> [Else]
- end
- end, [couch_httpd, https]).
-
-
-get_uri(Name, Ip) ->
- case get_port(Name) of
- undefined ->
- undefined;
- Port ->
- io_lib:format("~s://~s:~w/", [get_scheme(Name), Ip, Port])
- end.
-
-
-get_scheme(couch_httpd) -> "http";
-get_scheme(https) -> "https".
-
-
-get_port(Name) ->
- try
- mochiweb_socket_server:get(Name, port)
- catch
- exit:{noproc, _} ->
- undefined
- end.
-
-
-write_file(FileName, Contents) ->
- case file:write_file(FileName, Contents) of
- ok ->
- ok;
- {error, Reason} ->
- Args = [FileName, file:format_error(Reason)],
- io:format(standard_error, "Failed ot write ~s :: ~s", Args),
- throw({error, Reason})
- end.
http://git-wip-us.apache.org/repos/asf/couchdb/blob/ed98610c/src/couch/src/couch_task_status.erl
----------------------------------------------------------------------
diff --git a/src/couch/src/couch_task_status.erl b/src/couch/src/couch_task_status.erl
deleted file mode 100644
index ea9821f..0000000
--- a/src/couch/src/couch_task_status.erl
+++ /dev/null
@@ -1,151 +0,0 @@
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-% http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
--module(couch_task_status).
--behaviour(gen_server).
-
-% This module is used to track the status of long running tasks.
-% Long running tasks register themselves, via a call to add_task/1, and then
-% update their status properties via update/1. The status of a task is a
-% list of properties. Each property is a tuple, with the first element being
-% either an atom or a binary and the second element must be an EJSON value. When
-% a task updates its status, it can override some or all of its properties.
-% The properties {started_on, UnitTimestamp}, {updated_on, UnixTimestamp} and
-% {pid, ErlangPid} are automatically added by this module.
-% When a tracked task dies, its status will be automatically removed from
-% memory. To get the tasks list, call the all/0 function.
-
--export([start_link/0, stop/0]).
--export([all/0, add_task/1, update/1, get/1, set_update_frequency/1]).
--export([is_task_added/0]).
-
--export([init/1, terminate/2, code_change/3]).
--export([handle_call/3, handle_cast/2, handle_info/2]).
-
--include_lib("couch/include/couch_db.hrl").
-
--define(set(L, K, V), lists:keystore(K, 1, L, {K, V})).
-
-
-start_link() ->
- gen_server:start_link({local, ?MODULE}, ?MODULE, [], []).
-
-
-stop() ->
- gen_server:cast(?MODULE, stop).
-
-
-all() ->
- gen_server:call(?MODULE, all).
-
-
-add_task(Props) ->
- put(task_status_update, {{0, 0, 0}, 0}),
- Ts = timestamp(),
- TaskProps = lists:ukeysort(
- 1, [{started_on, Ts}, {updated_on, Ts} | Props]),
- put(task_status_props, TaskProps),
- gen_server:call(?MODULE, {add_task, TaskProps}).
-
-
-is_task_added() ->
- is_list(erlang:get(task_status_props)).
-
-
-set_update_frequency(Msecs) ->
- put(task_status_update, {{0, 0, 0}, Msecs * 1000}).
-
-
-update(Props) ->
- MergeProps = lists:ukeysort(1, Props),
- TaskProps = lists:ukeymerge(1, MergeProps, erlang:get(task_status_props)),
- put(task_status_props, TaskProps),
- maybe_persist(TaskProps).
-
-
-get(Props) when is_list(Props) ->
- TaskProps = erlang:get(task_status_props),
- [couch_util:get_value(P, TaskProps) || P <- Props];
-get(Prop) ->
- TaskProps = erlang:get(task_status_props),
- couch_util:get_value(Prop, TaskProps).
-
-
-maybe_persist(TaskProps0) ->
- {LastUpdateTime, Frequency} = erlang:get(task_status_update),
- case timer:now_diff(Now = now(), LastUpdateTime) >= Frequency of
- true ->
- put(task_status_update, {Now, Frequency}),
- TaskProps = ?set(TaskProps0, updated_on, timestamp(Now)),
- gen_server:cast(?MODULE, {update_status, self(), TaskProps});
- false ->
- ok
- end.
-
-
-init([]) ->
- % read configuration settings and register for configuration changes
- ets:new(?MODULE, [ordered_set, protected, named_table]),
- {ok, nil}.
-
-
-terminate(_Reason,_State) ->
- ok.
-
-
-handle_call({add_task, TaskProps}, {From, _}, Server) ->
- case ets:lookup(?MODULE, From) of
- [] ->
- true = ets:insert(?MODULE, {From, TaskProps}),
- erlang:monitor(process, From),
- {reply, ok, Server};
- [_] ->
- {reply, {add_task_error, already_registered}, Server}
- end;
-handle_call(all, _, Server) ->
- All = [
- [{pid, ?l2b(pid_to_list(Pid))} | TaskProps]
- ||
- {Pid, TaskProps} <- ets:tab2list(?MODULE)
- ],
- {reply, All, Server}.
-
-
-handle_cast({update_status, Pid, NewProps}, Server) ->
- case ets:lookup(?MODULE, Pid) of
- [{Pid, _CurProps}] ->
- ?LOG_DEBUG("New task status for ~p: ~p", [Pid, NewProps]),
- true = ets:insert(?MODULE, {Pid, NewProps});
- _ ->
- % Task finished/died in the meanwhile and we must have received
- % a monitor message before this call - ignore.
- ok
- end,
- {noreply, Server};
-handle_cast(stop, State) ->
- {stop, normal, State}.
-
-handle_info({'DOWN', _MonitorRef, _Type, Pid, _Info}, Server) ->
- %% should we also erlang:demonitor(_MonitorRef), ?
- ets:delete(?MODULE, Pid),
- {noreply, Server}.
-
-
-code_change(_OldVsn, State, _Extra) ->
- {ok, State}.
-
-
-timestamp() ->
- timestamp(now()).
-
-timestamp({Mega, Secs, _}) ->
- Mega * 1000000 + Secs.
http://git-wip-us.apache.org/repos/asf/couchdb/blob/ed98610c/src/couch/src/couch_users_db.erl
----------------------------------------------------------------------
diff --git a/src/couch/src/couch_users_db.erl b/src/couch/src/couch_users_db.erl
deleted file mode 100644
index 76acfee..0000000
--- a/src/couch/src/couch_users_db.erl
+++ /dev/null
@@ -1,110 +0,0 @@
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-% http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
--module(couch_users_db).
-
--export([before_doc_update/2, after_doc_read/2]).
-
--include_lib("couch/include/couch_db.hrl").
-
--define(NAME, <<"name">>).
--define(PASSWORD, <<"password">>).
--define(DERIVED_KEY, <<"derived_key">>).
--define(PASSWORD_SCHEME, <<"password_scheme">>).
--define(PBKDF2, <<"pbkdf2">>).
--define(ITERATIONS, <<"iterations">>).
--define(SALT, <<"salt">>).
--define(replace(L, K, V), lists:keystore(K, 1, L, {K, V})).
-
%% Access control applied before a doc is written to the users database:
%%   * an admin may write any user doc;
%%   * an anonymous request (userCtx name == null) may write too — this
%%     is a new user registering; if a doc with the same id already
%%     exists the anonymous user gets a regular doc update conflict;
%%   * an authenticated user may only write the doc whose name matches
%%     their own;
%%   * anyone else gets not_found (404), so doc existence is not leaked.
before_doc_update(Doc, #db{user_ctx = UserCtx} = Db) ->
    #user_ctx{name = Name} = UserCtx,
    IsAdmin = (catch couch_db:check_is_admin(Db)) =:= ok,
    if
        IsAdmin ->
            save_doc(Doc);
        Name =:= null ->
            save_doc(Doc);
        Name =:= get_doc_name(Doc) ->
            save_doc(Doc);
        true ->
            throw(not_found)
    end.
-
%% If the doc has no clear-text password (null for server admins, or the
%% field is absent) it passes through untouched. Otherwise the password
%% is hashed server side:
%%   password_scheme = "pbkdf2", iterations, salt and derived_key are
%%   set in the body — replacing any values already present — and the
%%   clear-text password field is removed.
save_doc(#doc{body = {Body}} = Doc) ->
    case couch_util:get_value(?PASSWORD, Body) of
        null ->
            % server admins don't have a user-db password entry
            Doc;
        undefined ->
            Doc;
        ClearPassword ->
            Iterations = list_to_integer(
                config:get("couch_httpd_auth", "iterations", "1000")),
            Salt = couch_uuids:random(),
            DerivedKey = couch_passwords:pbkdf2(ClearPassword, Salt, Iterations),
            % use ?replace for every derived field (the original code
            % blindly prepended scheme/iterations, so re-setting a
            % password left duplicate keys in the body)
            Body0 = ?replace(Body, ?PASSWORD_SCHEME, ?PBKDF2),
            Body1 = ?replace(Body0, ?ITERATIONS, Iterations),
            Body2 = ?replace(Body1, ?DERIVED_KEY, DerivedKey),
            Body3 = ?replace(Body2, ?SALT, Salt),
            Body4 = proplists:delete(?PASSWORD, Body3),
            Doc#doc{body = {Body4}}
    end.
-
%% Access control applied after a doc is read from the users database:
%%   * design docs are visible to admins only — anyone else gets 403;
%%   * regular user docs are visible to admins and to the user whose
%%     name matches the doc's name; everyone else gets not_found (404)
%%     so doc existence is not leaked.
after_doc_read(#doc{id = <<?DESIGN_DOC_PREFIX, _/binary>>} = Doc, Db) ->
    case (catch couch_db:check_is_admin(Db)) of
        ok ->
            Doc;
        _ ->
            throw({forbidden,
                <<"Only administrators can view design docs in the users database.">>})
    end;
after_doc_read(Doc, #db{user_ctx = #user_ctx{name = Name}} = Db) ->
    case (catch couch_db:check_is_admin(Db)) of
        ok ->
            Doc;
        _ ->
            % Name is already bound, so this clause matches only when the
            % doc's name equals the requester's name (=:= semantics).
            case get_doc_name(Doc) of
                Name ->
                    Doc;
                _ ->
                    throw(not_found)
            end
    end.
-
%% Extract the user name from a user-doc id of the form
%% "org.couchdb.user:<Name>"; anything else yields 'undefined'.
get_doc_name(#doc{id = DocId}) ->
    case DocId of
        <<"org.couchdb.user:", Name/binary>> ->
            Name;
        _ ->
            undefined
    end;
get_doc_name(_) ->
    undefined.