You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@couchdb.apache.org by ch...@apache.org on 2014/08/11 22:23:14 UTC
[38/50] [abbrv] Move files out of test/couchdb into top level test/
folder
http://git-wip-us.apache.org/repos/asf/couchdb-couch/blob/661443fb/test/couchdb_attachments_tests.erl
----------------------------------------------------------------------
diff --git a/test/couchdb_attachments_tests.erl b/test/couchdb_attachments_tests.erl
new file mode 100644
index 0000000..cf59785
--- /dev/null
+++ b/test/couchdb_attachments_tests.erl
@@ -0,0 +1,638 @@
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+% http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
-module(couchdb_attachments_tests).

-include("couch_eunit.hrl").
-include_lib("couchdb/couch_db.hrl").

% zlib compression level the server is configured with in start/0;
% gzip/1 below must use the same level so encoded sizes match.
-define(COMPRESSION_LEVEL, 8).
-define(ATT_BIN_NAME, <<"logo.png">>).
-define(ATT_TXT_NAME, <<"file.erl">>).
-define(FIXTURE_PNG, filename:join([?FIXTURESDIR, "logo.png"])).
% this module's own source file doubles as the text fixture
-define(FIXTURE_TXT, ?FILE).
% milliseconds to wait for the server to stop
-define(TIMEOUT, 1000).
% seconds, for eunit {timeout, ...} wrappers
-define(TIMEOUT_EUNIT, 10).
% milliseconds to wait for a raw-socket HTTP response
-define(TIMEWAIT, 100).
-define(i2l(I), integer_to_list(I)).
+
+
%% Suite setup: start the CouchDB supervision tree and pin attachment
%% compression settings to known values; returns the supervisor pid
%% that stop/1 expects.
start() ->
    {ok, Pid} = couch_server_sup:start_link(?CONFIG_CHAIN),
    % ensure in default compression settings for attachments_compression_tests
    couch_config:set("attachments", "compression_level",
        ?i2l(?COMPRESSION_LEVEL), false),
    couch_config:set("attachments", "compressible_types", "text/*", false),
    Pid.
+
%% Suite teardown: stop the supervision tree started by start/0 and wait
%% (up to ?TIMEOUT ms) for the supervisor process to actually go down.
stop(Pid) ->
    erlang:monitor(process, Pid),
    couch_server_sup:stop(),
    receive
        {'DOWN', _, _, Pid, _} ->
            ok
    after ?TIMEOUT ->
        throw({timeout, server_stop})
    end.
+
%% Per-test setup: create a fresh temporary database and return
%% {Host, DbName} where Host is the "address:port" string of the
%% running HTTP interface and DbName is a list.
setup() ->
    TempDbName = ?tempdb(),
    {ok, TempDb} = couch_db:create(TempDbName, []),
    ok = couch_db:close(TempDb),
    BindAddr = couch_config:get("httpd", "bind_address", any),
    HttpPort = mochiweb_socket_server:get(couch_httpd, port),
    HostAndPort = BindAddr ++ ":" ++ ?i2l(HttpPort),
    {HostAndPort, ?b2l(TempDbName)}.
+
%% Per-test setup for the foreachx compression fixtures: create the
%% database, then create one attachment of the requested flavor.
%% Returns {FixtureData, {DbName, DocUrl, AttUrl}} via setup_att/4.
setup(Flavor) ->
    {Host, DbName} = setup(),
    {CreateFun, FixtureFile} =
        case Flavor of
            {binary, standalone} ->
                {fun create_standalone_png_att/2, ?FIXTURE_PNG};
            {text, standalone} ->
                {fun create_standalone_text_att/2, ?FIXTURE_TXT};
            {binary, inline} ->
                {fun create_inline_png_att/2, ?FIXTURE_PNG};
            {text, inline} ->
                {fun create_inline_text_att/2, ?FIXTURE_TXT};
            compressed ->
                {fun create_already_compressed_att/2, ?FIXTURE_TXT}
        end,
    setup_att(CreateFun, Host, DbName, FixtureFile).
%% Create an attachment via CreateFun (which returns the attachment URL)
%% and read back the fixture file it was built from. Returns
%% {FixtureData, {DbName, DocUrl, AttUrl}} for use by the test funs.
setup_att(CreateFun, Host, DbName, FixtureFile) ->
    BaseUrl = "http://" ++ Host,
    AttUrl = CreateFun(BaseUrl, DbName),
    DocUrl = string:join([BaseUrl, DbName, "doc"], "/"),
    {ok, FixtureData} = file:read_file(FixtureFile),
    {FixtureData, {DbName, DocUrl, AttUrl}}.
+
%% foreachx cleanup: pull the db name out of the fixture built by
%% setup/1 (second argument) and delegate to teardown/1.
teardown(_, {_, {DbName, _, _}}) ->
    teardown(DbName).
+
%% foreach cleanup: accepts either the {Host, DbName} fixture from
%% setup/0 or a bare db-name string; deletes the temporary database.
teardown({_, DbName}) ->
    teardown(DbName);
teardown(DbName) ->
    ok = couch_server:delete(?l2b(DbName), []),
    ok.
+
+
%% Top-level eunit generator: run the MD5 and compression sub-suites
%% under a single server start/stop fixture.
attachments_test_() ->
    {
        "Attachments tests",
        {
            setup,
            fun start/0, fun stop/1,
            [
                attachments_md5_tests(),
                attachments_compression_tests()
            ]
        }
    }.
+
%% Sub-suite: Content-MD5 validation on attachment upload, each case
%% running against a fresh database (foreach setup/teardown).
attachments_md5_tests() ->
    {
        "Attachments MD5 tests",
        {
            foreach,
            fun setup/0, fun teardown/1,
            [
                fun should_upload_attachment_without_md5/1,
                fun should_upload_attachment_by_chunks_without_md5/1,
                fun should_upload_attachment_with_valid_md5_header/1,
                fun should_upload_attachment_by_chunks_with_valid_md5_header/1,
                fun should_upload_attachment_by_chunks_with_valid_md5_trailer/1,
                fun should_reject_attachment_with_invalid_md5/1,
                fun should_reject_chunked_attachment_with_invalid_md5/1,
                fun should_reject_chunked_attachment_with_invalid_md5_trailer/1
            ]
        }
    }.
+
%% Sub-suite generator for attachment compression: the shared Funs are
%% run against attachments created standalone, inline, and already
%% compressed; a final foreach group covers rejected upload encodings.
attachments_compression_tests() ->
    Funs = [
        fun should_get_att_without_accept_gzip_encoding/2,
        fun should_get_att_with_accept_gzip_encoding/2,
        fun should_get_att_with_accept_deflate_encoding/2,
        fun should_return_406_response_on_unsupported_encoding/2,
        fun should_get_doc_with_att_data/2,
        fun should_get_doc_with_att_data_stub/2
    ],
    {
        "Attachments compression tests",
        [
            {
                "Created via Attachments API",
                created_attachments_compression_tests(standalone, Funs)
            },
            {
                "Created inline via Document API",
                created_attachments_compression_tests(inline, Funs)
            },
            {
                % description fixed: was "Created already been compressed ..."
                "Created already compressed via Attachments API",
                {
                    foreachx,
                    fun setup/1, fun teardown/2,
                    [{compressed, Fun} || Fun <- Funs]
                }
            },
            {
                foreach,
                fun setup/0, fun teardown/1,
                [
                    fun should_not_create_compressed_att_with_deflate_encoding/1,
                    fun should_not_create_compressed_att_with_compress_encoding/1,
                    fun should_create_compressible_att_with_ctype_params/1
                ]
            }
        ]
    }.
+
%% Build two foreachx groups for attachments created via Mod
%% (standalone | inline): text fixtures are compressible, PNG fixtures
%% are not. Description typos fixed ("Compressiable"/"Uncompressiable").
created_attachments_compression_tests(Mod, Funs) ->
    [
        {
            "Compressible attachments",
            {
                foreachx,
                fun setup/1, fun teardown/2,
                [{{text, Mod}, Fun} || Fun <- Funs]
            }
        },
        {
            "Uncompressible attachments",
            {
                foreachx,
                fun setup/1, fun teardown/2,
                [{{binary, Mod}, Fun} || Fun <- Funs]
            }
        }
    ].
+
+
+
%% Uploading a standalone attachment with no Content-MD5 header must
%% succeed with 201 Created.
should_upload_attachment_without_md5({Host, DbName}) ->
    ?_test(begin
        % leading "" makes string:join yield a path starting with "/"
        AttUrl = string:join(["", DbName, ?docid(), "readme.txt"], "/"),
        Body = "We all live in a yellow submarine!",
        Headers = [
            {"Content-Length", "34"},
            {"Content-Type", "text/plain"},
            {"Host", Host}
        ],
        {ok, Code, Json} = request("PUT", AttUrl, Headers, Body),
        ?assertEqual(201, Code),
        ?assertEqual(true, get_json(Json, [<<"ok">>]))
    end).
+
%% Chunked (Transfer-Encoding: chunked) upload without Content-MD5 must
%% succeed with 201 Created.
should_upload_attachment_by_chunks_without_md5({Host, DbName}) ->
    ?_test(begin
        AttUrl = string:join(["", DbName, ?docid(), "readme.txt"], "/"),
        AttData = <<"We all live in a yellow submarine!">>,
        % split the 34-byte payload into two chunks
        <<Part1:21/binary, Part2:13/binary>> = AttData,
        Body = chunked_body([Part1, Part2]),
        Headers = [
            {"Content-Type", "text/plain"},
            {"Transfer-Encoding", "chunked"},
            {"Host", Host}
        ],
        {ok, Code, Json} = request("PUT", AttUrl, Headers, Body),
        ?assertEqual(201, Code),
        ?assertEqual(true, get_json(Json, [<<"ok">>]))
    end).
+
%% Upload with a correct base64-encoded Content-MD5 header must be
%% accepted (201).
should_upload_attachment_with_valid_md5_header({Host, DbName}) ->
    ?_test(begin
        AttUrl = string:join(["", DbName, ?docid(), "readme.txt"], "/"),
        Body = "We all live in a yellow submarine!",
        Headers = [
            {"Content-Length", "34"},
            {"Content-Type", "text/plain"},
            {"Content-MD5", ?b2l(base64:encode(couch_util:md5(Body)))},
            {"Host", Host}
        ],
        {ok, Code, Json} = request("PUT", AttUrl, Headers, Body),
        ?assertEqual(201, Code),
        ?assertEqual(true, get_json(Json, [<<"ok">>]))
    end).
+
%% Chunked upload with a correct Content-MD5 header (over the whole
%% payload, not the chunks) must be accepted (201).
should_upload_attachment_by_chunks_with_valid_md5_header({Host, DbName}) ->
    ?_test(begin
        AttUrl = string:join(["", DbName, ?docid(), "readme.txt"], "/"),
        AttData = <<"We all live in a yellow submarine!">>,
        <<Part1:21/binary, Part2:13/binary>> = AttData,
        Body = chunked_body([Part1, Part2]),
        Headers = [
            {"Content-Type", "text/plain"},
            {"Content-MD5", ?b2l(base64:encode(couch_util:md5(AttData)))},
            {"Host", Host},
            {"Transfer-Encoding", "chunked"}
        ],
        {ok, Code, Json} = request("PUT", AttUrl, Headers, Body),
        ?assertEqual(201, Code),
        ?assertEqual(true, get_json(Json, [<<"ok">>]))
    end).
+
%% Chunked upload where Content-MD5 is sent as an HTTP trailer (after
%% the terminating chunk) must be accepted (201).
should_upload_attachment_by_chunks_with_valid_md5_trailer({Host, DbName}) ->
    ?_test(begin
        AttUrl = string:join(["", DbName, ?docid(), "readme.txt"], "/"),
        AttData = <<"We all live in a yellow submarine!">>,
        <<Part1:21/binary, Part2:13/binary>> = AttData,
        % trailer is appended after chunked_body's final "0\r\n"
        Body = [chunked_body([Part1, Part2]),
            "Content-MD5: ", base64:encode(couch_util:md5(AttData)),
            "\r\n"],
        Headers = [
            {"Content-Type", "text/plain"},
            {"Host", Host},
            {"Trailer", "Content-MD5"},
            {"Transfer-Encoding", "chunked"}
        ],
        {ok, Code, Json} = request("PUT", AttUrl, Headers, Body),
        ?assertEqual(201, Code),
        ?assertEqual(true, get_json(Json, [<<"ok">>]))
    end).
+
%% Upload with a Content-MD5 that does not match the body must be
%% rejected with 400 and error "content_md5_mismatch".
should_reject_attachment_with_invalid_md5({Host, DbName}) ->
    ?_test(begin
        AttUrl = string:join(["", DbName, ?docid(), "readme.txt"], "/"),
        Body = "We all live in a yellow submarine!",
        Headers = [
            {"Content-Length", "34"},
            {"Content-Type", "text/plain"},
            {"Content-MD5", ?b2l(base64:encode(<<"foobar!">>))},
            {"Host", Host}
        ],
        {ok, Code, Json} = request("PUT", AttUrl, Headers, Body),
        ?assertEqual(400, Code),
        ?assertEqual(<<"content_md5_mismatch">>,
            get_json(Json, [<<"error">>]))
    end).
+
+
%% Chunked upload with a mismatching Content-MD5 header must be rejected
%% with 400 and error "content_md5_mismatch".
should_reject_chunked_attachment_with_invalid_md5({Host, DbName}) ->
    ?_test(begin
        AttUrl = string:join(["", DbName, ?docid(), "readme.txt"], "/"),
        AttData = <<"We all live in a yellow submarine!">>,
        <<Part1:21/binary, Part2:13/binary>> = AttData,
        Body = chunked_body([Part1, Part2]),
        Headers = [
            {"Content-Type", "text/plain"},
            {"Content-MD5", ?b2l(base64:encode(<<"foobar!">>))},
            {"Host", Host},
            {"Transfer-Encoding", "chunked"}
        ],
        {ok, Code, Json} = request("PUT", AttUrl, Headers, Body),
        ?assertEqual(400, Code),
        ?assertEqual(<<"content_md5_mismatch">>,
            get_json(Json, [<<"error">>]))
    end).
+
%% Chunked upload with a mismatching Content-MD5 trailer must be
%% rejected with 400 and error "content_md5_mismatch".
should_reject_chunked_attachment_with_invalid_md5_trailer({Host, DbName}) ->
    ?_test(begin
        AttUrl = string:join(["", DbName, ?docid(), "readme.txt"], "/"),
        AttData = <<"We all live in a yellow submarine!">>,
        <<Part1:21/binary, Part2:13/binary>> = AttData,
        Body = [chunked_body([Part1, Part2]),
            "Content-MD5: ", base64:encode(<<"foobar!">>),
            "\r\n"],
        Headers = [
            {"Content-Type", "text/plain"},
            {"Host", Host},
            {"Trailer", "Content-MD5"},
            {"Transfer-Encoding", "chunked"}
        ],
        {ok, Code, Json} = request("PUT", AttUrl, Headers, Body),
        ?assertEqual(400, Code),
        ?assertEqual(<<"content_md5_mismatch">>, get_json(Json, [<<"error">>]))
    end).
+
%% Without an Accept-Encoding header the attachment must come back
%% identity-encoded and byte-identical to the fixture, whatever flavor
%% it was created as (first argument ignored).
should_get_att_without_accept_gzip_encoding(_, {Data, {_, _, AttUrl}}) ->
    ?_test(begin
        {ok, Code, Headers, Body} = test_request:get(AttUrl),
        ?assertEqual(200, Code),
        ?assertNot(lists:member({"Content-Encoding", "gzip"}, Headers)),
        ?assertEqual(Data, iolist_to_binary(Body))
    end).
+
%% With "Accept-Encoding: gzip": compressed and compressible (text)
%% attachments come back gzipped; binary (PNG) attachments come back
%% identity-encoded because image/png is not a compressible type.
should_get_att_with_accept_gzip_encoding(compressed, {Data, {_, _, AttUrl}}) ->
    ?_test(begin
        {ok, Code, Headers, Body} = test_request:get(
            AttUrl, [{"Accept-Encoding", "gzip"}]),
        ?assertEqual(200, Code),
        ?assert(lists:member({"Content-Encoding", "gzip"}, Headers)),
        ?assertEqual(Data, zlib:gunzip(iolist_to_binary(Body)))
    end);
should_get_att_with_accept_gzip_encoding({text, _}, {Data, {_, _, AttUrl}}) ->
    ?_test(begin
        {ok, Code, Headers, Body} = test_request:get(
            AttUrl, [{"Accept-Encoding", "gzip"}]),
        ?assertEqual(200, Code),
        ?assert(lists:member({"Content-Encoding", "gzip"}, Headers)),
        ?assertEqual(Data, zlib:gunzip(iolist_to_binary(Body)))
    end);
should_get_att_with_accept_gzip_encoding({binary, _}, {Data, {_, _, AttUrl}}) ->
    ?_test(begin
        {ok, Code, Headers, Body} = test_request:get(
            AttUrl, [{"Accept-Encoding", "gzip"}]),
        ?assertEqual(200, Code),
        ?assertEqual(undefined,
            couch_util:get_value("Content-Encoding", Headers)),
        ?assertEqual(Data, iolist_to_binary(Body))
    end).
+
%% With "Accept-Encoding: deflate": deflate is not a supported response
%% coding, so the server must fall back to identity (no Content-Encoding
%% header) for every attachment flavor.
should_get_att_with_accept_deflate_encoding(_, {Data, {_, _, AttUrl}}) ->
    ?_test(begin
        {ok, Code, Headers, Body} = test_request:get(
            AttUrl, [{"Accept-Encoding", "deflate"}]),
        ?assertEqual(200, Code),
        ?assertEqual(undefined,
            couch_util:get_value("Content-Encoding", Headers)),
        ?assertEqual(Data, iolist_to_binary(Body))
    end).
+
%% An Accept-Encoding that forbids every supported coding
%% ("deflate, *;q=0" rules out identity and gzip alike) must yield
%% 406 Not Acceptable.
should_return_406_response_on_unsupported_encoding(_, {_, {_, _, AttUrl}}) ->
    ?_test(begin
        ReqHeaders = [{"Accept-Encoding", "deflate, *;q=0"}],
        {ok, Code, _, _} = test_request:get(AttUrl, ReqHeaders),
        ?assertEqual(406, Code)
    end).
+
%% GET doc?attachments=true must inline the attachment as base64 "data"
%% that decodes back to the original fixture bytes, with the right
%% content_type, for all three creation flavors.
should_get_doc_with_att_data(compressed, {Data, {_, DocUrl, _}}) ->
    ?_test(begin
        Url = DocUrl ++ "?attachments=true",
        {ok, Code, _, Body} = test_request:get(
            Url, [{"Accept", "application/json"}]),
        ?assertEqual(200, Code),
        Json = ejson:decode(Body),
        AttJson = couch_util:get_nested_json_value(
            Json, [<<"_attachments">>, ?ATT_TXT_NAME]),
        AttData = couch_util:get_nested_json_value(
            AttJson, [<<"data">>]),
        ?assertEqual(
            <<"text/plain">>,
            couch_util:get_nested_json_value(AttJson,[<<"content_type">>])),
        ?assertEqual(Data, base64:decode(AttData))
    end);
should_get_doc_with_att_data({text, _}, {Data, {_, DocUrl, _}}) ->
    ?_test(begin
        Url = DocUrl ++ "?attachments=true",
        {ok, Code, _, Body} = test_request:get(
            Url, [{"Accept", "application/json"}]),
        ?assertEqual(200, Code),
        Json = ejson:decode(Body),
        AttJson = couch_util:get_nested_json_value(
            Json, [<<"_attachments">>, ?ATT_TXT_NAME]),
        AttData = couch_util:get_nested_json_value(
            AttJson, [<<"data">>]),
        ?assertEqual(
            <<"text/plain">>,
            couch_util:get_nested_json_value(AttJson,[<<"content_type">>])),
        ?assertEqual(Data, base64:decode(AttData))
    end);
should_get_doc_with_att_data({binary, _}, {Data, {_, DocUrl, _}}) ->
    ?_test(begin
        Url = DocUrl ++ "?attachments=true",
        {ok, Code, _, Body} = test_request:get(
            Url, [{"Accept", "application/json"}]),
        ?assertEqual(200, Code),
        Json = ejson:decode(Body),
        AttJson = couch_util:get_nested_json_value(
            Json, [<<"_attachments">>, ?ATT_BIN_NAME]),
        AttData = couch_util:get_nested_json_value(
            AttJson, [<<"data">>]),
        ?assertEqual(
            <<"image/png">>,
            couch_util:get_nested_json_value(AttJson,[<<"content_type">>])),
        ?assertEqual(Data, base64:decode(AttData))
    end).
+
%% GET doc?att_encoding_info=true must report the stored encoding:
%% already-compressed attachments report gzip with length == encoded
%% length (server stores the gzipped bytes as-is); text attachments
%% report gzip with a distinct encoded_length; binary attachments
%% report no encoding at all.
should_get_doc_with_att_data_stub(compressed, {Data, {_, DocUrl, _}}) ->
    ?_test(begin
        Url = DocUrl ++ "?att_encoding_info=true",
        {ok, Code, _, Body} = test_request:get(
            Url, [{"Accept", "application/json"}]),
        ?assertEqual(200, Code),
        Json = ejson:decode(Body),
        {AttJson} = couch_util:get_nested_json_value(
            Json, [<<"_attachments">>, ?ATT_TXT_NAME]),
        ?assertEqual(<<"gzip">>,
            couch_util:get_value(<<"encoding">>, AttJson)),
        AttLength = couch_util:get_value(<<"length">>, AttJson),
        EncLength = couch_util:get_value(<<"encoded_length">>, AttJson),
        ?assertEqual(AttLength, EncLength),
        % zlib:gzip matches the client-side compression used at creation
        ?assertEqual(iolist_size(zlib:gzip(Data)), AttLength)
    end);
should_get_doc_with_att_data_stub({text, _}, {Data, {_, DocUrl, _}}) ->
    ?_test(begin
        Url = DocUrl ++ "?att_encoding_info=true",
        {ok, Code, _, Body} = test_request:get(
            Url, [{"Accept", "application/json"}]),
        ?assertEqual(200, Code),
        Json = ejson:decode(Body),
        {AttJson} = couch_util:get_nested_json_value(
            Json, [<<"_attachments">>, ?ATT_TXT_NAME]),
        ?assertEqual(<<"gzip">>,
            couch_util:get_value(<<"encoding">>, AttJson)),
        % gzip/1 mirrors the server's compression level, see module defines
        AttEncLength = iolist_size(gzip(Data)),
        ?assertEqual(AttEncLength,
            couch_util:get_value(<<"encoded_length">>, AttJson)),
        ?assertEqual(byte_size(Data),
            couch_util:get_value(<<"length">>, AttJson))
    end);
should_get_doc_with_att_data_stub({binary, _}, {Data, {_, DocUrl, _}}) ->
    ?_test(begin
        Url = DocUrl ++ "?att_encoding_info=true",
        {ok, Code, _, Body} = test_request:get(
            Url, [{"Accept", "application/json"}]),
        ?assertEqual(200, Code),
        Json = ejson:decode(Body),
        {AttJson} = couch_util:get_nested_json_value(
            Json, [<<"_attachments">>, ?ATT_BIN_NAME]),
        ?assertEqual(undefined,
            couch_util:get_value(<<"encoding">>, AttJson)),
        ?assertEqual(undefined,
            couch_util:get_value(<<"encoded_length">>, AttJson)),
        ?assertEqual(byte_size(Data),
            couch_util:get_value(<<"length">>, AttJson))
    end).
+
%% Uploading an attachment pre-compressed with Content-Encoding: deflate
%% must be rejected with 415 Unsupported Media Type (only gzip is
%% accepted as an upload encoding).
should_not_create_compressed_att_with_deflate_encoding({Host, DbName}) ->
    ?_assertEqual(415,
        begin
            HttpHost = "http://" ++ Host,
            AttUrl = string:join([HttpHost, DbName, ?docid(), "file.txt"], "/"),
            {ok, Data} = file:read_file(?FIXTURE_TXT),
            Body = zlib:compress(Data),
            Headers = [
                {"Content-Encoding", "deflate"},
                {"Content-Type", "text/plain"}
            ],
            {ok, Code, _, _} = test_request:put(AttUrl, Headers, Body),
            Code
        end).
+
%% Uploading with Content-Encoding: compress must also be rejected with
%% 415. The body is sent uncompressed: only the header matters, since
%% the server rejects the encoding before inspecting the payload.
should_not_create_compressed_att_with_compress_encoding({Host, DbName}) ->
    % Note: As of OTP R13B04, it seems there's no LZW compression
    % (i.e. UNIX compress utility implementation) lib in OTP.
    % However there's a simple working Erlang implementation at:
    % http://scienceblogs.com/goodmath/2008/01/simple_lempelziv_compression_i.php
    ?_assertEqual(415,
        begin
            HttpHost = "http://" ++ Host,
            AttUrl = string:join([HttpHost, DbName, ?docid(), "file.txt"], "/"),
            {ok, Data} = file:read_file(?FIXTURE_TXT),
            Headers = [
                {"Content-Encoding", "compress"},
                {"Content-Type", "text/plain"}
            ],
            {ok, Code, _, _} = test_request:put(AttUrl, Headers, Data),
            Code
        end).
+
%% A "text/plain; charset=UTF-8" attachment must still match the
%% compressible_types pattern "text/*" (content-type parameters must be
%% ignored) and be stored gzip-encoded.
should_create_compressible_att_with_ctype_params({Host, DbName}) ->
    {timeout, ?TIMEOUT_EUNIT, ?_test(begin
        HttpHost = "http://" ++ Host,
        DocUrl = string:join([HttpHost, DbName, ?docid()], "/"),
        AttUrl = string:join([DocUrl, ?b2l(?ATT_TXT_NAME)], "/"),
        {ok, Data} = file:read_file(?FIXTURE_TXT),
        Headers = [{"Content-Type", "text/plain; charset=UTF-8"}],
        {ok, Code0, _, _} = test_request:put(AttUrl, Headers, Data),
        ?assertEqual(201, Code0),

        {ok, Code1, _, Body} = test_request:get(
            DocUrl ++ "?att_encoding_info=true"),
        ?assertEqual(200, Code1),
        Json = ejson:decode(Body),
        {AttJson} = couch_util:get_nested_json_value(
            Json, [<<"_attachments">>, ?ATT_TXT_NAME]),
        ?assertEqual(<<"gzip">>,
            couch_util:get_value(<<"encoding">>, AttJson)),
        AttEncLength = iolist_size(gzip(Data)),
        ?assertEqual(AttEncLength,
            couch_util:get_value(<<"encoded_length">>, AttJson)),
        ?assertEqual(byte_size(Data),
            couch_util:get_value(<<"length">>, AttJson))
    end)}.
+
+
%% Shorthand for extracting a nested value from a decoded JSON term.
get_json(Json, Path) ->
    couch_util:get_nested_json_value(Json, Path).
+
%% Render a non-negative integer as an uppercase hex string.
%% Fixed: to_hex(0) previously returned "" (the accumulator base case
%% fired immediately), which would have emitted a malformed chunk-size
%% field for an empty chunk; it now returns "0".
to_hex(0) ->
    "0";
to_hex(Val) ->
    to_hex(Val, []).

to_hex(0, Acc) ->
    Acc;
to_hex(Val, Acc) ->
    to_hex(Val div 16, [hex_char(Val rem 16) | Acc]).

%% Map a value 0..15 to its uppercase hex digit character.
hex_char(V) when V < 10 -> $0 + V;
hex_char(V) -> $A + V - 10.

%% Encode a list of binary chunks as an HTTP/1.1 chunked transfer body:
%% hex size line, chunk bytes, CRLF for each chunk, terminated by the
%% zero-size chunk marker "0\r\n".
chunked_body(Chunks) ->
    chunked_body(Chunks, []).

chunked_body([], Acc) ->
    iolist_to_binary(lists:reverse(Acc, "0\r\n"));
chunked_body([Chunk | Rest], Acc) ->
    Size = to_hex(size(Chunk)),
    chunked_body(Rest, ["\r\n", Chunk, "\r\n", Size | Acc]).
+
%% Open a raw TCP connection to the running HTTP interface
%% (binary, passive mode), for hand-crafted requests via request/4.
get_socket() ->
    Options = [binary, {packet, 0}, {active, false}],
    Addr = couch_config:get("httpd", "bind_address", any),
    Port = mochiweb_socket_server:get(couch_httpd, port),
    {ok, Sock} = gen_tcp:connect(Addr, Port, Options),
    Sock.
+
%% Issue a hand-crafted HTTP/1.1 request over a fresh raw socket and
%% decode the JSON response body. A raw socket (rather than test_request)
%% is used so tests can control Transfer-Encoding and trailers exactly.
%% Returns {ok, StatusCode, DecodedJson}.
request(Method, Url, Headers, Body) ->
    RequestHead = [Method, " ", Url, " HTTP/1.1"],
    RequestHeaders = [[string:join([Key, Value], ": "), "\r\n"]
        || {Key, Value} <- Headers],
    Request = [RequestHead, "\r\n", RequestHeaders, "\r\n", Body, "\r\n"],
    Sock = get_socket(),
    % gen_tcp:send/2 accepts iodata directly; the previous
    % list_to_binary(lists:flatten(...)) copy was unnecessary. Also
    % assert the send succeeded instead of ignoring the result.
    ok = gen_tcp:send(Sock, Request),
    timer:sleep(?TIMEWAIT), % must wait to receive complete response
    {ok, R} = gen_tcp:recv(Sock, 0),
    gen_tcp:close(Sock),
    [Header, Body1] = re:split(R, "\r\n\r\n", [{return, binary}]),
    {ok, {http_response, _, Code, _}, _} =
        erlang:decode_packet(http, Header, []),
    Json = ejson:decode(Body1),
    {ok, Code, Json}.
+
%% Create a text attachment via the standalone attachment API
%% (PUT /db/doc/att); returns the attachment URL.
create_standalone_text_att(Host, DbName) ->
    {ok, Data} = file:read_file(?FIXTURE_TXT),
    Url = string:join([Host, DbName, "doc", ?b2l(?ATT_TXT_NAME)], "/"),
    {ok, Code, _Headers, _Body} = test_request:put(
        Url, [{"Content-Type", "text/plain"}], Data),
    ?assertEqual(201, Code),
    Url.
+
%% Create a PNG attachment via the standalone attachment API;
%% returns the attachment URL.
create_standalone_png_att(Host, DbName) ->
    {ok, Data} = file:read_file(?FIXTURE_PNG),
    Url = string:join([Host, DbName, "doc", ?b2l(?ATT_BIN_NAME)], "/"),
    {ok, Code, _Headers, _Body} = test_request:put(
        Url, [{"Content-Type", "image/png"}], Data),
    ?assertEqual(201, Code),
    Url.
+
%% Create a text attachment inline in the document body (base64 in
%% _attachments); returns the attachment URL.
create_inline_text_att(Host, DbName) ->
    {ok, Data} = file:read_file(?FIXTURE_TXT),
    Url = string:join([Host, DbName, "doc"], "/"),
    Doc = {[
        {<<"_attachments">>, {[
            {?ATT_TXT_NAME, {[
                {<<"content_type">>, <<"text/plain">>},
                {<<"data">>, base64:encode(Data)}
            ]}
        }]}}
    ]},
    {ok, Code, _Headers, _Body} = test_request:put(
        Url, [{"Content-Type", "application/json"}], ejson:encode(Doc)),
    ?assertEqual(201, Code),
    string:join([Url, ?b2l(?ATT_TXT_NAME)], "/").
+
%% Create a PNG attachment inline in the document body (base64 in
%% _attachments); returns the attachment URL.
create_inline_png_att(Host, DbName) ->
    {ok, Data} = file:read_file(?FIXTURE_PNG),
    Url = string:join([Host, DbName, "doc"], "/"),
    Doc = {[
        {<<"_attachments">>, {[
            {?ATT_BIN_NAME, {[
                {<<"content_type">>, <<"image/png">>},
                {<<"data">>, base64:encode(Data)}
            ]}
        }]}}
    ]},
    {ok, Code, _Headers, _Body} = test_request:put(
        Url, [{"Content-Type", "application/json"}], ejson:encode(Doc)),
    ?assertEqual(201, Code),
    string:join([Url, ?b2l(?ATT_BIN_NAME)], "/").
+
%% Upload a text attachment that is already gzip-compressed client-side
%% (Content-Encoding: gzip); returns the attachment URL.
create_already_compressed_att(Host, DbName) ->
    {ok, Data} = file:read_file(?FIXTURE_TXT),
    Url = string:join([Host, DbName, "doc", ?b2l(?ATT_TXT_NAME)], "/"),
    {ok, Code, _Headers, _Body} = test_request:put(
        Url, [{"Content-Type", "text/plain"}, {"Content-Encoding", "gzip"}],
        zlib:gzip(Data)),
    ?assertEqual(201, Code),
    Url.
+
%% gzip-compress Data at the same ?COMPRESSION_LEVEL the server uses,
%% so encoded_length assertions match the server's stored size. The
%% 16 + 15 window-bits value selects gzip framing (not raw deflate).
%% Fixed: the output of the first zlib:deflate/2 call was discarded and
%% only the finish chunk returned; for inputs large enough for zlib to
%% flush early that silently truncated the result. All chunks are now
%% returned as an iolist (callers only use iolist_size/1, so this is
%% backward compatible).
gzip(Data) ->
    Z = zlib:open(),
    ok = zlib:deflateInit(Z, ?COMPRESSION_LEVEL, deflated, 16 + 15, 8, default),
    Chunks = zlib:deflate(Z, Data),
    Last = zlib:deflate(Z, [], finish),
    ok = zlib:deflateEnd(Z),
    ok = zlib:close(Z),
    [Chunks, Last].
http://git-wip-us.apache.org/repos/asf/couchdb-couch/blob/661443fb/test/couchdb_compaction_daemon.erl
----------------------------------------------------------------------
diff --git a/test/couchdb_compaction_daemon.erl b/test/couchdb_compaction_daemon.erl
new file mode 100644
index 0000000..725a97b
--- /dev/null
+++ b/test/couchdb_compaction_daemon.erl
@@ -0,0 +1,231 @@
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+% http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
-module(couchdb_compaction_daemon).

-include("couch_eunit.hrl").
-include_lib("couchdb/couch_db.hrl").

% admin user context required for db create/delete
-define(ADMIN_USER, {user_ctx, #user_ctx{roles=[<<"_admin">>]}}).
% polling interval (ms) while waiting for compaction to finish
-define(DELAY, 100).
% overall wait limit in milliseconds ...
-define(TIMEOUT, 30000).
% ... and the same limit in seconds for eunit {timeout, ...}
-define(TIMEOUT_S, ?TIMEOUT div 1000).
+
+
%% Suite setup: start the server and configure the compaction daemon to
%% check every 3 seconds and to consider files over 100000 bytes.
start() ->
    {ok, Pid} = couch_server_sup:start_link(?CONFIG_CHAIN),
    couch_config:set("compaction_daemon", "check_interval", "3", false),
    couch_config:set("compaction_daemon", "min_file_size", "100000", false),
    Pid.
+
%% Suite teardown: stop the supervision tree and wait (up to ?TIMEOUT
%% ms) for the supervisor process to terminate.
stop(Pid) ->
    erlang:monitor(process, Pid),
    couch_server_sup:stop(),
    receive
        {'DOWN', _, _, Pid, _} ->
            ok
    after ?TIMEOUT ->
        throw({timeout, server_stop})
    end.
+
%% Per-test setup: create a temporary database with a design doc so both
%% db and view compaction can be exercised; returns the db name (binary).
setup() ->
    DbName = ?tempdb(),
    {ok, Db} = couch_db:create(DbName, [?ADMIN_USER]),
    create_design_doc(Db),
    ok = couch_db:close(Db),
    DbName.
+
%% Per-test cleanup: remove every compaction rule a test may have left
%% in the "compactions" config section, then delete the database.
teardown(DbName) ->
    Configs = couch_config:get("compactions"),
    lists:foreach(
        fun({Key, _}) ->
            ok = couch_config:delete("compactions", Key, false)
        end,
        Configs),
    couch_server:delete(DbName, [?ADMIN_USER]),
    ok.
+
+
%% Top-level eunit generator: each case gets a fresh database (foreach)
%% under one server start/stop fixture.
compaction_daemon_test_() ->
    {
        "Compaction daemon tests",
        {
            setup,
            fun start/0, fun stop/1,
            {
                foreach,
                fun setup/0, fun teardown/1,
                [
                    fun should_compact_by_default_rule/1,
                    fun should_compact_by_dbname_rule/1
                ]
            }
        }
    }.
+
+
%% Drive db and view fragmentation above 70%, install the "_default"
%% compaction rule, and verify the daemon compacts both: fragmentation
%% drops below 70% and file sizes shrink.
should_compact_by_default_rule(DbName) ->
    {timeout, ?TIMEOUT_S, ?_test(begin
        {ok, Db} = couch_db:open_int(DbName, []),
        populate(DbName, 70, 70, 200 * 1024),

        {_, DbFileSize} = get_db_frag(DbName),
        {_, ViewFileSize} = get_view_frag(DbName),

        ok = couch_config:set("compactions", "_default",
            "[{db_fragmentation, \"70%\"}, {view_fragmentation, \"70%\"}]",
            false),

        ok = timer:sleep(4000), % something >= check_interval
        wait_compaction_finished(DbName),
        ok = couch_config:delete("compactions", "_default", false),

        {DbFrag2, DbFileSize2} = get_db_frag(DbName),
        {ViewFrag2, ViewFileSize2} = get_view_frag(DbName),

        ?assert(DbFrag2 < 70),
        ?assert(ViewFrag2 < 70),

        ?assert(DbFileSize > DbFileSize2),
        ?assert(ViewFileSize > ViewFileSize2),

        % compaction must not leave the db handle busy
        ?assert(couch_db:is_idle(Db)),
        ok = couch_db:close(Db)
    end)}.
+
%% Same scenario as should_compact_by_default_rule/1, but the compaction
%% rule is keyed by the specific database name instead of "_default".
should_compact_by_dbname_rule(DbName) ->
    {timeout, ?TIMEOUT_S, ?_test(begin
        {ok, Db} = couch_db:open_int(DbName, []),
        populate(DbName, 70, 70, 200 * 1024),

        {_, DbFileSize} = get_db_frag(DbName),
        {_, ViewFileSize} = get_view_frag(DbName),

        ok = couch_config:set("compactions", ?b2l(DbName),
            "[{db_fragmentation, \"70%\"}, {view_fragmentation, \"70%\"}]",
            false),

        ok = timer:sleep(4000), % something >= check_interval
        wait_compaction_finished(DbName),
        ok = couch_config:delete("compactions", ?b2l(DbName), false),

        {DbFrag2, DbFileSize2} = get_db_frag(DbName),
        {ViewFrag2, ViewFileSize2} = get_view_frag(DbName),

        ?assert(DbFrag2 < 70),
        ?assert(ViewFrag2 < 70),

        ?assert(DbFileSize > DbFileSize2),
        ?assert(ViewFileSize > ViewFileSize2),

        ?assert(couch_db:is_idle(Db)),
        ok = couch_db:close(Db)
    end)}.
+
+
%% Add a design doc with three identical map views so the view index
%% grows quickly during populate/4 and can be fragmented/compacted.
create_design_doc(Db) ->
    DDoc = couch_doc:from_json_obj({[
        {<<"_id">>, <<"_design/foo">>},
        {<<"language">>, <<"javascript">>},
        {<<"views">>, {[
            {<<"foo">>, {[
                {<<"map">>, <<"function(doc) { emit(doc._id, doc); }">>}
            ]}},
            {<<"foo2">>, {[
                {<<"map">>, <<"function(doc) { emit(doc._id, doc); }">>}
            ]}},
            {<<"foo3">>, {[
                {<<"map">>, <<"function(doc) { emit(doc._id, doc); }">>}
            ]}}
        ]}}
    ]}),
    {ok, _} = couch_db:update_docs(Db, [DDoc]),
    {ok, _} = couch_db:ensure_full_commit(Db),
    ok.
+
%% Write docs until db fragmentation >= DbFrag (%), view fragmentation
%% >= ViewFrag (%), and both files are at least MinFileSize bytes.
populate(DbName, DbFrag, ViewFrag, MinFileSize) ->
    {CurDbFrag, DbFileSize} = get_db_frag(DbName),
    {CurViewFrag, ViewFileSize} = get_view_frag(DbName),
    populate(DbName, DbFrag, ViewFrag, MinFileSize, CurDbFrag, CurViewFrag,
        lists:min([DbFileSize, ViewFileSize])).
+
%% Recursive worker for populate/4: stop once both fragmentation targets
%% and the minimum file size are reached, otherwise write another batch
%% and re-measure.
populate(_Db, DbFrag, ViewFrag, MinFileSize, CurDbFrag, CurViewFrag, FileSize)
    when CurDbFrag >= DbFrag, CurViewFrag >= ViewFrag, FileSize >= MinFileSize ->
    ok;
populate(DbName, DbFrag, ViewFrag, MinFileSize, _, _, _) ->
    update(DbName),
    {CurDbFrag, DbFileSize} = get_db_frag(DbName),
    {CurViewFrag, ViewFileSize} = get_view_frag(DbName),
    populate(DbName, DbFrag, ViewFrag, MinFileSize, CurDbFrag, CurViewFrag,
        lists:min([DbFileSize, ViewFileSize])).
+
%% Write 200 single-doc updates, querying the view after each so the
%% view index is updated too (each update fragments both files).
update(DbName) ->
    {ok, Db} = couch_db:open_int(DbName, []),
    lists:foreach(fun(_) ->
        Doc = couch_doc:from_json_obj({[{<<"_id">>, couch_uuids:new()}]}),
        {ok, _} = couch_db:update_docs(Db, [Doc]),
        query_view(Db#db.name)
    end, lists:seq(1, 200)),
    couch_db:close(Db).
+
%% Build the HTTP base URL for DbName from the configured bind address
%% and the port mochiweb actually bound.
db_url(DbName) ->
    Addr = couch_config:get("httpd", "bind_address", "127.0.0.1"),
    Port = integer_to_list(mochiweb_socket_server:get(couch_httpd, port)),
    "http://" ++ Addr ++ ":" ++ Port ++ "/" ++ ?b2l(DbName).
+
%% Query the "foo" view over HTTP, forcing a view index update;
%% asserts the request succeeds.
query_view(DbName) ->
    {ok, Code, _Headers, _Body} = test_request:get(
        db_url(DbName) ++ "/_design/foo/_view/foo"),
    ?assertEqual(200, Code).
+
%% Return {FragmentationPercent, FileSize} for the database file, where
%% fragmentation is the share of the file not occupied by live data.
get_db_frag(DbName) ->
    {ok, Db} = couch_db:open_int(DbName, []),
    {ok, Info} = couch_db:get_db_info(Db),
    couch_db:close(Db),
    FileSize = couch_util:get_value(disk_size, Info),
    DataSize = couch_util:get_value(data_size, Info),
    {round((FileSize - DataSize) / FileSize * 100), FileSize}.
+
%% Return {FragmentationPercent, FileSize} for the _design/foo view
%% index file (same calculation as get_db_frag/1).
get_view_frag(DbName) ->
    {ok, Db} = couch_db:open_int(DbName, []),
    {ok, Info} = couch_mrview:get_info(Db, <<"_design/foo">>),
    couch_db:close(Db),
    FileSize = couch_util:get_value(disk_size, Info),
    DataSize = couch_util:get_value(data_size, Info),
    {round((FileSize - DataSize) / FileSize * 100), FileSize}.
+
%% Block until neither db nor view compaction is running for DbName.
%% A linked poller process (wait_loop/2) reports back with a message;
%% fails the test if nothing arrives within ?TIMEOUT ms.
wait_compaction_finished(DbName) ->
    Parent = self(),
    Loop = spawn_link(fun() -> wait_loop(DbName, Parent) end),
    receive
        {done, Loop} ->
            ok
    after ?TIMEOUT ->
        erlang:error(
            {assertion_failed,
             [{module, ?MODULE}, {line, ?LINE},
              {reason, "Compaction timeout"}]})
    end.
+
%% Poller for wait_compaction_finished/1: re-check every ?DELAY ms until
%% both the db's and the view's compact_running flags are off, then
%% notify Parent with {done, self()}.
wait_loop(DbName, Parent) ->
    {ok, Db} = couch_db:open_int(DbName, []),
    {ok, DbInfo} = couch_db:get_db_info(Db),
    {ok, ViewInfo} = couch_mrview:get_info(Db, <<"_design/foo">>),
    couch_db:close(Db),
    case (couch_util:get_value(compact_running, ViewInfo) =:= true) orelse
        (couch_util:get_value(compact_running, DbInfo) =:= true) of
        false ->
            Parent ! {done, self()};
        true ->
            ok = timer:sleep(?DELAY),
            wait_loop(DbName, Parent)
    end.
http://git-wip-us.apache.org/repos/asf/couchdb-couch/blob/661443fb/test/couchdb_cors_tests.erl
----------------------------------------------------------------------
diff --git a/test/couchdb_cors_tests.erl b/test/couchdb_cors_tests.erl
new file mode 100644
index 0000000..4e88ae7
--- /dev/null
+++ b/test/couchdb_cors_tests.erl
@@ -0,0 +1,344 @@
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+% http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+-module(couchdb_cors_tests).
+
+-include("couch_eunit.hrl").
+-include_lib("couchdb/couch_db.hrl").
+
+
+-define(ADMIN_USER, {user_ctx, #user_ctx{roles=[<<"_admin">>]}}).
+-define(SUPPORTED_METHODS,
+ "GET, HEAD, POST, PUT, DELETE, TRACE, CONNECT, COPY, OPTIONS").
+-define(TIMEOUT, 1000).
+
+
+% Start the CouchDB supervision tree with CORS enabled and a virtual host
+% mapping "example.com" to the server root (used by the vhost variants).
+start() ->
+    {ok, Pid} = couch_server_sup:start_link(?CONFIG_CHAIN),
+    ok = couch_config:set("httpd", "enable_cors", "true", false),
+    ok = couch_config:set("vhosts", "example.com", "/", false),
+    Pid.
+
+% Stop the server and wait for the supervisor process to actually exit.
+% Note the monitor is installed after stop() is requested; if Pid has
+% already died, the monitor still delivers a 'DOWN' (reason noproc), so
+% there is no race here.
+stop(Pid) ->
+    couch_server_sup:stop(),
+    erlang:monitor(process, Pid),
+    receive
+        {'DOWN', _, _, Pid, _} ->
+            ok
+    after ?TIMEOUT ->
+        throw({timeout, server_stop})
+    end.
+
+% Per-test fixture: create a fresh temp database and reset the CORS
+% settings each test mutates, returning {Host, DbName} where Host is the
+% root URL of the locally bound httpd instance.
+setup() ->
+    DbName = ?tempdb(),
+    {ok, Db} = couch_db:create(DbName, [?ADMIN_USER]),
+    couch_db:close(Db),
+
+    % Reset to the defaults the assertions below rely on.
+    couch_config:set("cors", "credentials", "false", false),
+    couch_config:set("cors", "origins", "http://example.com", false),
+
+    Addr = couch_config:get("httpd", "bind_address", "127.0.0.1"),
+    Port = integer_to_list(mochiweb_socket_server:get(couch_httpd, port)),
+    Host = "http://" ++ Addr ++ ":" ++ Port,
+    {Host, ?b2l(DbName)}.
+
+% foreachx fixture: extend setup/0 with the request URL (server root or
+% database URL depending on Mod) and the default Origin/Host headers.
+setup({Mod, VHost}) ->
+    {Host, DbName} = setup(),
+    Url =
+        if
+            Mod =:= server -> Host;
+            Mod =:= db -> Host ++ "/" ++ DbName
+        end,
+    DefaultHeaders = [{"Origin", "http://example.com"}
+                      | maybe_append_vhost(VHost)],
+    {Host, DbName, Url, DefaultHeaders}.
+
+% Delete the temp database. Accepts either the bare DbName string (as the
+% headers tests pass it) or the {Host, DbName} pair produced by setup/0.
+teardown(DbName) when is_list(DbName) ->
+    ok = couch_server:delete(?l2b(DbName), [?ADMIN_USER]),
+    ok;
+teardown({_, DbName}) ->
+    teardown(DbName).
+
+% foreachx cleanup: pull DbName out of the {Host, DbName, Url, Headers}
+% state built by setup/1 and delegate to teardown/1.
+teardown(_, {_, DbName, _, _}) ->
+    teardown(DbName).
+
+
+cors_test_() ->
+ Funs = [
+ fun should_not_allow_origin/2,
+ fun should_not_allow_origin_with_port_mismatch/2,
+ fun should_not_allow_origin_with_scheme_mismatch/2,
+ fun should_not_all_origin_due_case_mismatch/2,
+ fun should_make_simple_request/2,
+ fun should_make_preflight_request/2,
+ fun should_make_prefligh_request_with_port/2,
+ fun should_make_prefligh_request_with_scheme/2,
+ fun should_make_prefligh_request_with_wildcard_origin/2,
+ fun should_make_request_with_credentials/2,
+ fun should_make_origin_request_with_auth/2,
+ fun should_make_preflight_request_with_auth/2
+ ],
+ {
+ "CORS (COUCHDB-431)",
+ {
+ setup,
+ fun start/0, fun stop/1,
+ [
+ cors_tests(Funs),
+ vhost_cors_tests(Funs),
+ headers_tests()
+ ]
+ }
+ }.
+
+headers_tests() ->
+ {
+ "Various headers tests",
+ {
+ foreach,
+ fun setup/0, fun teardown/1,
+ [
+ fun should_not_return_cors_headers_for_invalid_origin/1,
+ fun should_not_return_cors_headers_for_invalid_origin_preflight/1,
+ fun should_make_request_against_attachment/1,
+ fun should_make_range_request_against_attachment/1,
+ fun should_make_request_with_if_none_match_header/1
+ ]
+ }
+ }.
+
+cors_tests(Funs) ->
+ {
+ "CORS tests",
+ [
+ make_test_case(server, false, Funs),
+ make_test_case(db, false, Funs)
+ ]
+ }.
+
+vhost_cors_tests(Funs) ->
+ {
+ "Virtual Host CORS",
+ [
+ make_test_case(server, true, Funs),
+ make_test_case(db, true, Funs)
+ ]
+ }.
+
+% Build a foreachx test group that runs every Fun against either the
+% server root (Mod =:= server) or a database URL (Mod =:= db), optionally
+% routed through the configured virtual host when UseVhost is true.
+make_test_case(Mod, UseVhost, Funs) ->
+    {
+        case Mod of server -> "Server"; db -> "Database" end,
+        {foreachx, fun setup/1, fun teardown/2, [{{Mod, UseVhost}, Fun}
+            || Fun <- Funs]}
+    }.
+
+
+% With the origins whitelist deleted entirely, no Access-Control-Allow-
+% Origin header may be returned, even when the request carries an Origin.
+should_not_allow_origin(_, {_, _, Url, Headers0}) ->
+    ?_assertEqual(undefined,
+        begin
+            couch_config:delete("cors", "origins", false),
+            % Swap the fixture's default Origin for a non-whitelisted one.
+            Headers1 = proplists:delete("Origin", Headers0),
+            Headers = [{"Origin", "http://127.0.0.1"}]
+                      ++ Headers1,
+            {ok, _, Resp, _} = test_request:get(Url, Headers),
+            proplists:get_value("Access-Control-Allow-Origin", Resp)
+        end).
+
+should_not_allow_origin_with_port_mismatch({_, VHost}, {_, _, Url, _}) ->
+ ?_assertEqual(undefined,
+ begin
+ Headers = [{"Origin", "http://example.com:5984"},
+ {"Access-Control-Request-Method", "GET"}]
+ ++ maybe_append_vhost(VHost),
+ {ok, _, Resp, _} = test_request:options(Url, Headers),
+ proplists:get_value("Access-Control-Allow-Origin", Resp)
+ end).
+
+% The whitelisted origin is "http://example.com" (see setup/0); an https
+% origin on the same host must therefore be rejected. BUG FIX: this test
+% previously sent "http://example.com:5984" — the same origin as the
+% port-mismatch test above — so it never exercised a scheme mismatch.
+should_not_allow_origin_with_scheme_mismatch({_, VHost}, {_, _, Url, _}) ->
+    ?_assertEqual(undefined,
+        begin
+            Headers = [{"Origin", "https://example.com"},
+                       {"Access-Control-Request-Method", "GET"}]
+                      ++ maybe_append_vhost(VHost),
+            {ok, _, Resp, _} = test_request:options(Url, Headers),
+            proplists:get_value("Access-Control-Allow-Origin", Resp)
+        end).
+
+should_not_all_origin_due_case_mismatch({_, VHost}, {_, _, Url, _}) ->
+ ?_assertEqual(undefined,
+ begin
+ Headers = [{"Origin", "http://ExAmPlE.CoM"},
+ {"Access-Control-Request-Method", "GET"}]
+ ++ maybe_append_vhost(VHost),
+ {ok, _, Resp, _} = test_request:options(Url, Headers),
+ proplists:get_value("Access-Control-Allow-Origin", Resp)
+ end).
+
+should_make_simple_request(_, {_, _, Url, DefaultHeaders}) ->
+ ?_test(begin
+ {ok, _, Resp, _} = test_request:get(Url, DefaultHeaders),
+ ?assertEqual(
+ undefined,
+ proplists:get_value("Access-Control-Allow-Credentials", Resp)),
+ ?assertEqual(
+ "http://example.com",
+ proplists:get_value("Access-Control-Allow-Origin", Resp)),
+ ?assertEqual(
+ "Cache-Control, Content-Type, Server",
+ proplists:get_value("Access-Control-Expose-Headers", Resp))
+ end).
+
+should_make_preflight_request(_, {_, _, Url, DefaultHeaders}) ->
+ ?_assertEqual(?SUPPORTED_METHODS,
+ begin
+ Headers = DefaultHeaders
+ ++ [{"Access-Control-Request-Method", "GET"}],
+ {ok, _, Resp, _} = test_request:options(Url, Headers),
+ proplists:get_value("Access-Control-Allow-Methods", Resp)
+ end).
+
+should_make_prefligh_request_with_port({_, VHost}, {_, _, Url, _}) ->
+ ?_assertEqual("http://example.com:5984",
+ begin
+ couch_config:set("cors", "origins", "http://example.com:5984",
+ false),
+ Headers = [{"Origin", "http://example.com:5984"},
+ {"Access-Control-Request-Method", "GET"}]
+ ++ maybe_append_vhost(VHost),
+ {ok, _, Resp, _} = test_request:options(Url, Headers),
+ proplists:get_value("Access-Control-Allow-Origin", Resp)
+ end).
+
+should_make_prefligh_request_with_scheme({_, VHost}, {_, _, Url, _}) ->
+ ?_assertEqual("https://example.com:5984",
+ begin
+ couch_config:set("cors", "origins", "https://example.com:5984",
+ false),
+ Headers = [{"Origin", "https://example.com:5984"},
+ {"Access-Control-Request-Method", "GET"}]
+ ++ maybe_append_vhost(VHost),
+ {ok, _, Resp, _} = test_request:options(Url, Headers),
+ proplists:get_value("Access-Control-Allow-Origin", Resp)
+ end).
+
+should_make_prefligh_request_with_wildcard_origin({_, VHost}, {_, _, Url, _}) ->
+ ?_assertEqual("https://example.com:5984",
+ begin
+ couch_config:set("cors", "origins", "*", false),
+ Headers = [{"Origin", "https://example.com:5984"},
+ {"Access-Control-Request-Method", "GET"}]
+ ++ maybe_append_vhost(VHost),
+ {ok, _, Resp, _} = test_request:options(Url, Headers),
+ proplists:get_value("Access-Control-Allow-Origin", Resp)
+ end).
+
+should_make_request_with_credentials(_, {_, _, Url, DefaultHeaders}) ->
+ ?_assertEqual("true",
+ begin
+ ok = couch_config:set("cors", "credentials", "true", false),
+ {ok, _, Resp, _} = test_request:options(Url, DefaultHeaders),
+ proplists:get_value("Access-Control-Allow-Credentials", Resp)
+ end).
+
+should_make_origin_request_with_auth(_, {_, _, Url, DefaultHeaders}) ->
+ ?_assertEqual("http://example.com",
+ begin
+ Hashed = couch_passwords:hash_admin_password(<<"test">>),
+ couch_config:set("admins", "test", Hashed, false),
+ {ok, _, Resp, _} = test_request:get(
+ Url, DefaultHeaders, [{basic_auth, {"test", "test"}}]),
+ couch_config:delete("admins", "test", false),
+ proplists:get_value("Access-Control-Allow-Origin", Resp)
+ end).
+
+should_make_preflight_request_with_auth(_, {_, _, Url, DefaultHeaders}) ->
+ ?_assertEqual(?SUPPORTED_METHODS,
+ begin
+ Hashed = couch_passwords:hash_admin_password(<<"test">>),
+ couch_config:set("admins", "test", Hashed, false),
+ Headers = DefaultHeaders
+ ++ [{"Access-Control-Request-Method", "GET"}],
+ {ok, _, Resp, _} = test_request:options(
+ Url, Headers, [{basic_auth, {"test", "test"}}]),
+ couch_config:delete("admins", "test", false),
+ proplists:get_value("Access-Control-Allow-Methods", Resp)
+ end).
+
+should_not_return_cors_headers_for_invalid_origin({Host, _}) ->
+ ?_assertEqual(undefined,
+ begin
+ Headers = [{"Origin", "http://127.0.0.1"}],
+ {ok, _, Resp, _} = test_request:get(Host, Headers),
+ proplists:get_value("Access-Control-Allow-Origin", Resp)
+ end).
+
+should_not_return_cors_headers_for_invalid_origin_preflight({Host, _}) ->
+ ?_assertEqual(undefined,
+ begin
+ Headers = [{"Origin", "http://127.0.0.1"},
+ {"Access-Control-Request-Method", "GET"}],
+ {ok, _, Resp, _} = test_request:options(Host, Headers),
+ proplists:get_value("Access-Control-Allow-Origin", Resp)
+ end).
+
+should_make_request_against_attachment({Host, DbName}) ->
+ {"COUCHDB-1689",
+ ?_assertEqual(200,
+ begin
+ Url = Host ++ "/" ++ DbName,
+ {ok, Code0, _, _} = test_request:put(
+ Url ++ "/doc/file.txt", [{"Content-Type", "text/plain"}],
+ "hello, couch!"),
+ ?assert(Code0 =:= 201),
+ {ok, Code, _, _} = test_request:get(
+ Url ++ "/doc?attachments=true",
+ [{"Origin", "http://example.com"}]),
+ Code
+ end)}.
+
+should_make_range_request_against_attachment({Host, DbName}) ->
+ {"COUCHDB-1689",
+ ?_assertEqual(206,
+ begin
+ Url = Host ++ "/" ++ DbName,
+ {ok, Code0, _, _} = test_request:put(
+ Url ++ "/doc/file.txt",
+ [{"Content-Type", "application/octet-stream"}],
+ "hello, couch!"),
+ ?assert(Code0 =:= 201),
+ {ok, Code, _, _} = test_request:get(
+ Url ++ "/doc/file.txt", [{"Origin", "http://example.com"},
+ {"Range", "bytes=0-6"}]),
+ Code
+ end)}.
+
+should_make_request_with_if_none_match_header({Host, DbName}) ->
+ {"COUCHDB-1697",
+ ?_assertEqual(304,
+ begin
+ Url = Host ++ "/" ++ DbName,
+ {ok, Code0, Headers0, _} = test_request:put(
+ Url ++ "/doc", [{"Content-Type", "application/json"}], "{}"),
+ ?assert(Code0 =:= 201),
+ ETag = proplists:get_value("ETag", Headers0),
+ {ok, Code, _, _} = test_request:get(
+ Url ++ "/doc", [{"Origin", "http://example.com"},
+ {"If-None-Match", ETag}]),
+ Code
+ end)}.
+
+
+% Extra headers for vhost-routed requests: a Host header matching the
+% vhost configured in start/0, or nothing for direct requests.
+maybe_append_vhost(UseVhost) ->
+    case UseVhost of
+        true -> [{"Host", "http://example.com"}];
+        false -> []
+    end.
http://git-wip-us.apache.org/repos/asf/couchdb-couch/blob/661443fb/test/couchdb_csp_tests.erl
----------------------------------------------------------------------
diff --git a/test/couchdb_csp_tests.erl b/test/couchdb_csp_tests.erl
new file mode 100644
index 0000000..adb0e6d
--- /dev/null
+++ b/test/couchdb_csp_tests.erl
@@ -0,0 +1,96 @@
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+% http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+-module(couchdb_csp_tests).
+
+-include("couch_eunit.hrl").
+
+-define(TIMEOUT, 1000).
+
+
+start() ->
+ {ok, Pid} = couch_server_sup:start_link(?CONFIG_CHAIN),
+ Pid.
+
+stop(Pid) ->
+ couch_server_sup:stop(),
+ erlang:monitor(process, Pid),
+ receive
+ {'DOWN', _, _, Pid, _} ->
+ ok
+ after ?TIMEOUT ->
+ throw({timeout, server_stop})
+ end.
+
+% Enable CSP and return the _utils URL that every CSP test requests.
+setup() ->
+    ok = couch_config:set("csp", "enable", "true", false),
+    Addr = couch_config:get("httpd", "bind_address", "127.0.0.1"),
+    Port = mochiweb_socket_server:get(couch_httpd, port),
+    "http://" ++ Addr ++ ":" ++ integer_to_list(Port) ++ "/_utils/".
+
+teardown(_) ->
+ ok.
+
+
+csp_test_() ->
+ {
+ "Content Security Policy tests",
+ {
+ setup,
+ fun start/0, fun stop/1,
+ {
+ foreach,
+ fun setup/0, fun teardown/1,
+ [
+ fun should_not_return_any_csp_headers_when_disabled/1,
+ fun should_apply_default_policy/1,
+ fun should_return_custom_policy/1,
+ fun should_only_enable_csp_when_true/1
+ ]
+ }
+ }
+ }.
+
+
+should_not_return_any_csp_headers_when_disabled(Url) ->
+ ?_assertEqual(undefined,
+ begin
+ ok = couch_config:set("csp", "enable", "false", false),
+ {ok, _, Headers, _} = test_request:get(Url),
+ proplists:get_value("Content-Security-Policy", Headers)
+ end).
+
+should_apply_default_policy(Url) ->
+ ?_assertEqual(
+ "default-src 'self'; img-src 'self'; font-src 'self'; "
+ "script-src 'self' 'unsafe-eval'; style-src 'self' 'unsafe-inline';",
+ begin
+ {ok, _, Headers, _} = test_request:get(Url),
+ proplists:get_value("Content-Security-Policy", Headers)
+ end).
+
+should_return_custom_policy(Url) ->
+ ?_assertEqual("default-src 'http://example.com';",
+ begin
+ ok = couch_config:set("csp", "header_value",
+ "default-src 'http://example.com';", false),
+ {ok, _, Headers, _} = test_request:get(Url),
+ proplists:get_value("Content-Security-Policy", Headers)
+ end).
+
+should_only_enable_csp_when_true(Url) ->
+ ?_assertEqual(undefined,
+ begin
+ ok = couch_config:set("csp", "enable", "tru", false),
+ {ok, _, Headers, _} = test_request:get(Url),
+ proplists:get_value("Content-Security-Policy", Headers)
+ end).
http://git-wip-us.apache.org/repos/asf/couchdb-couch/blob/661443fb/test/couchdb_file_compression_tests.erl
----------------------------------------------------------------------
diff --git a/test/couchdb_file_compression_tests.erl b/test/couchdb_file_compression_tests.erl
new file mode 100644
index 0000000..fd3f513
--- /dev/null
+++ b/test/couchdb_file_compression_tests.erl
@@ -0,0 +1,239 @@
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+% http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+-module(couchdb_file_compression_tests).
+
+-include("couch_eunit.hrl").
+-include_lib("couchdb/couch_db.hrl").
+
+-define(ADMIN_USER, {user_ctx, #user_ctx{roles=[<<"_admin">>]}}).
+-define(DDOC_ID, <<"_design/test">>).
+-define(DOCS_COUNT, 5000).
+-define(TIMEOUT, 30000).
+
+
+start() ->
+ {ok, Pid} = couch_server_sup:start_link(?CONFIG_CHAIN),
+ Pid.
+
+stop(Pid) ->
+ erlang:monitor(process, Pid),
+ couch_server_sup:stop(),
+ receive
+ {'DOWN', _, _, Pid, _} ->
+ ok
+ after ?TIMEOUT ->
+ throw({timeout, server_stop})
+ end.
+
+setup() ->
+ couch_config:set("couchdb", "file_compression", "none", false),
+ DbName = ?tempdb(),
+ {ok, Db} = couch_db:create(DbName, [?ADMIN_USER]),
+ ok = populate_db(Db, ?DOCS_COUNT),
+ DDoc = couch_doc:from_json_obj({[
+ {<<"_id">>, ?DDOC_ID},
+ {<<"language">>, <<"javascript">>},
+ {<<"views">>, {[
+ {<<"by_id">>, {[
+ {<<"map">>, <<"function(doc){emit(doc._id, doc.string);}">>}
+ ]}}
+ ]}
+ }
+ ]}),
+ {ok, _} = couch_db:update_doc(Db, DDoc, []),
+ refresh_index(DbName),
+ ok = couch_db:close(Db),
+ DbName.
+
+teardown(DbName) ->
+ ok = couch_server:delete(DbName, [?ADMIN_USER]),
+ ok.
+
+
+% EUnit entry point. Renamed from couch_auth_cache_test_/0 — a copy-paste
+% leftover from the auth cache suite that mislabelled this module's tests
+% in EUnit output; nothing references the generator by name, so the
+% rename is safe (EUnit discovers any *_test_/0 function).
+couch_file_compression_test_() ->
+    {
+        "CouchDB file compression tests",
+        {
+            setup,
+            fun start/0, fun stop/1,
+            {
+                foreach,
+                fun setup/0, fun teardown/1,
+                [
+                    fun should_use_none/1,
+                    fun should_use_deflate_1/1,
+                    fun should_use_deflate_9/1,
+                    fun should_use_snappy/1,
+                    fun should_compare_compression_methods/1
+                ]
+            }
+        }
+    }.
+
+
+should_use_none(DbName) ->
+    compression_tests(DbName, "none", "Use no compression").
+
+should_use_deflate_1(DbName) ->
+    compression_tests(DbName, "deflate_1",
+                      "Use deflate compression at level 1").
+
+should_use_deflate_9(DbName) ->
+    compression_tests(DbName, "deflate_9",
+                      "Use deflate compression at level 9").
+
+should_use_snappy(DbName) ->
+    compression_tests(DbName, "snappy", "Use snappy compression").
+
+% Shared body for the four per-method tests above (previously duplicated
+% verbatim four times): switch the file_compression setting, then verify
+% both database and view compaction still succeed under that codec.
+compression_tests(DbName, Method, Desc) ->
+    couch_config:set("couchdb", "file_compression", Method, false),
+    {
+        Desc,
+        [
+            {"compact database", ?_test(compact_db(DbName))},
+            {"compact view", ?_test(compact_view(DbName))}
+        ]
+    }.
+
+should_compare_compression_methods(DbName) ->
+ {"none > snappy > deflate_1 > deflate_9",
+ {timeout, ?TIMEOUT div 1000, ?_test(compare_compression_methods(DbName))}}.
+
+% Ordering check over one data set: after recompacting with each method,
+% the on-disk sizes of both the database file and the view index file
+% must strictly shrink as compression strengthens:
+% none > snappy > deflate level 1 > deflate level 9.
+compare_compression_methods(DbName) ->
+    couch_config:set("couchdb", "file_compression", "none", false),
+    compact_db(DbName),
+    compact_view(DbName),
+    DbSizeNone = db_disk_size(DbName),
+    ViewSizeNone = view_disk_size(DbName),
+
+    couch_config:set("couchdb", "file_compression", "snappy", false),
+    compact_db(DbName),
+    compact_view(DbName),
+    DbSizeSnappy = db_disk_size(DbName),
+    ViewSizeSnappy = view_disk_size(DbName),
+
+    ?assert(DbSizeNone > DbSizeSnappy),
+    ?assert(ViewSizeNone > ViewSizeSnappy),
+
+    couch_config:set("couchdb", "file_compression", "deflate_1", false),
+    compact_db(DbName),
+    compact_view(DbName),
+    DbSizeDeflate1 = db_disk_size(DbName),
+    ViewSizeDeflate1 = view_disk_size(DbName),
+
+    ?assert(DbSizeSnappy > DbSizeDeflate1),
+    ?assert(ViewSizeSnappy > ViewSizeDeflate1),
+
+    couch_config:set("couchdb", "file_compression", "deflate_9", false),
+    compact_db(DbName),
+    compact_view(DbName),
+    DbSizeDeflate9 = db_disk_size(DbName),
+    ViewSizeDeflate9 = view_disk_size(DbName),
+
+    ?assert(DbSizeDeflate1 > DbSizeDeflate9),
+    ?assert(ViewSizeDeflate1 > ViewSizeDeflate9).
+
+
+% Insert NumDocs documents (each with a ~1KB string field) into Db in
+% batches. FIX: the batch was previously a fixed 500 docs, which would
+% overshoot NumDocs when it is not a multiple of 500; the batch is now
+% capped at the remaining count. Behavior is unchanged for the current
+% caller (?DOCS_COUNT = 5000 is an exact multiple of 500).
+populate_db(_Db, NumDocs) when NumDocs =< 0 ->
+    ok;
+populate_db(Db, NumDocs) ->
+    BatchSize = erlang:min(500, NumDocs),
+    Docs = lists:map(
+        fun(_) ->
+            couch_doc:from_json_obj({[
+                {<<"_id">>, couch_uuids:random()},
+                {<<"string">>, ?l2b(lists:duplicate(1000, $X))}
+            ]})
+        end,
+        lists:seq(1, BatchSize)),
+    {ok, _} = couch_db:update_docs(Db, Docs, []),
+    populate_db(Db, NumDocs - BatchSize).
+
+refresh_index(DbName) ->
+ {ok, Db} = couch_db:open_int(DbName, []),
+ {ok, DDoc} = couch_db:open_doc(Db, ?DDOC_ID, [ejson_body]),
+ couch_mrview:query_view(Db, DDoc, <<"by_id">>, [{stale, false}]),
+ ok = couch_db:close(Db).
+
+% Compact the database and assert the file shrank. Waits on a monitor for
+% the compactor process to exit; an abnormal exit or a wait longer than
+% ?TIMEOUT fails the test with a descriptive eunit-style error.
+compact_db(DbName) ->
+    DiskSizeBefore = db_disk_size(DbName),
+    {ok, Db} = couch_db:open_int(DbName, []),
+    {ok, CompactPid} = couch_db:start_compact(Db),
+    MonRef = erlang:monitor(process, CompactPid),
+    receive
+        {'DOWN', MonRef, process, CompactPid, normal} ->
+            ok;
+        {'DOWN', MonRef, process, CompactPid, Reason} ->
+            erlang:error({assertion_failed,
+                          [{module, ?MODULE},
+                           {line, ?LINE},
+                           {reason, "Error compacting database: "
+                                    ++ couch_util:to_list(Reason)}]})
+    after ?TIMEOUT ->
+        erlang:error({assertion_failed,
+                      [{module, ?MODULE},
+                       {line, ?LINE},
+                       {reason, "Timeout waiting for database compaction"}]})
+    end,
+    ok = couch_db:close(Db),
+    % The test data is highly repetitive (1000 x $X strings), so every
+    % compaction pass is expected to reclaim space.
+    DiskSizeAfter = db_disk_size(DbName),
+    ?assert(DiskSizeBefore > DiskSizeAfter).
+
+% Compact the view group and assert its index file shrank. couch_mrview
+% is asked (via the monitor option) to return a monitor ref for the
+% compaction job; we then wait for its 'DOWN' just like compact_db/1.
+compact_view(DbName) ->
+    DiskSizeBefore = view_disk_size(DbName),
+    {ok, MonRef} = couch_mrview:compact(DbName, ?DDOC_ID, [monitor]),
+    receive
+        {'DOWN', MonRef, process, _CompactPid, normal} ->
+            ok;
+        {'DOWN', MonRef, process, _CompactPid, Reason} ->
+            erlang:error({assertion_failed,
+                          [{module, ?MODULE},
+                           {line, ?LINE},
+                           {reason, "Error compacting view group: "
+                                    ++ couch_util:to_list(Reason)}]})
+    after ?TIMEOUT ->
+        erlang:error({assertion_failed,
+                      [{module, ?MODULE},
+                       {line, ?LINE},
+                       {reason, "Timeout waiting for view group compaction"}]})
+    end,
+    DiskSizeAfter = view_disk_size(DbName),
+    ?assert(DiskSizeBefore > DiskSizeAfter).
+
+db_disk_size(DbName) ->
+ {ok, Db} = couch_db:open_int(DbName, []),
+ {ok, Info} = couch_db:get_db_info(Db),
+ ok = couch_db:close(Db),
+ couch_util:get_value(disk_size, Info).
+
+view_disk_size(DbName) ->
+ {ok, Db} = couch_db:open_int(DbName, []),
+ {ok, DDoc} = couch_db:open_doc(Db, ?DDOC_ID, [ejson_body]),
+ {ok, Info} = couch_mrview:get_info(Db, DDoc),
+ ok = couch_db:close(Db),
+ couch_util:get_value(disk_size, Info).
http://git-wip-us.apache.org/repos/asf/couchdb-couch/blob/661443fb/test/couchdb_http_proxy_tests.erl
----------------------------------------------------------------------
diff --git a/test/couchdb_http_proxy_tests.erl b/test/couchdb_http_proxy_tests.erl
new file mode 100644
index 0000000..acb1974
--- /dev/null
+++ b/test/couchdb_http_proxy_tests.erl
@@ -0,0 +1,462 @@
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+% http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+-module(couchdb_http_proxy_tests).
+
+-include("couch_eunit.hrl").
+
+-record(req, {method=get, path="", headers=[], body="", opts=[]}).
+
+-define(CONFIG_FIXTURE_TEMP,
+ begin
+ FileName = filename:join([?TEMPDIR, ?tempfile() ++ ".ini"]),
+ {ok, Fd} = file:open(FileName, write),
+ ok = file:truncate(Fd),
+ ok = file:close(Fd),
+ FileName
+ end).
+-define(TIMEOUT, 5000).
+
+
+start() ->
+ % we have to write any config changes to temp ini file to not loose them
+ % when supervisor will kill all children due to reaching restart threshold
+ % (each httpd_global_handlers changes causes couch_httpd restart)
+ couch_server_sup:start_link(?CONFIG_CHAIN ++ [?CONFIG_FIXTURE_TEMP]),
+ % 49151 is IANA Reserved, let's assume no one is listening there
+ couch_config:set("httpd_global_handlers", "_error",
+ "{couch_httpd_proxy, handle_proxy_req, <<\"http://127.0.0.1:49151/\">>}"
+ ),
+ ok.
+
+stop(_) ->
+ couch_server_sup:stop(),
+ ok.
+
+setup() ->
+ {ok, Pid} = test_web:start_link(),
+ Value = lists:flatten(io_lib:format(
+ "{couch_httpd_proxy, handle_proxy_req, ~p}",
+ [list_to_binary(proxy_url())])),
+ couch_config:set("httpd_global_handlers", "_test", Value),
+ % let couch_httpd restart
+ timer:sleep(100),
+ Pid.
+
+teardown(Pid) ->
+ erlang:monitor(process, Pid),
+ test_web:stop(),
+ receive
+ {'DOWN', _, _, Pid, _} ->
+ ok
+ after ?TIMEOUT ->
+ throw({timeout, test_web_stop})
+ end.
+
+
+http_proxy_test_() ->
+ {
+ "HTTP Proxy handler tests",
+ {
+ setup,
+ fun start/0, fun stop/1,
+ {
+ foreach,
+ fun setup/0, fun teardown/1,
+ [
+ fun should_proxy_basic_request/1,
+ fun should_return_alternative_status/1,
+ fun should_respect_trailing_slash/1,
+ fun should_proxy_headers/1,
+ fun should_proxy_host_header/1,
+ fun should_pass_headers_back/1,
+ fun should_use_same_protocol_version/1,
+ fun should_proxy_body/1,
+ fun should_proxy_body_back/1,
+ fun should_proxy_chunked_body/1,
+ fun should_proxy_chunked_body_back/1,
+ fun should_rewrite_location_header/1,
+ fun should_not_rewrite_external_locations/1,
+ fun should_rewrite_relative_location/1,
+ fun should_refuse_connection_to_backend/1
+ ]
+ }
+
+ }
+ }.
+
+
+should_proxy_basic_request(_) ->
+ Remote = fun(Req) ->
+ 'GET' = Req:get(method),
+ "/" = Req:get(path),
+ 0 = Req:get(body_length),
+ <<>> = Req:recv_body(),
+ {ok, {200, [{"Content-Type", "text/plain"}], "ok"}}
+ end,
+ Local = fun
+ ({ok, "200", _, "ok"}) ->
+ true;
+ (_) ->
+ false
+ end,
+ ?_test(check_request(#req{}, Remote, Local)).
+
+should_return_alternative_status(_) ->
+ Remote = fun(Req) ->
+ "/alternate_status" = Req:get(path),
+ {ok, {201, [], "ok"}}
+ end,
+ Local = fun
+ ({ok, "201", _, "ok"}) ->
+ true;
+ (_) ->
+ false
+ end,
+ Req = #req{path = "/alternate_status"},
+ ?_test(check_request(Req, Remote, Local)).
+
+should_respect_trailing_slash(_) ->
+ Remote = fun(Req) ->
+ "/trailing_slash/" = Req:get(path),
+ {ok, {200, [], "ok"}}
+ end,
+ Local = fun
+ ({ok, "200", _, "ok"}) ->
+ true;
+ (_) ->
+ false
+ end,
+ Req = #req{path="/trailing_slash/"},
+ ?_test(check_request(Req, Remote, Local)).
+
+should_proxy_headers(_) ->
+ Remote = fun(Req) ->
+ "/passes_header" = Req:get(path),
+ "plankton" = Req:get_header_value("X-CouchDB-Ralph"),
+ {ok, {200, [], "ok"}}
+ end,
+ Local = fun
+ ({ok, "200", _, "ok"}) ->
+ true;
+ (_) ->
+ false
+ end,
+ Req = #req{
+ path="/passes_header",
+ headers=[{"X-CouchDB-Ralph", "plankton"}]
+ },
+ ?_test(check_request(Req, Remote, Local)).
+
+should_proxy_host_header(_) ->
+ Remote = fun(Req) ->
+ "/passes_host_header" = Req:get(path),
+ "www.google.com" = Req:get_header_value("Host"),
+ {ok, {200, [], "ok"}}
+ end,
+ Local = fun
+ ({ok, "200", _, "ok"}) ->
+ true;
+ (_) ->
+ false
+ end,
+ Req = #req{
+ path="/passes_host_header",
+ headers=[{"Host", "www.google.com"}]
+ },
+ ?_test(check_request(Req, Remote, Local)).
+
+should_pass_headers_back(_) ->
+ Remote = fun(Req) ->
+ "/passes_header_back" = Req:get(path),
+ {ok, {200, [{"X-CouchDB-Plankton", "ralph"}], "ok"}}
+ end,
+ Local = fun
+ ({ok, "200", Headers, "ok"}) ->
+ lists:member({"X-CouchDB-Plankton", "ralph"}, Headers);
+ (_) ->
+ false
+ end,
+ Req = #req{path="/passes_header_back"},
+ ?_test(check_request(Req, Remote, Local)).
+
+should_use_same_protocol_version(_) ->
+ Remote = fun(Req) ->
+ "/uses_same_version" = Req:get(path),
+ {1, 0} = Req:get(version),
+ {ok, {200, [], "ok"}}
+ end,
+ Local = fun
+ ({ok, "200", _, "ok"}) ->
+ true;
+ (_) ->
+ false
+ end,
+ Req = #req{
+ path="/uses_same_version",
+ opts=[{http_vsn, {1, 0}}]
+ },
+ ?_test(check_request(Req, Remote, Local)).
+
+should_proxy_body(_) ->
+ Remote = fun(Req) ->
+ 'PUT' = Req:get(method),
+ "/passes_body" = Req:get(path),
+ <<"Hooray!">> = Req:recv_body(),
+ {ok, {201, [], "ok"}}
+ end,
+ Local = fun
+ ({ok, "201", _, "ok"}) ->
+ true;
+ (_) ->
+ false
+ end,
+ Req = #req{
+ method=put,
+ path="/passes_body",
+ body="Hooray!"
+ },
+ ?_test(check_request(Req, Remote, Local)).
+
+should_proxy_body_back(_) ->
+ BodyChunks = [<<"foo">>, <<"bar">>, <<"bazinga">>],
+ Remote = fun(Req) ->
+ 'GET' = Req:get(method),
+ "/passes_eof_body" = Req:get(path),
+ {raw, {200, [{"Connection", "close"}], BodyChunks}}
+ end,
+ Local = fun
+ ({ok, "200", _, "foobarbazinga"}) ->
+ true;
+ (_) ->
+ false
+ end,
+ Req = #req{path="/passes_eof_body"},
+ ?_test(check_request(Req, Remote, Local)).
+
+should_proxy_chunked_body(_) ->
+ BodyChunks = [<<"foo">>, <<"bar">>, <<"bazinga">>],
+ Remote = fun(Req) ->
+ 'POST' = Req:get(method),
+ "/passes_chunked_body" = Req:get(path),
+ RecvBody = fun
+ ({Length, Chunk}, [Chunk | Rest]) ->
+ Length = size(Chunk),
+ Rest;
+ ({0, []}, []) ->
+ ok
+ end,
+ ok = Req:stream_body(1024 * 1024, RecvBody, BodyChunks),
+ {ok, {201, [], "ok"}}
+ end,
+ Local = fun
+ ({ok, "201", _, "ok"}) ->
+ true;
+ (_) ->
+ false
+ end,
+ Req = #req{
+ method=post,
+ path="/passes_chunked_body",
+ headers=[{"Transfer-Encoding", "chunked"}],
+ body=chunked_body(BodyChunks)
+ },
+ ?_test(check_request(Req, Remote, Local)).
+
+should_proxy_chunked_body_back(_) ->
+ ?_test(begin
+ Remote = fun(Req) ->
+ 'GET' = Req:get(method),
+ "/passes_chunked_body_back" = Req:get(path),
+ BodyChunks = [<<"foo">>, <<"bar">>, <<"bazinga">>],
+ {chunked, {200, [{"Transfer-Encoding", "chunked"}], BodyChunks}}
+ end,
+ Req = #req{
+ path="/passes_chunked_body_back",
+ opts=[{stream_to, self()}]
+ },
+
+ Resp = check_request(Req, Remote, no_local),
+ ?assertMatch({ibrowse_req_id, _}, Resp),
+ {_, ReqId} = Resp,
+
+ % Grab headers from response
+ receive
+ {ibrowse_async_headers, ReqId, "200", Headers} ->
+ ?assertEqual("chunked",
+ proplists:get_value("Transfer-Encoding", Headers)),
+ ibrowse:stream_next(ReqId)
+ after 1000 ->
+ throw({error, timeout})
+ end,
+
+ ?assertEqual(<<"foobarbazinga">>, recv_body(ReqId, [])),
+ ?assertEqual(was_ok, test_web:check_last())
+ end).
+
+should_refuse_connection_to_backend(_) ->
+ Local = fun
+ ({ok, "500", _, _}) ->
+ true;
+ (_) ->
+ false
+ end,
+ Req = #req{opts=[{url, server_url("/_error")}]},
+ ?_test(check_request(Req, no_remote, Local)).
+
+should_rewrite_location_header(_) ->
+ {
+ "Testing location header rewrites",
+ do_rewrite_tests([
+ {"Location", proxy_url() ++ "/foo/bar",
+ server_url() ++ "/foo/bar"},
+ {"Content-Location", proxy_url() ++ "/bing?q=2",
+ server_url() ++ "/bing?q=2"},
+ {"Uri", proxy_url() ++ "/zip#frag",
+ server_url() ++ "/zip#frag"},
+ {"Destination", proxy_url(),
+ server_url() ++ "/"}
+ ])
+ }.
+
+should_not_rewrite_external_locations(_) ->
+ {
+ "Testing no rewrite of external locations",
+ do_rewrite_tests([
+ {"Location", external_url() ++ "/search",
+ external_url() ++ "/search"},
+ {"Content-Location", external_url() ++ "/s?q=2",
+ external_url() ++ "/s?q=2"},
+ {"Uri", external_url() ++ "/f#f",
+ external_url() ++ "/f#f"},
+ {"Destination", external_url() ++ "/f?q=2#f",
+ external_url() ++ "/f?q=2#f"}
+ ])
+ }.
+
+should_rewrite_relative_location(_) ->
+ {
+ "Testing relative rewrites",
+ do_rewrite_tests([
+ {"Location", "/foo",
+ server_url() ++ "/foo"},
+ {"Content-Location", "bar",
+ server_url() ++ "/bar"},
+ {"Uri", "/zing?q=3",
+ server_url() ++ "/zing?q=3"},
+ {"Destination", "bing?q=stuff#yay",
+ server_url() ++ "/bing?q=stuff#yay"}
+ ])
+ }.
+
+
+do_rewrite_tests(Tests) ->
+ lists:map(fun({Header, Location, Url}) ->
+ should_rewrite_header(Header, Location, Url)
+ end, Tests).
+
+should_rewrite_header(Header, Location, Url) ->
+ Remote = fun(Req) ->
+ "/rewrite_test" = Req:get(path),
+ {ok, {302, [{Header, Location}], "ok"}}
+ end,
+ Local = fun
+ ({ok, "302", Headers, "ok"}) ->
+ ?assertEqual(Url, couch_util:get_value(Header, Headers)),
+ true;
+ (E) ->
+ ?debugFmt("~p", [E]),
+ false
+ end,
+ Req = #req{path="/rewrite_test"},
+ {Header, ?_test(check_request(Req, Remote, Local))}.
+
+
+% Base URL of the locally bound httpd; the zero-arity form targets the
+% "_test" proxy handler installed by setup/0.
+server_url() ->
+    server_url("/_test").
+
+server_url(Resource) ->
+    Addr = couch_config:get("httpd", "bind_address"),
+    Port = mochiweb_socket_server:get(couch_httpd, port),
+    "http://" ++ Addr ++ ":" ++ integer_to_list(Port) ++ Resource.
+
+proxy_url() ->
+ "http://127.0.0.1:" ++ integer_to_list(test_web:get_port()).
+
+external_url() ->
+ "https://google.com".
+
+% Drive one request through the proxy. Remote is a fun installed as the
+% backend-side assertion via test_web (or the atom no_remote to leave the
+% backend untouched); Local is a predicate over the raw ibrowse result
+% tuple (or no_local to skip the client-side check). When both sides are
+% active, also verify the backend assertion actually ran and passed
+% (test_web:check_last() =:= was_ok). Returns the ibrowse result so
+% callers (e.g. the chunked-body test) can keep streaming.
+check_request(Req, Remote, Local) ->
+    case Remote of
+        no_remote ->
+            ok;
+        _ ->
+            test_web:set_assert(Remote)
+    end,
+    % An explicit {url, _} option overrides the default proxy endpoint.
+    Url = case proplists:lookup(url, Req#req.opts) of
+        none ->
+            server_url() ++ Req#req.path;
+        {url, DestUrl} ->
+            DestUrl
+    end,
+    % headers_as_is stops ibrowse normalizing headers, so the backend sees
+    % exactly what the test specified (e.g. the Host header test).
+    Opts = [{headers_as_is, true} | Req#req.opts],
+    Resp =ibrowse:send_req(
+        Url, Req#req.headers, Req#req.method, Req#req.body, Opts
+    ),
+    %?debugFmt("ibrowse response: ~p", [Resp]),
+    case Local of
+        no_local ->
+            ok;
+        _ ->
+            ?assert(Local(Resp))
+    end,
+    case {Remote, Local} of
+        {no_remote, _} ->
+            ok;
+        {_, no_local} ->
+            ok;
+        _ ->
+            ?assertEqual(was_ok, test_web:check_last())
+    end,
+    Resp.
+
+% Encode Chunks with HTTP/1.1 chunked transfer framing: each chunk becomes
+% "<hex size>\r\n<data>\r\n", followed by the "0\r\n\r\n" terminator.
+% Produces byte-for-byte the same output as the old reverse-accumulator
+% loop, built here with a comprehension instead.
+chunked_body(Chunks) ->
+    Framed = [[to_hex(size(Chunk)), "\r\n", Chunk, "\r\n"]
+              || Chunk <- Chunks],
+    iolist_to_binary([Framed, "0\r\n\r\n"]).
+
+% Render a non-negative integer as an uppercase hexadecimal string.
+% FIX: to_hex(0) previously returned "" because the accumulator base case
+% was hit immediately; it now correctly yields "0". (No current caller
+% passes 0 — chunked_body/1 frames only non-empty chunks — so existing
+% behavior is unchanged.)
+to_hex(0) ->
+    "0";
+to_hex(Val) ->
+    to_hex(Val, []).
+
+to_hex(0, Acc) ->
+    Acc;
+to_hex(Val, Acc) ->
+    to_hex(Val div 16, [hex_char(Val rem 16) | Acc]).
+
+% Map a single hex digit (0-15) to its ASCII character, uppercase A-F.
+hex_char(V) when V < 10 -> $0 + V;
+hex_char(V) -> $A + V - 10.
+
+% Collect streamed ibrowse response chunks for ReqId until the
+% end-of-response message arrives, returning the whole body as a single
+% binary. Any unrelated message in the mailbox is treated as a protocol
+% error, and silence for ?TIMEOUT ms fails the call.
+recv_body(ReqId, Acc) ->
+    receive
+        {ibrowse_async_response, ReqId, Data} ->
+            recv_body(ReqId, [Data | Acc]);
+        {ibrowse_async_response_end, ReqId} ->
+            iolist_to_binary(lists:reverse(Acc));
+        Else ->
+            throw({error, unexpected_mesg, Else})
+    after ?TIMEOUT ->
+        throw({error, timeout})
+    end.
http://git-wip-us.apache.org/repos/asf/couchdb-couch/blob/661443fb/test/couchdb_os_daemons_tests.erl
----------------------------------------------------------------------
diff --git a/test/couchdb_os_daemons_tests.erl b/test/couchdb_os_daemons_tests.erl
new file mode 100644
index 0000000..aa949c9
--- /dev/null
+++ b/test/couchdb_os_daemons_tests.erl
@@ -0,0 +1,228 @@
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+% http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+%% Tests for couch_os_daemons: spawning, tracking, removing and
+%% restarting the external OS processes configured under [os_daemons].
+-module(couchdb_os_daemons_tests).
+
+-include("couch_eunit.hrl").
+
+%% keep in sync with couchdb/couch_os_daemons.erl
+-record(daemon, {
+    port,               % port handle to the spawned OS process
+    name,               % daemon name, i.e. the [os_daemons] config key
+    cmd,                % command used to launch the daemon
+    kill,               % kill command; tests assert it is not 'undefined'
+    status=running,     % 'running', or 'halted' per check_dead/2 below
+    cfg_patterns=[],    % config subscriptions -- TODO confirm semantics
+    errors=[],          % error history; its length is checked by tests
+    buf=[]              % pending io buffer; [] for a healthy daemon
+}).
+
+% Fixture scripts driven by the test generators below.
+-define(DAEMON_CONFIGER, "os_daemon_configer.escript").
+-define(DAEMON_LOOPER, "os_daemon_looper.escript").
+-define(DAEMON_BAD_PERM, "os_daemon_bad_perm.sh").
+-define(DAEMON_CAN_REBOOT, "os_daemon_can_reboot.sh").
+-define(DAEMON_DIE_ON_BOOT, "os_daemon_die_on_boot.sh").
+-define(DAEMON_DIE_QUICKLY, "os_daemon_die_quickly.sh").
+-define(DELAY, 100).    % ms; grace period for daemon startup/config changes
+-define(TIMEOUT, 1000). % ms; max wait for monitored processes to stop
+
+
+% Start couch_config and couch_os_daemons, then register the fixture
+% script DName as an OS daemon. Returns {CfgPid, OsDPid} for teardown/2.
+setup(DName) ->
+    {ok, CfgPid} = couch_config:start_link(?CONFIG_CHAIN),
+    {ok, OsDPid} = couch_os_daemons:start_link(),
+    DaemonCmd = filename:join([?FIXTURESDIR, DName]),
+    couch_config:set("os_daemons", DName, DaemonCmd, false),
+    % sleep a bit to let the daemon start and set its kill flag
+    timer:sleep(?DELAY),
+    {CfgPid, OsDPid}.
+
+% Stop the config server and the os_daemons server, waiting for each
+% 'DOWN' notification before returning.
+teardown(_, {CfgPid, OsDPid}) ->
+    erlang:monitor(process, CfgPid),
+    couch_config:stop(),
+    wait_down(CfgPid, config_stop),
+
+    erlang:monitor(process, OsDPid),
+    exit(OsDPid, normal),
+    wait_down(OsDPid, os_daemon_stop).
+
+% Block until the monitored Pid goes down, or throw {timeout, Tag}.
+wait_down(Pid, Tag) ->
+    receive
+        {'DOWN', _, _, Pid, _} ->
+            ok
+    after ?TIMEOUT ->
+        throw({timeout, Tag})
+    end.
+
+
+% Run every basic daemon test against the looper fixture script.
+os_daemons_test_() ->
+    Cases = [
+        fun should_check_daemon/2,
+        fun should_check_daemon_table_form/2,
+        fun should_clean_tables_on_daemon_remove/2,
+        fun should_spawn_multiple_daemons/2,
+        fun should_keep_alive_one_daemon_on_killing_other/2
+    ],
+    {
+        "OS Daemons tests",
+        {
+            foreachx,
+            fun setup/1, fun teardown/2,
+            [{?DAEMON_LOOPER, Case} || Case <- Cases]
+        }
+    }.
+
+% The configer fixture reads and writes CouchDB config on its own.
+configuration_reader_test_() ->
+    Cases = [
+        {?DAEMON_CONFIGER, fun should_read_write_config_settings_by_daemon/2}
+    ],
+    {
+        "OS Daemon requests CouchDB configuration",
+        {foreachx, fun setup/1, fun teardown/2, Cases}
+    }.
+
+% Failure-mode fixtures: each script misbehaves in a different way.
+error_test_() ->
+    Cases = [
+        {?DAEMON_BAD_PERM, fun should_fail_due_to_lack_of_permissions/2},
+        {?DAEMON_DIE_ON_BOOT, fun should_die_on_boot/2},
+        {?DAEMON_DIE_QUICKLY, fun should_die_quickly/2},
+        {?DAEMON_CAN_REBOOT, fun should_not_being_halted/2}
+    ],
+    {
+        "OS Daemon process error tests",
+        {foreachx, fun setup/1, fun teardown/2, Cases}
+    }.
+
+
+% A single registered daemon should show up healthy in info([table]).
+should_check_daemon(DName, _) ->
+    ?_test(begin
+        {ok, [Daemon]} = couch_os_daemons:info([table]),
+        check_daemon(Daemon, DName)
+    end).
+
+% The same daemon should also be visible via the raw ets table form.
+should_check_daemon_table_form(DName, _) ->
+    ?_test(begin
+        {ok, Table} = couch_os_daemons:info(),
+        [Daemon] = ets:tab2list(Table),
+        check_daemon(Daemon, DName)
+    end).
+
+% Deleting a daemon's config entry should leave the daemons table empty.
+should_clean_tables_on_daemon_remove(DName, _) ->
+    ?_test(begin
+        couch_config:delete("os_daemons", DName, false),
+        % give the server a moment to process the removal, mirroring
+        % the delete-then-sleep pattern used by the "bar" removal test
+        timer:sleep(?DELAY),
+        {ok, Tab2} = couch_os_daemons:info(),
+        % Bug fix: was ?_assertEqual, which only *builds* a test
+        % object (discarded here) and never runs the check inside
+        % ?_test; ?assertEqual executes the assertion.
+        ?assertEqual([], ets:tab2list(Tab2))
+    end).
+
+% Registering several names for the same script should yield one
+% healthy entry per name, in both the info([table]) and ets views.
+should_spawn_multiple_daemons(DName, _) ->
+    ?_test(begin
+        Path = filename:join([?FIXTURESDIR, DName]),
+        couch_config:set("os_daemons", "bar", Path, false),
+        couch_config:set("os_daemons", "baz", Path, false),
+        timer:sleep(?DELAY),
+        {ok, Daemons} = couch_os_daemons:info([table]),
+        lists:foreach(fun check_daemon/1, Daemons),
+        {ok, Tab} = couch_os_daemons:info(),
+        lists:foreach(fun check_daemon/1, ets:tab2list(Tab))
+    end).
+
+% Removing one of two daemons must leave the other running.
+should_keep_alive_one_daemon_on_killing_other(DName, _) ->
+    ?_test(begin
+        Path = filename:join([?FIXTURESDIR, DName]),
+        couch_config:set("os_daemons", "bar", Path, false),
+        timer:sleep(?DELAY),
+        {ok, Daemons} = couch_os_daemons:info([table]),
+        lists:foreach(fun check_daemon/1, Daemons),
+
+        % drop the second daemon; the original one must survive
+        couch_config:delete("os_daemons", "bar", false),
+        timer:sleep(?DELAY),
+        {ok, [Survivor]} = couch_os_daemons:info([table]),
+        check_daemon(Survivor, DName),
+
+        {ok, Tab} = couch_os_daemons:info(),
+        [Entry] = ets:tab2list(Tab),
+        check_daemon(Entry, DName)
+    end).
+
+% The configer daemon performs its own config read/write checks; we
+% only verify it is still alive and healthy afterwards.
+should_read_write_config_settings_by_daemon(DName, _) ->
+    ?_test(begin
+        % wait until the daemon script has run all of its checks;
+        % see the fixture script for details
+        timer:sleep(?TIMEOUT),
+        {ok, [Daemon]} = couch_os_daemons:info([table]),
+        check_daemon(Daemon, DName)
+    end).
+
+% A script without execute permission should end up halted.
+should_fail_due_to_lack_of_permissions(DName, _) ->
+    ?_test(should_halts(DName, 1000)).
+
+% A daemon that exits immediately on boot should end up halted.
+should_die_on_boot(DName, _) ->
+    ?_test(should_halts(DName, 1000)).
+
+% A daemon that keeps dying quickly should eventually be halted;
+% the longer wait presumably covers repeated restart attempts --
+% TODO confirm against couch_os_daemons restart policy.
+should_die_quickly(DName, _) ->
+    ?_test(should_halts(DName, 4000)).
+
+% A daemon that reboots itself must not be marked halted; the second
+% observation expects exactly one recorded error (see check_daemon/3).
+should_not_being_halted(DName, _) ->
+    ?_test(begin
+        timer:sleep(1000),
+        {ok, [First]} = couch_os_daemons:info([table]),
+        check_daemon(First, DName, 0),
+
+        % The daemon reboots every two seconds. We are at 1s now, so
+        % sleep until 3s to land in the middle of the next
+        % invocation's life span.
+        timer:sleep(2000),
+        {ok, [Second]} = couch_os_daemons:info([table]),
+        check_daemon(Second, DName, 1),
+
+        % A changed kill command means the OS process was restarted.
+        ?assertNotEqual(First#daemon.kill, Second#daemon.kill)
+    end).
+
+% Wait WaitMs, assert the daemon is halted, then unregister it so
+% teardown does not trip over the dead entry.
+should_halts(DName, WaitMs) ->
+    timer:sleep(WaitMs),
+    {ok, [Daemon]} = couch_os_daemons:info([table]),
+    check_dead(Daemon, DName),
+    couch_config:delete("os_daemons", DName, false).
+
+% Assert that a #daemon{} record describes a healthy running daemon.
+check_daemon(D) ->
+    check_daemon(D, D#daemon.name).
+
+check_daemon(D, Name) ->
+    check_daemon(D, Name, 0).
+
+% Errs is the exact number of errors the daemon may have accumulated.
+check_daemon(D, Name, Errs) ->
+    #daemon{port = Port, kill = Kill, status = Status} = D,
+    ?assert(is_port(Port)),
+    ?assertEqual(Name, D#daemon.name),
+    ?assertNotEqual(undefined, Kill),
+    ?assertEqual(running, Status),
+    ?assertEqual(Errs, length(D#daemon.errors)),
+    ?assertEqual([], D#daemon.buf).
+
+% Assert that a #daemon{} record describes a daemon that was halted.
+check_dead(D, Name) ->
+    #daemon{port = Port, kill = Kill, status = Status} = D,
+    ?assert(is_port(Port)),
+    ?assertEqual(Name, D#daemon.name),
+    ?assertNotEqual(undefined, Kill),
+    ?assertEqual(halted, Status),
+    ?assertEqual(nil, D#daemon.errors),
+    ?assertEqual(nil, D#daemon.buf).
http://git-wip-us.apache.org/repos/asf/couchdb-couch/blob/661443fb/test/couchdb_os_proc_pool.erl
----------------------------------------------------------------------
diff --git a/test/couchdb_os_proc_pool.erl b/test/couchdb_os_proc_pool.erl
new file mode 100644
index 0000000..1bb266e
--- /dev/null
+++ b/test/couchdb_os_proc_pool.erl
@@ -0,0 +1,179 @@
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+% http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+%% Tests for the couch_query_servers OS process pool: a full pool must
+%% block new clients, and a slot must be reclaimed when a client dies.
+-module(couchdb_os_proc_pool).
+
+-include("couch_eunit.hrl").
+-include_lib("couchdb/couch_db.hrl").
+
+-define(TIMEOUT, 3000). % ms; client message timeout and shutdown wait
+
+
+% Boot the server suite and cap the OS process pool at three workers.
+start() ->
+    {ok, Pid} = couch_server_sup:start_link(?CONFIG_CHAIN),
+    couch_config:set("query_server_config", "os_process_limit", "3", false),
+    Pid.
+
+% Stop the server suite and wait for the supervisor to go down.
+stop(Pid) ->
+    % Monitor *before* stopping, as the other stop/teardown functions
+    % in this patch do. The original monitored after stop(), which
+    % works only because monitoring a dead pid delivers an immediate
+    % 'noproc' DOWN; monitoring first removes that subtlety.
+    erlang:monitor(process, Pid),
+    couch_server_sup:stop(),
+    receive
+        {'DOWN', _, _, Pid, _} ->
+            ok
+    after ?TIMEOUT ->
+        throw({timeout, server_stop})
+    end.
+
+
+% Test generator wiring the pool tests to one shared server instance.
+os_proc_pool_test_() ->
+    Tests = [
+        should_block_new_proc_on_full_pool(),
+        should_free_slot_on_proc_unexpected_exit()
+    ],
+    {"OS processes pool tests", {setup, fun start/0, fun stop/1, Tests}}.
+
+
+% With os_process_limit = 3, a fourth client must block until one of
+% the first three releases its OS process, and is then handed the
+% freed process rather than a new one.
+should_block_new_proc_on_full_pool() ->
+    ?_test(begin
+        Client1 = spawn_client(),
+        Client2 = spawn_client(),
+        Client3 = spawn_client(),
+
+        ?assertEqual(ok, ping_client(Client1)),
+        ?assertEqual(ok, ping_client(Client2)),
+        ?assertEqual(ok, ping_client(Client3)),
+
+        Proc1 = get_client_proc(Client1, "1"),
+        Proc2 = get_client_proc(Client2, "2"),
+        Proc3 = get_client_proc(Client3, "3"),
+
+        % the three clients must hold three distinct OS processes
+        ?assertNotEqual(Proc1, Proc2),
+        ?assertNotEqual(Proc2, Proc3),
+        ?assertNotEqual(Proc3, Proc1),
+
+        % pool is full: a fourth client blocks waiting for a process
+        Client4 = spawn_client(),
+        ?assertEqual(timeout, ping_client(Client4)),
+
+        % releasing one slot unblocks the waiting client ...
+        ?assertEqual(ok, stop_client(Client1)),
+        ?assertEqual(ok, ping_client(Client4)),
+
+        % ... and it receives the process Client1 returned
+        Proc4 = get_client_proc(Client4, "4"),
+        ?assertEqual(Proc1, Proc4),
+
+        % lists:foreach instead of lists:map: we iterate purely for
+        % the side-effecting assertion; the result list was unused
+        lists:foreach(fun(C) ->
+            ?assertEqual(ok, stop_client(C))
+        end, [Client2, Client3, Client4])
+    end).
+
+% A client that exits abnormally must free its pool slot: a later
+% client gets served, with a process distinct from all previous ones.
+should_free_slot_on_proc_unexpected_exit() ->
+    ?_test(begin
+        Client1 = spawn_client(),
+        Client2 = spawn_client(),
+        Client3 = spawn_client(),
+
+        ?assertEqual(ok, ping_client(Client1)),
+        ?assertEqual(ok, ping_client(Client2)),
+        ?assertEqual(ok, ping_client(Client3)),
+
+        Proc1 = get_client_proc(Client1, "1"),
+        Proc2 = get_client_proc(Client2, "2"),
+        Proc3 = get_client_proc(Client3, "3"),
+
+        ?assertNotEqual(Proc1, Proc2),
+        ?assertNotEqual(Proc2, Proc3),
+        ?assertNotEqual(Proc3, Proc1),
+
+        % client dies without returning its process to the pool
+        ?assertEqual(ok, kill_client(Client1)),
+
+        % the slot is reclaimed, so a new client gets served ...
+        Client4 = spawn_client(),
+        ?assertEqual(ok, ping_client(Client4)),
+
+        % ... with a process distinct from every earlier one
+        Proc4 = get_client_proc(Client4, "4"),
+        ?assertNotEqual(Proc4, Proc1),
+        ?assertNotEqual(Proc2, Proc4),
+        ?assertNotEqual(Proc3, Proc4),
+
+        % lists:foreach instead of lists:map: iteration is for the
+        % side-effecting assertion only
+        lists:foreach(fun(C) ->
+            ?assertEqual(ok, stop_client(C))
+        end, [Client2, Client3, Client4])
+    end).
+
+
+% Spawn an unlinked client process that checks out a javascript OS
+% process from the pool and serves requests via loop/3. Plain spawn
+% (not spawn_link) appears deliberate: kill_client/1 makes the client
+% exit abnormally, which must not take the test process down with it.
+% Returns {Pid, Ref}; Ref tags all replies from this client.
+spawn_client() ->
+    Parent = self(),
+    Ref = make_ref(),
+    Pid = spawn(fun() ->
+        Proc = couch_query_servers:get_os_process(<<"javascript">>),
+        loop(Parent, Ref, Proc)
+    end),
+    {Pid, Ref}.
+
+% Returns ok once the client answers, or timeout when it does not
+% reply within ?TIMEOUT (e.g. it is blocked on a full pool).
+ping_client({Pid, Ref}) ->
+    Pid ! ping,
+    receive
+        {pong, Ref} -> ok
+    after ?TIMEOUT -> timeout
+    end.
+
+% Fetch the OS process handle held by a client; raises a descriptive
+% assertion failure if the client does not answer in time.
+get_client_proc({Pid, Ref}, ClientName) ->
+    Pid ! get_proc,
+    receive
+        {proc, Ref, Proc} ->
+            Proc
+    after ?TIMEOUT ->
+        Reason = "Timeout getting client " ++ ClientName ++ " proc",
+        erlang:error({assertion_failed, [{module, ?MODULE},
+                                         {line, ?LINE},
+                                         {reason, Reason}]})
+    end.
+
+% Ask the client to return its process to the pool and exit cleanly.
+stop_client({Pid, Ref}) ->
+    Pid ! stop,
+    receive
+        {stop, Ref} -> ok
+    after ?TIMEOUT -> timeout
+    end.
+
+% Make the client exit abnormally without returning its process.
+kill_client({Pid, Ref}) ->
+    Pid ! die,
+    receive
+        {die, Ref} -> ok
+    after ?TIMEOUT -> timeout
+    end.
+
+% Client message loop: answers pings, hands out the held process
+% handle, and terminates either cleanly (stop) or abnormally (die).
+loop(Parent, Ref, OsProc) ->
+    receive
+        ping ->
+            Parent ! {pong, Ref},
+            loop(Parent, Ref, OsProc);
+        get_proc ->
+            Parent ! {proc, Ref, OsProc},
+            loop(Parent, Ref, OsProc);
+        stop ->
+            % return the process to the pool before acknowledging
+            couch_query_servers:ret_os_process(OsProc),
+            Parent ! {stop, Ref};
+        die ->
+            Parent ! {die, Ref},
+            exit(some_error)
+    end.