From 9bc4dda0e48aa42977b9362ba3828737f26ba827 Mon Sep 17 00:00:00 2001 From: Jan Lehnardt Date: Fri, 24 Jun 2022 15:24:22 +0200 Subject: [PATCH 01/32] feat(access): add access handling to chttpd --- src/chttpd/src/chttpd.erl | 2 ++ src/chttpd/src/chttpd_db.erl | 21 ++++++++++++++++----- src/chttpd/src/chttpd_view.erl | 15 +++++++++++++++ 3 files changed, 33 insertions(+), 5 deletions(-) diff --git a/src/chttpd/src/chttpd.erl b/src/chttpd/src/chttpd.erl index c25c1883868..3c9ddd13872 100644 --- a/src/chttpd/src/chttpd.erl +++ b/src/chttpd/src/chttpd.erl @@ -1031,6 +1031,8 @@ error_info({bad_request, Error, Reason}) -> {400, couch_util:to_binary(Error), couch_util:to_binary(Reason)}; error_info({query_parse_error, Reason}) -> {400, <<"query_parse_error">>, Reason}; +error_info(access) -> + {403, <<"forbidden">>, <<"access">>}; error_info(database_does_not_exist) -> {404, <<"not_found">>, <<"Database does not exist.">>}; error_info(not_found) -> diff --git a/src/chttpd/src/chttpd_db.erl b/src/chttpd/src/chttpd_db.erl index 748b356fd6e..46fdb444ad9 100644 --- a/src/chttpd/src/chttpd_db.erl +++ b/src/chttpd/src/chttpd_db.erl @@ -955,16 +955,18 @@ view_cb(Msg, Acc) -> couch_mrview_http:view_cb(Msg, Acc). db_doc_req(#httpd{method = 'DELETE'} = Req, Db, DocId) -> - % check for the existence of the doc to handle the 404 case. - couch_doc_open(Db, DocId, nil, []), - case chttpd:qs_value(Req, "rev") of + % fetch the old doc revision, so we can compare access control + % in send_update_doc() later. + Doc0 = couch_doc_open(Db, DocId, nil, [{user_ctx, Req#httpd.user_ctx}]), + Revs = chttpd:qs_value(Req, "rev"), + case Revs of undefined -> Body = {[{<<"_deleted">>, true}]}; Rev -> Body = {[{<<"_rev">>, ?l2b(Rev)}, {<<"_deleted">>, true}]} end, - Doc = couch_doc_from_req(Req, Db, DocId, Body), - send_updated_doc(Req, Db, DocId, Doc); + Doc = Doc0#doc{revs=Revs,body=Body,deleted=true}, + send_updated_doc(Req, Db, DocId, couch_doc_from_req(Req, Db, DocId, Doc)); db_doc_req(#httpd{method = 'GET', mochi_req = MochiReq} = Req, Db, DocId) -> #doc_query_args{ rev = Rev0, @@ -1414,6 +1416,8 @@ receive_request_data(Req, LenLeft) when LenLeft > 0 -> receive_request_data(_Req, _) -> throw(<<"expected more data">>). +update_doc_result_to_json({#doc{id=Id,revs=Rev}, access}) -> + update_doc_result_to_json({{Id, Rev}, access}); update_doc_result_to_json({error, _} = Error) -> {_Code, Err, Msg} = chttpd:error_info(Error), {[ @@ -1968,6 +1972,7 @@ parse_shards_opt(Req) -> [ {n, parse_shards_opt("n", Req, config:get_integer("cluster", "n", 3))}, {q, parse_shards_opt("q", Req, config:get_integer("cluster", "q", 2))}, + {access, parse_shards_opt_access(chttpd:qs_value(Req, "access", false))}, {placement, parse_shards_opt( "placement", Req, config:get("cluster", "placement") @@ -2004,6 +2009,12 @@ parse_shards_opt(Param, Req, Default) -> false -> throw({bad_request, Err}) end. +parse_shards_opt_access(Value) when is_boolean(Value) -> + Value; +parse_shards_opt_access(_Value) -> + Err = ?l2b(["The `access` value should be a boolean."]), + throw({bad_request, Err}). + parse_engine_opt(Req) -> case chttpd:qs_value(Req, "engine") of undefined -> diff --git a/src/chttpd/src/chttpd_view.erl b/src/chttpd/src/chttpd_view.erl index 1d721d18988..f74088dbcc6 100644 --- a/src/chttpd/src/chttpd_view.erl +++ b/src/chttpd/src/chttpd_view.erl @@ -69,6 +69,21 @@ fabric_query_view(Db, Req, DDoc, ViewName, Args) -> Max = chttpd:chunked_response_buffer_size(), VAcc = #vacc{db = Db, req = Req, threshold = Max}, Options = [{user_ctx, Req#httpd.user_ctx}], +% {ok, Resp} = fabric:query_view(Db, Options, DDoc, ViewName, +% fun view_cb/2, VAcc, Args), +% {ok, Resp#vacc.resp}. +% % TODO: This might just be a debugging leftover, we might be able +% % to undo this by just returning {ok, Resp#vacc.resp} +% % However, this *might* be here because we need to handle +% % errors here now, because access might tell us to. +% case fabric:query_view(Db, Options, DDoc, ViewName, +% fun view_cb/2, VAcc, Args) of +% {ok, Resp} -> +% {ok, Resp#vacc.resp}; +% {error, Error} -> +% throw(Error) +% end. + {ok, Resp} = fabric:query_view( Db, Options, From ea4e3cc43600c0f64983bbd3ba7240d1405b6d3e Mon Sep 17 00:00:00 2001 From: Jan Lehnardt Date: Fri, 24 Jun 2022 15:42:29 +0200 Subject: [PATCH 02/32] feat(access): add access to couch_db internal records --- src/couch/include/couch_db.hrl | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/src/couch/include/couch_db.hrl b/src/couch/include/couch_db.hrl index e70706a7f98..4b5e9705bb0 100644 --- a/src/couch/include/couch_db.hrl +++ b/src/couch/include/couch_db.hrl @@ -65,7 +65,8 @@ -record(doc_info, { id = <<"">>, high_seq = 0, - revs = [] % rev_info + revs = [], % rev_info + access = [] }). -record(size_info, { @@ -78,7 +79,8 @@ update_seq = 0, deleted = false, rev_tree = [], - sizes = #size_info{} + sizes = #size_info{}, + access = [] }). -record(httpd, { @@ -122,7 +124,8 @@ % key/value tuple of meta information, provided when using special options: % couch_db:open_doc(Db, Id, Options). - meta = [] + meta = [], + access = [] }). @@ -205,7 +208,8 @@ ptr, seq, sizes = #size_info{}, - atts = [] + atts = [], + access = [] }). -record (fabric_changes_acc, { From d95945c8c54bf59570734d2023a918ad4d09ca59 Mon Sep 17 00:00:00 2001 From: Jan Lehnardt Date: Fri, 24 Jun 2022 17:01:04 +0200 Subject: [PATCH 03/32] feat(access): handle new records in couch_doc --- src/couch/src/couch_doc.erl | 44 +++++++++++++++++++++++++++++++------ 1 file changed, 37 insertions(+), 7 deletions(-) diff --git a/src/couch/src/couch_doc.erl b/src/couch/src/couch_doc.erl index 95b1c8b4165..61ea4cbe8cc 100644 --- a/src/couch/src/couch_doc.erl +++ b/src/couch/src/couch_doc.erl @@ -26,6 +26,8 @@ -export([with_ejson_body/1]). -export([is_deleted/1]). +-export([has_access/1, has_no_access/1]). + -include_lib("couch/include/couch_db.hrl"). -spec to_path(#doc{}) -> path(). @@ -40,15 +42,28 @@ to_branch(Doc, [RevId | Rest]) -> [{RevId, ?REV_MISSING, to_branch(Doc, Rest)}]. % helpers used by to_json_obj +reduce_access({Access}) -> Access; +reduce_access(Access) -> Access. + to_json_rev(0, []) -> []; to_json_rev(Start, [FirstRevId | _]) -> [{<<"_rev">>, ?l2b([integer_to_list(Start), "-", revid_to_str(FirstRevId)])}]. -to_json_body(true, {Body}) -> +% TODO: remove if we can +% to_json_body(Del, Body) -> +% to_json_body(Del, Body, []). + +to_json_body(true, {Body}, []) -> Body ++ [{<<"_deleted">>, true}]; -to_json_body(false, {Body}) -> - Body. +to_json_body(false, {Body}, []) -> + Body; +to_json_body(true, {Body}, Access0) -> + Access = reduce_access(Access0), + Body ++ [{<<"_deleted">>, true}] ++ [{<<"_access">>, {Access}}]; +to_json_body(false, {Body}, Access0) -> + Access = reduce_access(Access0), + Body ++ [{<<"_access">>, Access}]. to_json_revisions(Options, Start, RevIds0) -> RevIds = @@ -138,14 +153,15 @@ doc_to_json_obj( deleted = Del, body = Body, revs = {Start, RevIds}, - meta = Meta + meta = Meta, + access = Access } = Doc, Options ) -> { [{<<"_id">>, Id}] ++ to_json_rev(Start, RevIds) ++ - to_json_body(Del, Body) ++ + to_json_body(Del, Body, Access) ++ to_json_revisions(Options, Start, RevIds) ++ to_json_meta(Meta) ++ to_json_attachments(Doc#doc.atts, Options) @@ -401,7 +417,7 @@ max_seq(Tree, UpdateSeq) -> end, couch_key_tree:fold(FoldFun, UpdateSeq, Tree). -to_doc_info_path(#full_doc_info{id = Id, rev_tree = Tree, update_seq = FDISeq}) -> +to_doc_info_path(#full_doc_info{id = Id, rev_tree = Tree, update_seq = FDISeq, access = Access}) -> RevInfosAndPath = [ {rev_info(Node), Path} || {_Leaf, Path} = Node <- @@ -419,7 +435,7 @@ to_doc_info_path(#full_doc_info{id = Id, rev_tree = Tree, update_seq = FDISeq}) ), [{_RevInfo, WinPath} | _] = SortedRevInfosAndPath, RevInfos = [RevInfo || {RevInfo, _Path} <- SortedRevInfosAndPath], - {#doc_info{id = Id, high_seq = max_seq(Tree, FDISeq), revs = RevInfos}, WinPath}. + {#doc_info{id = Id, high_seq = max_seq(Tree, FDISeq), revs = RevInfos, access = Access}, WinPath}. rev_info({#leaf{} = Leaf, {Pos, [RevId | _]}}) -> #rev_info{ @@ -459,6 +475,20 @@ is_deleted(Tree) -> false end. +get_access({Props}) -> + get_access(couch_doc:from_json_obj({Props})); +get_access(#doc{access=Access}) -> + Access. + +has_access(Doc) -> + has_access1(get_access(Doc)). + +has_no_access(Doc) -> + not has_access1(get_access(Doc)). + +has_access1([]) -> false; +has_access1(_) -> true. + get_validate_doc_fun({Props}) -> get_validate_doc_fun(couch_doc:from_json_obj({Props})); get_validate_doc_fun(#doc{body = {Props}} = DDoc) -> From a9eb33df8da2f060d926cc9194d9529bfd94f422 Mon Sep 17 00:00:00 2001 From: Jan Lehnardt Date: Fri, 24 Jun 2022 17:13:25 +0200 Subject: [PATCH 04/32] feat(access): add new _users role for all authenticated users --- src/couch/src/couch_httpd_auth.erl | 17 +++++++++++------ 1 file changed, 11 insertions(+), 6 deletions(-) diff --git a/src/couch/src/couch_httpd_auth.erl b/src/couch/src/couch_httpd_auth.erl index 4a7b217d1fe..2e62c88919b 100644 --- a/src/couch/src/couch_httpd_auth.erl +++ b/src/couch/src/couch_httpd_auth.erl @@ -99,6 +99,13 @@ basic_name_pw(Req) -> nil end. +extract_roles(UserProps) -> + Roles = couch_util:get_value(<<"roles">>, UserProps, []), + case lists:member(<<"_admin">>, Roles) of + true -> Roles; + _ -> Roles ++ [<<"_users">>] + end. + default_authentication_handler(Req) -> default_authentication_handler(Req, couch_auth_cache). @@ -117,7 +124,7 @@ default_authentication_handler(Req, AuthModule) -> Req#httpd{ user_ctx = #user_ctx{ name = UserName, - roles = couch_util:get_value(<<"roles">>, UserProps, []) + roles = extract_roles(UserProps) } }; false -> @@ -189,7 +196,7 @@ proxy_auth_user(Req) -> Roles = case header_value(Req, XHeaderRoles) of undefined -> []; - Else -> re:split(Else, "\\s*,\\s*", [trim, {return, binary}]) + Else -> [<<"_users">> | re:split(Else, "\\s*,\\s*", [trim, {return, binary}])] end, case chttpd_util:get_chttpd_auth_config_boolean( @@ -370,9 +377,7 @@ cookie_authentication_handler(#httpd{mochi_req = MochiReq} = Req, AuthModule) -> Req#httpd{ user_ctx = #user_ctx{ name = ?l2b(User), - roles = couch_util:get_value( - <<"roles">>, UserProps, [] - ) + roles = extract_roles(UserProps) }, auth = {FullSecret, TimeLeft < Timeout * 0.9} }; @@ -500,7 +505,7 @@ handle_session_req(#httpd{method = 'POST', mochi_req = MochiReq} = Req, AuthModu {[ {ok, true}, {name, UserName}, - {roles, couch_util:get_value(<<"roles">>, UserProps, [])} + {roles, extract_roles(UserProps)} ]} ); false -> From 78eb8d800774868f8b8e3f9d961cbdf965c961b1 Mon Sep 17 00:00:00 2001 From: Jan Lehnardt Date: Fri, 24 Jun 2022 17:18:11 +0200 Subject: [PATCH 05/32] feat(access): add access query server --- src/couch/src/couch_access_native_proc.erl | 143 +++++++++++++++++++++ src/couch/src/couch_proc_manager.erl | 1 + 2 files changed, 144 insertions(+) create mode 100644 src/couch/src/couch_access_native_proc.erl diff --git a/src/couch/src/couch_access_native_proc.erl b/src/couch/src/couch_access_native_proc.erl new file mode 100644 index 00000000000..965b124de4a --- /dev/null +++ b/src/couch/src/couch_access_native_proc.erl @@ -0,0 +1,143 @@ +% Licensed under the Apache License, Version 2.0 (the "License"); you may not +% use this file except in compliance with the License. You may obtain a copy of +% the License at +% +% http://www.apache.org/licenses/LICENSE-2.0 +% +% Unless required by applicable law or agreed to in writing, software +% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +% License for the specific language governing permissions and limitations under +% the License. + +-module(couch_access_native_proc). +-behavior(gen_server). + + +-export([ + start_link/0, + set_timeout/2, + prompt/2 +]). + +-export([ + init/1, + terminate/2, + handle_call/3, + handle_cast/2, + handle_info/2, + code_change/3 +]). + + +-record(st, { + indexes = [], + timeout = 5000 % TODO: make configurable +}). + +start_link() -> + gen_server:start_link(?MODULE, [], []). + + +set_timeout(Pid, TimeOut) when is_integer(TimeOut), TimeOut > 0 -> + gen_server:call(Pid, {set_timeout, TimeOut}). + + +prompt(Pid, Data) -> + gen_server:call(Pid, {prompt, Data}). + + +init(_) -> + {ok, #st{}}. + + +terminate(_Reason, _St) -> + ok. + + +handle_call({set_timeout, TimeOut}, _From, St) -> + {reply, ok, St#st{timeout=TimeOut}}; + +handle_call({prompt, [<<"reset">>]}, _From, St) -> + {reply, true, St#st{indexes=[]}}; + +handle_call({prompt, [<<"reset">>, _QueryConfig]}, _From, St) -> + {reply, true, St#st{indexes=[]}}; + +handle_call({prompt, [<<"add_fun">>, IndexInfo]}, _From, St) -> + {reply, true, St}; + +handle_call({prompt, [<<"map_doc">>, Doc]}, _From, St) -> + {reply, map_doc(St, mango_json:to_binary(Doc)), St}; + +handle_call({prompt, [<<"reduce">>, _, _]}, _From, St) -> + {reply, null, St}; + +handle_call({prompt, [<<"rereduce">>, _, _]}, _From, St) -> + {reply, null, St}; + +handle_call({prompt, [<<"index_doc">>, Doc]}, _From, St) -> + {reply, [[]], St}; + +handle_call(Msg, _From, St) -> + {stop, {invalid_call, Msg}, {invalid_call, Msg}, St}. + +handle_cast(garbage_collect, St) -> + erlang:garbage_collect(), + {noreply, St}; + +handle_cast(Msg, St) -> + {stop, {invalid_cast, Msg}, St}. + + +handle_info(Msg, St) -> + {stop, {invalid_info, Msg}, St}. + + +code_change(_OldVsn, St, _Extra) -> + {ok, St}. + +% return value is an array of arrays, first dimension is the different indexes +% [0] will be by-access-id // for this test, later we should make this by-access +% -seq, since that one we will always need, and by-access-id can be opt-in. +% the second dimension is the number of emit kv pairs: +% [ // the return value +% [ // the first view +% ['k1', 'v1'], // the first k/v pair for the first view +% ['k2', 'v2'] // second, etc. +% ], +% [ // second view +% ['l1', 'w1'] // first k/v par in second view +% ] +% ] +% {"id":"account/bongel","key":"account/bongel","value":{"rev":"1-967a00dff5e02add41819138abb3284d"}}, + +map_doc(_St, {Doc}) -> + case couch_util:get_value(<<"_access">>, Doc) of + undefined -> + [[],[]]; % do not index this doc + Access when is_list(Access) -> + Id = couch_util:get_value(<<"_id">>, Doc), + Rev = couch_util:get_value(<<"_rev">>, Doc), + Seq = couch_util:get_value(<<"_seq">>, Doc), + Deleted = couch_util:get_value(<<"_deleted">>, Doc, false), + BodySp = couch_util:get_value(<<"_body_sp">>, Doc), + % by-access-id + ById = case Deleted of + false -> + lists:map(fun(UserOrRole) -> [ + [[UserOrRole, Id], Rev] + ] end, Access); + _True -> [[]] + end, + + % by-access-seq + BySeq = lists:map(fun(UserOrRole) -> [ + [[UserOrRole, Seq], [{rev, Rev}, {deleted, Deleted}, {body_sp, BodySp}]] + ] end, Access), + ById ++ BySeq; + Else -> + % TODO: no comprende: should not be needed once we implement + % _access field validation + [[],[]] + end. diff --git a/src/couch/src/couch_proc_manager.erl b/src/couch/src/couch_proc_manager.erl index 46765b339f4..f7903ebd4e6 100644 --- a/src/couch/src/couch_proc_manager.erl +++ b/src/couch/src/couch_proc_manager.erl @@ -104,6 +104,7 @@ init([]) -> ets:insert(?SERVERS, get_servers_from_env("COUCHDB_QUERY_SERVER_")), ets:insert(?SERVERS, get_servers_from_env("COUCHDB_NATIVE_QUERY_SERVER_")), ets:insert(?SERVERS, [{"QUERY", {mango_native_proc, start_link, []}}]), + ets:insert(?SERVERS, [{"_ACCESS", {couch_access_native_proc, start_link, []}}]), maybe_configure_erlang_native_servers(), {ok, #state{ From cc05b2bfbe9854e908298c026e9efe2d6aab18e9 Mon Sep 17 00:00:00 2001 From: Jan Lehnardt Date: Fri, 24 Jun 2022 17:28:12 +0200 Subject: [PATCH 06/32] feat(access): expand couch_btree / bt_engine to handle access --- src/couch/src/couch_bt_engine.erl | 27 +++++++++++++++++---------- src/couch/src/couch_btree.erl | 12 ++++++++++++ 2 files changed, 29 insertions(+), 10 deletions(-) diff --git a/src/couch/src/couch_bt_engine.erl b/src/couch/src/couch_bt_engine.erl index 0549de566d3..368425beba5 100644 --- a/src/couch/src/couch_bt_engine.erl +++ b/src/couch/src/couch_bt_engine.erl @@ -664,20 +664,22 @@ id_tree_split(#full_doc_info{} = Info) -> update_seq = Seq, deleted = Deleted, sizes = SizeInfo, - rev_tree = Tree + rev_tree = Tree, + access = Access } = Info, - {Id, {Seq, ?b2i(Deleted), split_sizes(SizeInfo), disk_tree(Tree)}}. + {Id, {Seq, ?b2i(Deleted), split_sizes(SizeInfo), disk_tree(Tree), split_access(Access)}}. id_tree_join(Id, {HighSeq, Deleted, DiskTree}) -> % Handle old formats before data_size was added - id_tree_join(Id, {HighSeq, Deleted, #size_info{}, DiskTree}); -id_tree_join(Id, {HighSeq, Deleted, Sizes, DiskTree}) -> + id_tree_join(Id, {HighSeq, Deleted, #size_info{}, DiskTree, []}); +id_tree_join(Id, {HighSeq, Deleted, Sizes, DiskTree, Access}) -> #full_doc_info{ id = Id, update_seq = HighSeq, deleted = ?i2b(Deleted), sizes = couch_db_updater:upgrade_sizes(Sizes), - rev_tree = rev_tree(DiskTree) + rev_tree = rev_tree(DiskTree), + access = join_access(Access) }. id_tree_reduce(reduce, FullDocInfos) -> @@ -714,19 +716,21 @@ seq_tree_split(#full_doc_info{} = Info) -> update_seq = Seq, deleted = Del, sizes = SizeInfo, - rev_tree = Tree + rev_tree = Tree, + access = Access } = Info, - {Seq, {Id, ?b2i(Del), split_sizes(SizeInfo), disk_tree(Tree)}}. + {Seq, {Id, ?b2i(Del), split_sizes(SizeInfo), disk_tree(Tree), split_access(Access)}}. seq_tree_join(Seq, {Id, Del, DiskTree}) when is_integer(Del) -> - seq_tree_join(Seq, {Id, Del, {0, 0}, DiskTree}); -seq_tree_join(Seq, {Id, Del, Sizes, DiskTree}) when is_integer(Del) -> + seq_tree_join(Seq, {Id, Del, {0, 0}, DiskTree, []}); +seq_tree_join(Seq, {Id, Del, Sizes, DiskTree, Access}) when is_integer(Del) -> #full_doc_info{ id = Id, update_seq = Seq, deleted = ?i2b(Del), sizes = join_sizes(Sizes), - rev_tree = rev_tree(DiskTree) + rev_tree = rev_tree(DiskTree), + access = join_access(Access) }; seq_tree_join(KeySeq, {Id, RevInfos, DeletedRevInfos}) -> % Older versions stored #doc_info records in the seq_tree. @@ -755,6 +759,9 @@ seq_tree_reduce(reduce, DocInfos) -> seq_tree_reduce(rereduce, Reds) -> lists:sum(Reds). +join_access(Access) -> Access. +split_access(Access) -> Access. + local_tree_split(#doc{revs = {0, [Rev]}} = Doc) when is_binary(Rev) -> #doc{ id = Id, diff --git a/src/couch/src/couch_btree.erl b/src/couch/src/couch_btree.erl index b974a22eeca..d7ca7bab4bf 100644 --- a/src/couch/src/couch_btree.erl +++ b/src/couch/src/couch_btree.erl @@ -16,6 +16,7 @@ -export([fold/4, full_reduce/1, final_reduce/2, size/1, foldl/3, foldl/4]). -export([fold_reduce/4, lookup/2, get_state/1, set_options/2]). -export([extract/2, assemble/3, less/3]). +-export([full_reduce_with_options/2]). -include_lib("couch/include/couch_db.hrl"). @@ -109,6 +110,17 @@ full_reduce(#btree{root = nil, reduce = Reduce}) -> full_reduce(#btree{root = Root}) -> {ok, element(2, Root)}. +full_reduce_with_options(Bt, Options0) -> + CountFun = fun(_SeqStart, PartialReds, 0) -> + {ok, couch_btree:final_reduce(Bt, PartialReds)} + end, + [UserName] = proplists:get_value(start_key, Options0, <<"">>), + EndKey = {[UserName, {[]}]}, + Options = Options0 ++ [ + {end_key, EndKey} + ], + fold_reduce(Bt, CountFun, 0, Options). + size(#btree{root = nil}) -> 0; size(#btree{root = {_P, _Red}}) -> From ae300745ba394a5b6767db8540e4f1f2290dd698 Mon Sep 17 00:00:00 2001 From: Jan Lehnardt Date: Fri, 24 Jun 2022 18:43:52 +0200 Subject: [PATCH 07/32] feat(access): handle access in couch_db[_updater] --- src/couch/src/couch_db.erl | 219 +++++++++++++++++++++++++---- src/couch/src/couch_db_int.hrl | 3 +- src/couch/src/couch_db_updater.erl | 154 ++++++++++++++++---- 3 files changed, 319 insertions(+), 57 deletions(-) diff --git a/src/couch/src/couch_db.erl b/src/couch/src/couch_db.erl index b02aae446f1..8b00d6ee0e2 100644 --- a/src/couch/src/couch_db.erl +++ b/src/couch/src/couch_db.erl @@ -31,6 +31,9 @@ is_admin/1, check_is_admin/1, check_is_member/1, + validate_access/2, + check_access/2, + has_access_enabled/1, name/1, get_after_doc_read_fun/1, @@ -137,6 +140,7 @@ ]). -include_lib("couch/include/couch_db.hrl"). +-include_lib("couch_mrview/include/couch_mrview.hrl"). % TODO: can we do without this? -include("couch_db_int.hrl"). -define(DBNAME_REGEX, @@ -288,6 +292,12 @@ wait_for_compaction(#db{main_pid = Pid} = Db, Timeout) -> is_compacting(DbName) -> couch_server:is_compacting(DbName). +has_access_enabled(#db{access=true}) -> true; +has_access_enabled(_) -> false. + +is_read_from_ddoc_cache(Options) -> + lists:member(ddoc_cache, Options). + delete_doc(Db, Id, Revisions) -> DeletedDocs = [#doc{id = Id, revs = [Rev], deleted = true} || Rev <- Revisions], {ok, [Result]} = update_docs(Db, DeletedDocs, []), @@ -296,23 +306,33 @@ delete_doc(Db, Id, Revisions) -> open_doc(Db, IdOrDocInfo) -> open_doc(Db, IdOrDocInfo, []). -open_doc(Db, Id, Options) -> +open_doc(Db, Id, Options0) -> increment_stat(Db, [couchdb, database_reads]), + Options = case has_access_enabled(Db) of + true -> Options0 ++ [conflicts]; + _Else -> Options0 + end, case open_doc_int(Db, Id, Options) of {ok, #doc{deleted = true} = Doc} -> case lists:member(deleted, Options) of true -> - apply_open_options({ok, Doc}, Options); + {ok, Doc}; false -> {not_found, deleted} end; Else -> - apply_open_options(Else, Options) + Else end. -apply_open_options({ok, Doc}, Options) -> +apply_open_options(Db, {ok, Doc}, Options) -> + ok = validate_access(Db, Doc, Options), + apply_open_options1({ok, Doc}, Options); +apply_open_options(_Db, Else, _Options) -> + Else. + +apply_open_options1({ok, Doc}, Options) -> apply_open_options2(Doc, Options); -apply_open_options(Else, _Options) -> +apply_open_options1(Else, _Options) -> Else. apply_open_options2(Doc, []) -> @@ -365,7 +385,7 @@ open_doc_revs(Db, IdRevsOpts, Options) when is_list(IdRevsOpts) -> open_doc_revs(Db, Id, Revs, Options) -> increment_stat(Db, [couchdb, database_reads]), [{ok, Results}] = open_doc_revs_int(Db, [{Id, Revs}], Options), - {ok, [apply_open_options(Result, Options) || Result <- Results]}. + {ok, [apply_open_options(Db, Result, Options) || Result <- Results]}. % Each returned result is a list of tuples: % {Id, MissingRevs, PossibleAncestors} @@ -609,7 +629,8 @@ get_db_info(Db) -> name = Name, compactor_pid = Compactor, instance_start_time = StartTime, - committed_update_seq = CommittedUpdateSeq + committed_update_seq = CommittedUpdateSeq, + access = Access } = Db, {ok, DocCount} = get_doc_count(Db), {ok, DelDocCount} = get_del_doc_count(Db), @@ -644,7 +665,8 @@ get_db_info(Db) -> {committed_update_seq, CommittedUpdateSeq}, {compacted_seq, CompactedSeq}, {props, Props}, - {uuid, Uuid} + {uuid, Uuid}, + {access, Access} ], {ok, InfoList}. @@ -769,6 +791,72 @@ security_error_type(#user_ctx{name = null}) -> security_error_type(#user_ctx{name = _}) -> forbidden. +is_per_user_ddoc(#doc{access=[]}) -> false; +is_per_user_ddoc(#doc{access=[<<"_users">>]}) -> false; +is_per_user_ddoc(_) -> true. + +validate_access(Db, Doc) -> + validate_access(Db, Doc, []). + +validate_access(Db, Doc, Options) -> + validate_access1(has_access_enabled(Db), Db, Doc, Options). + +validate_access1(false, _Db, _Doc, _Options) -> ok; +validate_access1(true, Db, #doc{meta=Meta}=Doc, Options) -> + case proplists:get_value(conflicts, Meta) of + undefined -> % no conflicts + case is_read_from_ddoc_cache(Options) andalso is_per_user_ddoc(Doc) of + true -> throw({not_found, missing}); + _False -> validate_access2(Db, Doc) + end; + _Else -> % only admins can read conflicted docs in _access dbs + case is_admin(Db) of + true -> ok; + _Else2 -> throw({forbidden, <<"document is in conflict">>}) + end + end. +validate_access2(Db, Doc) -> + validate_access3(check_access(Db, Doc)). + +validate_access3(true) -> ok; +validate_access3(_) -> throw({forbidden, <<"can't touch this">>}). + +check_access(Db, #doc{access=Access}) -> + check_access(Db, Access); +check_access(Db, Access) -> + #user_ctx{ + name=UserName, + roles=UserRoles + } = Db#db.user_ctx, + case Access of + [] -> + % if doc has no _access, userCtX must be admin + is_admin(Db); + Access -> + % if doc has _access, userCtx must be admin OR matching user or role + % _access = ["a", "b", ] + case is_admin(Db) of + true -> + true; + _ -> + case {check_name(UserName, Access), check_roles(UserRoles, Access)} of + {true, _} -> true; + {_, true} -> true; + _ -> false + end + end + end. + +check_name(null, _Access) -> true; +check_name(UserName, Access) -> + lists:member(UserName, Access). +% nicked from couch_db:check_security + +check_roles(Roles, Access) -> + UserRolesSet = ordsets:from_list(Roles), + RolesSet = ordsets:from_list(Access ++ ["_users"]), + not ordsets:is_disjoint(UserRolesSet, RolesSet). + get_admins(#db{security = SecProps}) -> couch_util:get_value(<<"admins">>, SecProps, {[]}). @@ -910,9 +998,14 @@ group_alike_docs([Doc | Rest], [Bucket | RestBuckets]) -> end. validate_doc_update(#db{} = Db, #doc{id = <<"_design/", _/binary>>} = Doc, _GetDiskDocFun) -> - case catch check_is_admin(Db) of - ok -> validate_ddoc(Db, Doc); - Error -> Error + case couch_doc:has_access(Doc) of + true -> + validate_ddoc(Db, Doc); + _Else -> + case catch check_is_admin(Db) of + ok -> validate_ddoc(Db, Doc); + Error -> Error + end end; validate_doc_update(#db{validate_doc_funs = undefined} = Db, Doc, Fun) -> ValidationFuns = load_validation_funs(Db), @@ -1307,6 +1400,32 @@ doc_tag(#doc{meta = Meta}) -> Else -> throw({invalid_doc_tag, Else}) end. +validate_update(Db, Doc) -> + case catch validate_access(Db, Doc) of + ok -> Doc; + Error -> Error + end. + + +validate_docs_access(Db, DocBuckets, DocErrors) -> + validate_docs_access1(Db, DocBuckets, {[], DocErrors}). + +validate_docs_access1(_Db, [], {DocBuckets0, DocErrors}) -> + DocBuckets1 = lists:reverse(lists:map(fun lists:reverse/1, DocBuckets0)), + DocBuckets = case DocBuckets1 of + [[]] -> []; + Else -> Else + end, + {ok, DocBuckets, lists:reverse(DocErrors)}; +validate_docs_access1(Db, [DocBucket|RestBuckets], {DocAcc, ErrorAcc}) -> + {NewBuckets, NewErrors} = lists:foldl(fun(Doc, {Acc, ErrAcc}) -> + case catch validate_access(Db, Doc) of + ok -> {[Doc|Acc], ErrAcc}; + Error -> {Acc, [{doc_tag(Doc), Error}|ErrAcc]} + end + end, {[], ErrorAcc}, DocBucket), + validate_docs_access1(Db, RestBuckets, {[NewBuckets | DocAcc], NewErrors}). + update_docs(Db, Docs0, Options, ?REPLICATED_CHANGES) -> Docs = tag_docs(Docs0), @@ -1330,13 +1449,35 @@ update_docs(Db, Docs0, Options, ?REPLICATED_CHANGES) -> ] || Bucket <- DocBuckets ], - {ok, _} = write_and_commit( + {ok, Results} = write_and_commit( Db, DocBuckets2, NonRepDocs, [merge_conflicts | Options] ), - {ok, DocErrors}; + case couch_db:has_access_enabled(Db) of + false -> + % we’re done here + {ok, DocErrors}; + _ -> + AccessViolations = lists:filter(fun({_Ref, Tag}) -> Tag =:= access end, Results), + case length(AccessViolations) of + 0 -> + % we’re done here + {ok, DocErrors}; + _ -> + % dig out FDIs from Docs matching our tags/refs + DocsDict = lists:foldl(fun(Doc, Dict) -> + Tag = doc_tag(Doc), + dict:store(Tag, Doc, Dict) + end, dict:new(), Docs), + AccessResults = lists:map(fun({Ref, Access}) -> + { dict:fetch(Ref, DocsDict), Access } + end, AccessViolations), + {ok, AccessResults} + end + end; + update_docs(Db, Docs0, Options, ?INTERACTIVE_EDIT) -> Docs = tag_docs(Docs0), @@ -1458,7 +1599,7 @@ write_and_commit( MergeConflicts = lists:member(merge_conflicts, Options), MRef = erlang:monitor(process, Pid), try - Pid ! {update_docs, self(), DocBuckets, NonRepDocs, MergeConflicts}, + Pid ! {update_docs, self(), DocBuckets, NonRepDocs, MergeConflicts, Ctx}, case collect_results_with_metrics(Pid, MRef, []) of {ok, Results} -> {ok, Results}; @@ -1473,7 +1614,7 @@ write_and_commit( % We only retry once DocBuckets3 = prepare_doc_summaries(Db2, DocBuckets2), close(Db2), - Pid ! {update_docs, self(), DocBuckets3, NonRepDocs, MergeConflicts}, + Pid ! {update_docs, self(), DocBuckets3, NonRepDocs, MergeConflicts, Ctx}, case collect_results_with_metrics(Pid, MRef, []) of {ok, Results} -> {ok, Results}; retry -> throw({update_error, compaction_retry}) @@ -1685,6 +1826,12 @@ open_read_stream(Db, AttState) -> is_active_stream(Db, StreamEngine) -> couch_db_engine:is_active_stream(Db, StreamEngine). +changes_since(Db, StartSeq, Fun, Options, Acc) when is_record(Db, db) -> + case couch_db:has_access_enabled(Db) and not couch_db:is_admin(Db) of + true -> couch_mrview:query_changes_access(Db, StartSeq, Fun, Options, Acc); + false -> couch_db_engine:fold_changes(Db, StartSeq, Fun, Options, Acc) + end. + calculate_start_seq(_Db, _Node, Seq) when is_integer(Seq) -> Seq; calculate_start_seq(Db, Node, {Seq, Uuid}) -> @@ -1813,7 +1960,10 @@ fold_changes(Db, StartSeq, UserFun, UserAcc) -> fold_changes(Db, StartSeq, UserFun, UserAcc, []). fold_changes(Db, StartSeq, UserFun, UserAcc, Opts) -> - couch_db_engine:fold_changes(Db, StartSeq, UserFun, UserAcc, Opts). + case couch_db:has_access_enabled(Db) and not couch_db:is_admin(Db) of + true -> couch_mrview:query_changes_access(Db, StartSeq, UserFun, Opts, UserAcc); + false -> couch_db_engine:fold_changes(Db, StartSeq, UserFun, UserAcc, Opts) + end. fold_purge_infos(Db, StartPurgeSeq, Fun, Acc) -> fold_purge_infos(Db, StartPurgeSeq, Fun, Acc, []). @@ -1831,7 +1981,7 @@ open_doc_revs_int(Db, IdRevs, Options) -> lists:zipwith( fun({Id, Revs}, Lookup) -> case Lookup of - #full_doc_info{rev_tree = RevTree} -> + #full_doc_info{rev_tree = RevTree, access = Access} -> {FoundRevs, MissingRevs} = case Revs of all -> @@ -1852,7 +2002,7 @@ open_doc_revs_int(Db, IdRevs, Options) -> % we have the rev in our list but know nothing about it {{not_found, missing}, {Pos, Rev}}; #leaf{deleted = IsDeleted, ptr = SummaryPtr} -> - {ok, make_doc(Db, Id, IsDeleted, SummaryPtr, FoundRevPath)} + {ok, make_doc(Db, Id, IsDeleted, SummaryPtr, FoundRevPath, Access)} end end, FoundRevs @@ -1874,23 +2024,29 @@ open_doc_revs_int(Db, IdRevs, Options) -> open_doc_int(Db, <> = Id, Options) -> case couch_db_engine:open_local_docs(Db, [Id]) of [#doc{} = Doc] -> - apply_open_options({ok, Doc}, Options); + case Doc#doc.body of + { Body } -> + Access = couch_util:get_value(<<"_access">>, Body), + apply_open_options(Db, {ok, Doc#doc{access = Access}}, Options); + _Else -> + apply_open_options(Db, {ok, Doc}, Options) + end; [not_found] -> {not_found, missing} end; -open_doc_int(Db, #doc_info{id = Id, revs = [RevInfo | _]} = DocInfo, Options) -> +open_doc_int(Db, #doc_info{id = Id, revs = [RevInfo | _], access = Access} = DocInfo, Options) -> #rev_info{deleted = IsDeleted, rev = {Pos, RevId}, body_sp = Bp} = RevInfo, - Doc = make_doc(Db, Id, IsDeleted, Bp, {Pos, [RevId]}), + Doc = make_doc(Db, Id, IsDeleted, Bp, {Pos, [RevId], Access}), apply_open_options( - {ok, Doc#doc{meta = doc_meta_info(DocInfo, [], Options)}}, Options + {ok, Doc#doc{meta = doc_meta_info(DocInfo, [], Options)}}, Options, Access ); -open_doc_int(Db, #full_doc_info{id = Id, rev_tree = RevTree} = FullDocInfo, Options) -> +open_doc_int(Db, #full_doc_info{id = Id, rev_tree = RevTree, access = Access} = FullDocInfo, Options) -> #doc_info{revs = [#rev_info{deleted = IsDeleted, rev = Rev, body_sp = Bp} | _]} = DocInfo = couch_doc:to_doc_info(FullDocInfo), {[{_, RevPath}], []} = couch_key_tree:get(RevTree, [Rev]), - Doc = make_doc(Db, Id, IsDeleted, Bp, RevPath), + Doc = make_doc(Db, Id, IsDeleted, Bp, RevPath, Access), apply_open_options( - {ok, Doc#doc{meta = doc_meta_info(DocInfo, RevTree, Options)}}, Options + {ok, Doc#doc{meta = doc_meta_info(DocInfo, RevTree, Options)}}, Options, Access ); open_doc_int(Db, Id, Options) -> case get_full_doc_info(Db, Id) of @@ -1951,21 +2107,26 @@ doc_meta_info( true -> [{local_seq, Seq}] end. -make_doc(_Db, Id, Deleted, nil = _Bp, RevisionPath) -> +make_doc(Db, Id, Deleted, Bp, {Pos, Revs}) -> + make_doc(Db, Id, Deleted, Bp, {Pos, Revs}, []). + +make_doc(_Db, Id, Deleted, nil = _Bp, RevisionPath, Access) -> #doc{ id = Id, revs = RevisionPath, body = [], atts = [], - deleted = Deleted + deleted = Deleted, + access = Access }; -make_doc(#db{} = Db, Id, Deleted, Bp, {Pos, Revs}) -> +make_doc(#db{} = Db, Id, Deleted, Bp, {Pos, Revs}, Access) -> RevsLimit = get_revs_limit(Db), Doc0 = couch_db_engine:read_doc_body(Db, #doc{ id = Id, revs = {Pos, lists:sublist(Revs, 1, RevsLimit)}, body = Bp, - deleted = Deleted + deleted = Deleted, + access = Access }), Doc1 = case Doc0#doc.atts of diff --git a/src/couch/src/couch_db_int.hrl b/src/couch/src/couch_db_int.hrl index 7da0ce5dfe2..b67686fab88 100644 --- a/src/couch/src/couch_db_int.hrl +++ b/src/couch/src/couch_db_int.hrl @@ -37,7 +37,8 @@ waiting_delayed_commit_deprecated, options = [], - compression + compression, + access = false }). diff --git a/src/couch/src/couch_db_updater.erl b/src/couch/src/couch_db_updater.erl index fac5aee1068..8c487ea4d37 100644 --- a/src/couch/src/couch_db_updater.erl +++ b/src/couch/src/couch_db_updater.erl @@ -24,6 +24,11 @@ % 10 GiB -define(DEFAULT_MAX_PARTITION_SIZE, 16#280000000). +-define(DEFAULT_SECURITY_OBJECT, [ + {<<"members">>,{[{<<"roles">>,[<<"_admin">>]}]}}, + {<<"admins">>, {[{<<"roles">>,[<<"_admin">>]}]}} +]). + -record(merge_acc, { revs_limit, merge_conflicts, @@ -36,7 +41,7 @@ init({Engine, DbName, FilePath, Options0}) -> erlang:put(io_priority, {db_update, DbName}), update_idle_limit_from_config(), - DefaultSecObj = default_security_object(DbName), + DefaultSecObj = default_security_object(DbName, Options0), Options = [{default_security_object, DefaultSecObj} | Options0], try {ok, EngineState} = couch_db_engine:init(Engine, FilePath, Options), @@ -165,7 +170,7 @@ handle_cast(Msg, #db{name = Name} = Db) -> {stop, Msg, Db}. handle_info( - {update_docs, Client, GroupedDocs, NonRepDocs, MergeConflicts}, + {update_docs, Client, GroupedDocs, NonRepDocs, MergeConflicts, UserCtx}, Db ) -> GroupedDocs2 = sort_and_tag_grouped_docs(Client, GroupedDocs), @@ -181,7 +186,7 @@ handle_info( Clients = [Client] end, NonRepDocs2 = [{Client, NRDoc} || NRDoc <- NonRepDocs], - try update_docs_int(Db, GroupedDocs3, NonRepDocs2, MergeConflicts) of + try update_docs_int(Db, GroupedDocs3, NonRepDocs2, MergeConflicts, UserCtx) of {ok, Db2, UpdatedDDocIds} -> ok = couch_server:db_updated(Db2), case {couch_db:get_update_seq(Db), couch_db:get_update_seq(Db2)} of @@ -260,7 +265,11 @@ sort_and_tag_grouped_docs(Client, GroupedDocs) -> % The merge_updates function will fail and the database can end up with % duplicate documents if the incoming groups are not sorted, so as a sanity % check we sort them again here. See COUCHDB-2735. - Cmp = fun([#doc{id = A} | _], [#doc{id = B} | _]) -> A < B end, + Cmp = fun + ([], []) -> false; % TODO: re-evaluate this addition, might be + % superflous now + ([#doc{id=A}|_], [#doc{id=B}|_]) -> A < B + end, lists:map( fun(DocGroup) -> [{Client, maybe_tag_doc(D)} || D <- DocGroup] @@ -320,6 +329,7 @@ init_db(DbName, FilePath, EngineState, Options) -> BDU = couch_util:get_value(before_doc_update, Options, nil), ADR = couch_util:get_value(after_doc_read, Options, nil), + Access = couch_util:get_value(access, Options, false), NonCreateOpts = [Opt || Opt <- Options, Opt /= create], InitDb = #db{ @@ -329,7 +339,8 @@ init_db(DbName, FilePath, EngineState, Options) -> instance_start_time = StartTime, options = NonCreateOpts, before_doc_update = BDU, - after_doc_read = ADR + after_doc_read = ADR, + access = Access }, DbProps = couch_db_engine:get_props(InitDb), @@ -390,7 +401,8 @@ flush_trees( active = WrittenSize, external = ExternalSize }, - atts = AttSizeInfo + atts = AttSizeInfo, + access = NewDoc#doc.access }, {Leaf, add_sizes(Type, Leaf, SizesAcc)}; #leaf{} -> @@ -474,6 +486,9 @@ doc_tag(#doc{meta = Meta}) -> Else -> throw({invalid_doc_tag, Else}) end. +merge_rev_trees([[]], [], Acc) -> + % validate_docs_access left us with no docs to merge + {ok, Acc}; merge_rev_trees([], [], Acc) -> {ok, Acc#merge_acc{ add_infos = lists:reverse(Acc#merge_acc.add_infos) @@ -655,22 +670,29 @@ maybe_stem_full_doc_info(#full_doc_info{rev_tree = Tree} = Info, Limit) -> Info end. -update_docs_int(Db, DocsList, LocalDocs, MergeConflicts) -> +update_docs_int(Db, DocsList, LocalDocs, MergeConflicts, UserCtx) -> UpdateSeq = couch_db_engine:get_update_seq(Db), RevsLimit = couch_db_engine:get_revs_limit(Db), - Ids = [Id || [{_Client, #doc{id = Id}} | _] <- DocsList], + Ids = [Id || [{_Client, #doc{id=Id}}|_] <- DocsList], + % TODO: maybe a perf hit, instead of zip3-ing existing Accesses into + % our doc lists, maybe find 404 docs differently down in + % validate_docs_access (revs is [], which we can then use + % to skip validation as we know it is the first doc rev) + Accesses = [Access || [{_Client, #doc{access=Access}}|_] <- DocsList], + % lookup up the old documents, if they exist. OldDocLookups = couch_db_engine:open_docs(Db, Ids), - OldDocInfos = lists:zipwith( + OldDocInfos = lists:zipwith3( fun - (_Id, #full_doc_info{} = FDI) -> + (_Id, #full_doc_info{} = FDI, _Access) -> FDI; - (Id, not_found) -> - #full_doc_info{id = Id} + (Id, not_found, Access) -> + #full_doc_info{id=Id,access=Access} end, Ids, - OldDocLookups + OldDocLookups, + Accesses ), %% Get the list of full partitions @@ -707,7 +729,14 @@ update_docs_int(Db, DocsList, LocalDocs, MergeConflicts) -> cur_seq = UpdateSeq, full_partitions = FullPartitions }, - {ok, AccOut} = merge_rev_trees(DocsList, OldDocInfos, AccIn), + % Loop over DocsList, validate_access for each OldDocInfo on Db, + %. if no OldDocInfo, then send to DocsListValidated, keep OldDocsInfo + % if valid, then send to DocsListValidated, OldDocsInfo + %. if invalid, then send_result tagged `access`(c.f. `conflict) + %. and don’t add to DLV, nor ODI + + { DocsListValidated, OldDocInfosValidated } = validate_docs_access(Db, UserCtx, DocsList, OldDocInfos), + {ok, AccOut} = merge_rev_trees(DocsListValidated, OldDocInfosValidated, AccIn), #merge_acc{ add_infos = NewFullDocInfos, rem_seqs = RemSeqs @@ -717,7 +746,8 @@ update_docs_int(Db, DocsList, LocalDocs, MergeConflicts) -> % the trees, the attachments are already written to disk) {ok, IndexFDIs} = flush_trees(Db, NewFullDocInfos, []), Pairs = pair_write_info(OldDocLookups, IndexFDIs), - LocalDocs2 = update_local_doc_revs(LocalDocs), + LocalDocs1 = apply_local_docs_access(Db, LocalDocs), + LocalDocs2 = update_local_doc_revs(LocalDocs1), {ok, Db1} = couch_db_engine:write_doc_infos(Db, Pairs, LocalDocs2), @@ -732,18 +762,87 @@ update_docs_int(Db, DocsList, LocalDocs, MergeConflicts) -> length(LocalDocs2) ), - % Check if we just updated any design documents, and update the validation - % funs if we did. + % Check if we just updated any non-access design documents, + % and update the validation funs if we did. + NonAccessIds = [Id || [{_Client, #doc{id=Id,access=[]}}|_] <- DocsList], UpdatedDDocIds = lists:flatmap( fun (<<"_design/", _/binary>> = Id) -> [Id]; (_) -> [] end, - Ids + NonAccessIds ), {ok, commit_data(Db1), UpdatedDDocIds}. +% check_access(Db, UserCtx, Access) -> +% check_access(Db, UserCtx, couch_db:has_access_enabled(Db), Access). +% +% check_access(_Db, UserCtx, false, _Access) -> +% true; + +% at this point, we already validated this Db is access enabled, so do the checks right away. +check_access(Db, UserCtx, Access) -> couch_db:check_access(Db#db{user_ctx=UserCtx}, Access). + +% TODO: looks like we go into validation here unconditionally and only check in +% check_access() whether the Db has_access_enabled(), we should do this +% here on the outside. Might be our perf issue. +% However, if it is, that means we have to speed this up as it would still +% be too slow for when access is enabled. +validate_docs_access(Db, UserCtx, DocsList, OldDocInfos) -> + case couch_db:has_access_enabled(Db) of + true -> validate_docs_access_int(Db, UserCtx, DocsList, OldDocInfos); + _Else -> { DocsList, OldDocInfos } + end. + +validate_docs_access_int(Db, UserCtx, DocsList, OldDocInfos) -> + validate_docs_access(Db, UserCtx, DocsList, OldDocInfos, [], []). + +validate_docs_access(_Db, UserCtx, [], [], DocsListValidated, OldDocInfosValidated) -> + { lists:reverse(DocsListValidated), lists:reverse(OldDocInfosValidated) }; +validate_docs_access(Db, UserCtx, [Docs | DocRest], [OldInfo | OldInfoRest], DocsListValidated, OldDocInfosValidated) -> + % loop over Docs as {Client, NewDoc} + % validate Doc + % if valid, then put back in Docs + % if not, then send_result and skip + NewDocs = lists:foldl(fun({ Client, Doc }, Acc) -> + % check if we are allowed to update the doc, skip when new doc + OldDocMatchesAccess = case OldInfo#full_doc_info.rev_tree of + [] -> true; + _ -> check_access(Db, UserCtx, OldInfo#full_doc_info.access) + end, + + NewDocMatchesAccess = check_access(Db, UserCtx, Doc#doc.access), + case OldDocMatchesAccess andalso NewDocMatchesAccess of + true -> % if valid, then send to DocsListValidated, OldDocsInfo + % and store the access context on the new doc + [{Client, Doc} | Acc]; + _Else2 -> % if invalid, then send_result tagged `access`(c.f. `conflict) + % and don’t add to DLV, nor ODI + send_result(Client, Doc, access), + Acc + end + end, [], Docs), + + { NewDocsListValidated, NewOldDocInfosValidated } = case length(NewDocs) of + 0 -> % we sent out all docs as invalid access, drop the old doc info associated with it + { [NewDocs | DocsListValidated], OldDocInfosValidated }; + _ -> + { [NewDocs | DocsListValidated], [OldInfo | OldDocInfosValidated] } + end, + validate_docs_access(Db, UserCtx, DocRest, OldInfoRest, NewDocsListValidated, NewOldDocInfosValidated). + +apply_local_docs_access(Db, Docs) -> + apply_local_docs_access1(couch_db:has_access_enabled(Db), Docs). + +apply_local_docs_access1(false, Docs) -> + Docs; +apply_local_docs_access1(true, Docs) -> + lists:map(fun({Client, #doc{access = Access, body = {Body}} = Doc}) -> + Doc1 = Doc#doc{body = {[{<<"_access">>, Access} | Body]}}, + {Client, Doc1} + end, Docs). + update_local_doc_revs(Docs) -> lists:foldl( fun({Client, Doc}, Acc) -> @@ -760,6 +859,14 @@ update_local_doc_revs(Docs) -> Docs ). +default_security_object(DbName, []) -> + default_security_object(DbName); +default_security_object(DbName, Options) -> + case lists:member({access, true}, Options) of + false -> default_security_object(DbName); + true -> ?DEFAULT_SECURITY_OBJECT + end. + increment_local_doc_revs(#doc{deleted = true} = Doc) -> {ok, Doc#doc{revs = {0, [0]}}}; increment_local_doc_revs(#doc{revs = {0, []}} = Doc) -> @@ -924,21 +1031,14 @@ get_meta_body_size(Meta) -> default_security_object(<<"shards/", _/binary>>) -> case config:get("couchdb", "default_security", "admin_only") of - "admin_only" -> - [ - {<<"members">>, {[{<<"roles">>, [<<"_admin">>]}]}}, - {<<"admins">>, {[{<<"roles">>, [<<"_admin">>]}]}} - ]; + "admin_only" -> ?DEFAULT_SECURITY_OBJECT; Everyone when Everyone == "everyone"; Everyone == "admin_local" -> [] end; default_security_object(_DbName) -> case config:get("couchdb", "default_security", "admin_only") of Admin when Admin == "admin_only"; Admin == "admin_local" -> - [ - {<<"members">>, {[{<<"roles">>, [<<"_admin">>]}]}}, - {<<"admins">>, {[{<<"roles">>, [<<"_admin">>]}]}} - ]; + ?DEFAULT_SECURITY_OBJECT; "everyone" -> [] end. From 1d7162e1f3e383461da21f3193b28b2c50588a86 Mon Sep 17 00:00:00 2001 From: Jan Lehnardt Date: Sat, 25 Jun 2022 11:10:19 +0200 Subject: [PATCH 08/32] feat(access): add util functions --- src/couch/src/couch_util.erl | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/src/couch/src/couch_util.erl b/src/couch/src/couch_util.erl index dc58e2bf60e..cb29ce8b3ef 100644 --- a/src/couch/src/couch_util.erl +++ b/src/couch/src/couch_util.erl @@ -46,6 +46,7 @@ -export([verify_hash_names/2]). -export([get_config_hash_algorithms/0]). -export([remove_sensitive_data/1]). +-export([validate_design_access/1, validate_design_access/2]). -include_lib("couch/include/couch_db.hrl"). @@ -867,3 +868,16 @@ remove_sensitive_data(KVList) -> KVList1 = lists:keyreplace(<<"password">>, 1, KVList, {<<"password">>, <<"****">>}), % some KVList entries are atoms, so test fo this too lists:keyreplace(password, 1, KVList1, {password, <<"****">>}). + +validate_design_access(DDoc) -> + validate_design_access1(DDoc, true). + +validate_design_access(Db, DDoc) -> + validate_design_access1(DDoc, couch_db:has_access_enabled(Db)). + +validate_design_access1(_DDoc, false) -> ok; +validate_design_access1(DDoc, true) -> + is_users_ddoc(DDoc). + +is_users_ddoc(#doc{access=[<<"_users">>]}) -> ok; +is_users_ddoc(_) -> throw({forbidden, <<"per-user ddoc access">>}). From 8c3e295ce9c03304250308822834309320b00c97 Mon Sep 17 00:00:00 2001 From: Jan Lehnardt Date: Sat, 25 Jun 2022 11:17:27 +0200 Subject: [PATCH 09/32] feat(access): adjust existing tests --- src/couch/test/eunit/couchdb_mrview_cors_tests.erl | 3 ++- src/couch/test/eunit/couchdb_update_conflicts_tests.erl | 2 +- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/src/couch/test/eunit/couchdb_mrview_cors_tests.erl b/src/couch/test/eunit/couchdb_mrview_cors_tests.erl index 9822542f3f7..5fa547d627a 100644 --- a/src/couch/test/eunit/couchdb_mrview_cors_tests.erl +++ b/src/couch/test/eunit/couchdb_mrview_cors_tests.erl @@ -18,6 +18,7 @@ -define(DDOC, {[ {<<"_id">>, <<"_design/foo">>}, + {<<"_access">>, [<<"user_a">>]}, {<<"shows">>, {[ {<<"bar">>, <<"function(doc, req) {return '

wosh

';}">>} @@ -97,7 +98,7 @@ should_make_shows_request(_, {Host, DbName}) -> end). create_db(backdoor, DbName) -> - {ok, Db} = couch_db:create(DbName, [?ADMIN_CTX]), + {ok, Db} = couch_db:create(DbName, [?ADMIN_CTX, {access, true}]), couch_db:close(Db); create_db(clustered, DbName) -> {ok, Status, _, _} = test_request:put(db_url(DbName), [?AUTH], ""), diff --git a/src/couch/test/eunit/couchdb_update_conflicts_tests.erl b/src/couch/test/eunit/couchdb_update_conflicts_tests.erl index 0722103a4ed..847125a50d9 100644 --- a/src/couch/test/eunit/couchdb_update_conflicts_tests.erl +++ b/src/couch/test/eunit/couchdb_update_conflicts_tests.erl @@ -19,7 +19,7 @@ -define(DOC_ID, <<"foobar">>). -define(LOCAL_DOC_ID, <<"_local/foobar">>). -define(NUM_CLIENTS, [100, 500, 1000, 2000, 5000, 10000]). --define(TIMEOUT, 20000). +-define(TIMEOUT, 100000). start() -> test_util:start_couch(). From 9d3fe316d68da55967ade592205f49e9d0a89735 Mon Sep 17 00:00:00 2001 From: Jan Lehnardt Date: Sat, 25 Jun 2022 11:28:53 +0200 Subject: [PATCH 10/32] feat(access): add mrview machinery --- src/couch_index/src/couch_index_updater.erl | 35 ++++-- src/couch_mrview/include/couch_mrview.hrl | 3 +- src/couch_mrview/src/couch_mrview.erl | 112 +++++++++++++++++- src/couch_mrview/src/couch_mrview_updater.erl | 46 ++++++- src/couch_mrview/src/couch_mrview_util.erl | 9 +- 5 files changed, 186 insertions(+), 19 deletions(-) diff --git a/src/couch_index/src/couch_index_updater.erl b/src/couch_index/src/couch_index_updater.erl index fe2150505ae..66d76062200 100644 --- a/src/couch_index/src/couch_index_updater.erl +++ b/src/couch_index/src/couch_index_updater.erl @@ -123,8 +123,8 @@ update(Idx, Mod, IdxState) -> IncludeDesign = lists:member(include_design, UpdateOpts), DocOpts = case lists:member(local_seq, UpdateOpts) of - true -> [conflicts, deleted_conflicts, local_seq]; - _ -> [conflicts, deleted_conflicts] + true -> [conflicts, deleted_conflicts, local_seq, deleted]; + _ -> [conflicts, deleted_conflicts,local_seq, deleted] end, couch_util:with_db(DbName, fun(Db) -> @@ -142,23 +142,36 @@ update(Idx, Mod, IdxState) -> end, GetInfo = fun - (#full_doc_info{id = Id, update_seq = Seq, deleted = Del} = FDI) -> - {Id, Seq, Del, couch_doc:to_doc_info(FDI)}; - (#doc_info{id = Id, high_seq = Seq, revs = [RI | _]} = DI) -> - {Id, Seq, RI#rev_info.deleted, DI} + (#full_doc_info{id=Id, update_seq=Seq, deleted=Del,access=Access}=FDI) -> + {Id, Seq, Del, couch_doc:to_doc_info(FDI), Access}; + (#doc_info{id=Id, high_seq=Seq, revs=[RI|_],access=Access}=DI) -> + {Id, Seq, RI#rev_info.deleted, DI, Access} end, LoadDoc = fun(DI) -> - {DocId, Seq, Deleted, DocInfo} = GetInfo(DI), + {DocId, Seq, Deleted, DocInfo, Access} = GetInfo(DI), case {IncludeDesign, DocId} of {false, <<"_design/", _/binary>>} -> {nil, Seq}; - _ when Deleted -> - {#doc{id = DocId, deleted = true}, Seq}; _ -> - {ok, Doc} = couch_db:open_doc_int(Db, DocInfo, DocOpts), - {Doc, Seq} + case IndexName of % TODO: move into outer case statement + <<"_design/_access">> -> + {ok, Doc} = couch_db:open_doc_int(Db, DocInfo, DocOpts), + % TODO: hande conflicted docs in _access index + % probably remove + [RevInfo|_] = DocInfo#doc_info.revs, + Doc1 = Doc#doc{ + meta = [{body_sp, RevInfo#rev_info.body_sp}], + access = Access + }, + {Doc1, Seq}; + _ when Deleted -> + {#doc{id=DocId, deleted=true}, Seq}; + _ -> + {ok, Doc} = couch_db:open_doc_int(Db, DocInfo, DocOpts), + {Doc, Seq} + end end end, diff --git a/src/couch_mrview/include/couch_mrview.hrl b/src/couch_mrview/include/couch_mrview.hrl index b31463c53fe..ef987595d91 100644 --- a/src/couch_mrview/include/couch_mrview.hrl +++ b/src/couch_mrview/include/couch_mrview.hrl @@ -83,7 +83,8 @@ conflicts, callback, sorted = true, - extra = [] + extra = [], + deleted = false }). -record(vacc, { diff --git a/src/couch_mrview/src/couch_mrview.erl b/src/couch_mrview/src/couch_mrview.erl index d8640c9032c..79b2b8bec90 100644 --- a/src/couch_mrview/src/couch_mrview.erl +++ b/src/couch_mrview/src/couch_mrview.erl @@ -13,7 +13,7 @@ -module(couch_mrview). -export([validate/2]). --export([query_all_docs/2, query_all_docs/4]). +-export([query_all_docs/2, query_all_docs/4, query_changes_access/5]). -export([query_view/3, query_view/4, query_view/6, get_view_index_pid/4]). -export([get_info/2]). -export([trigger_update/2, trigger_update/3]). @@ -259,6 +259,116 @@ query_all_docs(Db, Args) -> query_all_docs(Db, Args, Callback, Acc) when is_list(Args) -> query_all_docs(Db, to_mrargs(Args), Callback, Acc); query_all_docs(Db, Args0, Callback, Acc) -> + case couch_db:has_access_enabled(Db) and not couch_db:is_admin(Db) of + true -> query_all_docs_access(Db, Args0, Callback, Acc); + false -> query_all_docs_admin(Db, Args0, Callback, Acc) + end. +access_ddoc() -> + #doc{ + id = <<"_design/_access">>, + body = {[ + {<<"language">>,<<"_access">>}, + {<<"options">>, {[ + {<<"include_design">>, true} + ]}}, + {<<"views">>, {[ + {<<"_access_by_id">>, {[ + {<<"map">>, <<"_access/by-id-map">>}, + {<<"reduce">>, <<"_count">>} + ]}}, + {<<"_access_by_seq">>, {[ + {<<"map">>, <<"_access/by-seq-map">>}, + {<<"reduce">>, <<"_count">>} + ]}} + ]}} + ]} + }. +query_changes_access(Db, StartSeq, Fun, Options, Acc) -> + DDoc = access_ddoc(), + UserCtx = couch_db:get_user_ctx(Db), + UserName = UserCtx#user_ctx.name, + %% % TODO: add roles + Args1 = prefix_startkey_endkey(UserName, #mrargs{}, fwd), + Args2 = Args1#mrargs{deleted=true}, + Args = Args2#mrargs{reduce=false}, + %% % filter out the user-prefix from the key, so _all_docs looks normal + %% % this isn’t a separate function because I’m binding Callback0 and I don’t + %% % know the Erlang equivalent of JS’s fun.bind(this, newarg) + Callback = fun + ({meta, _}, Acc0) -> + {ok, Acc0}; % ignore for now + ({row, Props}, Acc0) -> + % turn row into FDI + Value = couch_util:get_value(value, Props), + [Owner, Seq] = couch_util:get_value(key, Props), + Rev = couch_util:get_value(rev, Value), + Deleted = couch_util:get_value(deleted, Value, false), + BodySp = couch_util:get_value(body_sp, Value), + [Pos, RevId] = string:split(?b2l(Rev), "-"), + FDI = #full_doc_info{ + id = proplists:get_value(id, Props), + rev_tree = [{list_to_integer(Pos), {?l2b(RevId), #leaf{deleted=Deleted, ptr=BodySp, seq=Seq, sizes=#size_info{}}, []}}], + deleted = Deleted, + update_seq = 0, + sizes = #size_info{}, + access = [Owner] + }, + Fun(FDI, Acc0); + (_Else, Acc0) -> + {ok, Acc0} % ignore for now + end, + VName = <<"_access_by_seq">>, + query_view(Db, DDoc, VName, Args, Callback, Acc). + +query_all_docs_access(Db, Args0, Callback0, Acc) -> + % query our not yest existing, home-grown _access view. + % use query_view for this. + DDoc = access_ddoc(), + UserCtx = couch_db:get_user_ctx(Db), + UserName = UserCtx#user_ctx.name, + Args1 = prefix_startkey_endkey(UserName, Args0, Args0#mrargs.direction), + Args = Args1#mrargs{reduce=false, extra=Args1#mrargs.extra ++ [{all_docs_access, true}]}, + Callback = fun + ({row, Props}, Acc0) -> + % filter out the user-prefix from the key, so _all_docs looks normal + % this isn’t a separate function because I’m binding Callback0 and I + % don’t know the Erlang equivalent of JS’s fun.bind(this, newarg) + [_User, Key] = proplists:get_value(key, Props), + Row0 = proplists:delete(key, Props), + Row = [{key, Key} | Row0], + Callback0({row, Row}, Acc0); + (Row, Acc0) -> + Callback0(Row, Acc0) + end, + VName = <<"_access_by_id">>, + query_view(Db, DDoc, VName, Args, Callback, Acc). + +prefix_startkey_endkey(UserName, Args, fwd) -> + #mrargs{start_key=StartKey, end_key=EndKey} = Args, + Args#mrargs { + start_key = case StartKey of + undefined -> [UserName]; + StartKey -> [UserName, StartKey] + end, + end_key = case EndKey of + undefined -> [UserName, {}]; + EndKey -> [UserName, EndKey, {}] + end + }; + +prefix_startkey_endkey(UserName, Args, rev) -> + #mrargs{start_key=StartKey, end_key=EndKey} = Args, + Args#mrargs { + end_key = case StartKey of + undefined -> [UserName]; + StartKey -> [UserName, StartKey] + end, + start_key = case EndKey of + undefined -> [UserName, {}]; + EndKey -> [UserName, EndKey, {}] + end + }. +query_all_docs_admin(Db, Args0, Callback, Acc) -> Sig = couch_util:with_db(Db, fun(WDb) -> {ok, Info} = couch_db:get_db_info(WDb), couch_index_util:hexsig(couch_hash:md5_hash(term_to_binary(Info))) diff --git a/src/couch_mrview/src/couch_mrview_updater.erl b/src/couch_mrview/src/couch_mrview_updater.erl index 969a8202800..5d58ab05d74 100644 --- a/src/couch_mrview/src/couch_mrview_updater.erl +++ b/src/couch_mrview/src/couch_mrview_updater.erl @@ -124,8 +124,9 @@ process_doc(Doc, Seq, #mrst{doc_acc = Acc} = State) when length(Acc) > 100 -> process_doc(Doc, Seq, State#mrst{doc_acc = []}); process_doc(nil, Seq, #mrst{doc_acc = Acc} = State) -> {ok, State#mrst{doc_acc = [{nil, Seq, nil} | Acc]}}; -process_doc(#doc{id = Id, deleted = true}, Seq, #mrst{doc_acc = Acc} = State) -> - {ok, State#mrst{doc_acc = [{Id, Seq, deleted} | Acc]}}; +% TODO: re-evaluate why this is commented out +% process_doc(#doc{id=Id, deleted=true}, Seq, #mrst{doc_acc=Acc}=State) -> +% {ok, State#mrst{doc_acc=[{Id, Seq, deleted} | Acc]}}; process_doc(#doc{id = Id} = Doc, Seq, #mrst{doc_acc = Acc} = State) -> {ok, State#mrst{doc_acc = [{Id, Seq, Doc} | Acc]}}. @@ -149,6 +150,14 @@ finish_update(#mrst{doc_acc = Acc} = State) -> }} end. +make_deleted_body({Props}, Meta, Seq) -> + BodySp = couch_util:get_value(body_sp, Meta), + Result = [{<<"_seq">>, Seq}, {<<"_body_sp">>, BodySp}], + case couch_util:get_value(<<"_access">>, Props) of + undefined -> Result; + Access -> [{<<"_access">>, Access} | Result] + end. + map_docs(Parent, #mrst{db_name = DbName, idx_name = IdxName} = State0) -> erlang:put(io_priority, {view_update, DbName, IdxName}), case couch_work_queue:dequeue(State0#mrst.doc_queue) of @@ -167,11 +176,38 @@ map_docs(Parent, #mrst{db_name = DbName, idx_name = IdxName} = State0) -> DocFun = fun ({nil, Seq, _}, {SeqAcc, Results}) -> {erlang:max(Seq, SeqAcc), Results}; - ({Id, Seq, deleted}, {SeqAcc, Results}) -> - {erlang:max(Seq, SeqAcc), [{Id, []} | Results]}; + ({Id, Seq, Rev, #doc{deleted=true, body=Body, meta=Meta}}, {SeqAcc, Results}) -> + % _access needs deleted docs + case IdxName of + <<"_design/_access">> -> + % splice in seq + {Start, Rev1} = Rev, + Doc = #doc{ + id = Id, + revs = {Start, [Rev1]}, + body = {make_deleted_body(Body, Meta, Seq)}, %% todo: only keep _access and add _seq + deleted = true + }, + {ok, Res} = couch_query_servers:map_doc_raw(QServer, Doc), + {erlang:max(Seq, SeqAcc), [{Id, Seq, Rev, Res} | Results]}; + _Else -> + {erlang:max(Seq, SeqAcc), [{Id, Seq, Rev, []} | Results]} + end; ({Id, Seq, Doc}, {SeqAcc, Results}) -> couch_stats:increment_counter([couchdb, mrview, map_doc]), - {ok, Res} = couch_query_servers:map_doc_raw(QServer, Doc), + % IdxName: ~p, Doc: ~p~n~n", [IdxName, Doc]), + Doc0 = case IdxName of + <<"_design/_access">> -> + % splice in seq + {Props} = Doc#doc.body, + BodySp = couch_util:get_value(body_sp, Doc#doc.meta), + Doc#doc{ + body = {Props++[{<<"_seq">>, Seq}, {<<"_body_sp">>, BodySp}]} + }; + _Else -> + Doc + end, + {ok, Res} = couch_query_servers:map_doc_raw(QServer, Doc0), {erlang:max(Seq, SeqAcc), [{Id, Res} | Results]} end, FoldFun = fun(Docs, Acc) -> diff --git a/src/couch_mrview/src/couch_mrview_util.erl b/src/couch_mrview/src/couch_mrview_util.erl index e1e75f34fd5..7991487ff6a 100644 --- a/src/couch_mrview/src/couch_mrview_util.erl +++ b/src/couch_mrview/src/couch_mrview_util.erl @@ -21,6 +21,7 @@ -export([index_file/2, compaction_file/2, open_file/1]). -export([delete_files/2, delete_index_file/2, delete_compaction_file/2]). -export([get_row_count/1, all_docs_reduce_to_count/1, reduce_to_count/1]). +-export([get_access_row_count/2]). -export([all_docs_key_opts/1, all_docs_key_opts/2, key_opts/1, key_opts/2]). -export([fold/4, fold_reduce/4]). -export([temp_view_to_ddoc/1]). @@ -437,6 +438,11 @@ reduce_to_count(Reductions) -> FinalReduction = couch_btree:final_reduce(CountReduceFun, Reductions), get_count(FinalReduction). +get_access_row_count(#mrview{btree=Bt}, UserName) -> + couch_btree:full_reduce_with_options(Bt, [ + {start_key, UserName} + ]). + fold(#mrview{btree = Bt}, Fun, Acc, Opts) -> WrapperFun = fun(KV, Reds, Acc2) -> fold_fun(Fun, expand_dups([KV], []), Reds, Acc2) @@ -479,8 +485,9 @@ validate_args(#mrst{} = State, Args0) -> ViewPartitioned = State#mrst.partitioned, Partition = get_extra(Args, partition), + AllDocsAccess = get_extra(Args, all_docs_access, false), - case {ViewPartitioned, Partition} of + case {ViewPartitioned and not AllDocsAccess, Partition} of {true, undefined} -> Msg1 = << "`partition` parameter is mandatory " From 9af77d9915bb2f5666f7f7d385bf48aa84627e74 Mon Sep 17 00:00:00 2001 From: Jan Lehnardt Date: Sat, 25 Jun 2022 11:29:19 +0200 Subject: [PATCH 11/32] feat(access): add access tests --- src/couch/test/eunit/couchdb_access_tests.erl | 0 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 src/couch/test/eunit/couchdb_access_tests.erl diff --git a/src/couch/test/eunit/couchdb_access_tests.erl b/src/couch/test/eunit/couchdb_access_tests.erl new file mode 100644 index 00000000000..e69de29bb2d From bcd3ccec14ebf26a0a6ac60e557b1b4071100b5e Mon Sep 17 00:00:00 2001 From: Jan Lehnardt Date: Mon, 27 Jun 2022 10:54:36 +0200 Subject: [PATCH 12/32] feat(access): add access handling to replicator --- src/couch_replicator/src/couch_replicator.erl | 8 ++++- .../src/couch_replicator_scheduler_job.erl | 31 ++++++++++++++----- ...couch_replicator_error_reporting_tests.erl | 6 ++-- 3 files changed, 34 insertions(+), 11 deletions(-) diff --git a/src/couch_replicator/src/couch_replicator.erl b/src/couch_replicator/src/couch_replicator.erl index 935daaa80be..ac3807d1160 100644 --- a/src/couch_replicator/src/couch_replicator.erl +++ b/src/couch_replicator/src/couch_replicator.erl @@ -77,7 +77,13 @@ replicate(PostBody, Ctx) -> false -> check_authorization(RepId, UserCtx), {ok, Listener} = rep_result_listener(RepId), - Result = do_replication_loop(Rep), + Result = case do_replication_loop(Rep) of % TODO: review why we need this + {ok, {ResultJson}} -> + {PublicRepId, _} = couch_replicator_ids:replication_id(Rep), % TODO: check with options + {ok, {[{<<"replication_id">>, ?l2b(PublicRepId)} | ResultJson]}}; + Else -> + Else + end, couch_replicator_notifier:stop(Listener), Result end. diff --git a/src/couch_replicator/src/couch_replicator_scheduler_job.erl b/src/couch_replicator/src/couch_replicator_scheduler_job.erl index 416220efd93..ac979d37cef 100644 --- a/src/couch_replicator/src/couch_replicator_scheduler_job.erl +++ b/src/couch_replicator/src/couch_replicator_scheduler_job.erl @@ -66,6 +66,8 @@ rep_starttime, src_starttime, tgt_starttime, + src_access, + tgt_access, % checkpoint timer timer, changes_queue, @@ -676,6 +678,8 @@ init_state(Rep) -> rep_starttime = StartTime, src_starttime = get_value(<<"instance_start_time">>, SourceInfo), tgt_starttime = get_value(<<"instance_start_time">>, TargetInfo), + src_access = get_value(<<"access">>, SourceInfo), + tgt_access = get_value(<<"access">>, TargetInfo), session_id = couch_uuids:random(), source_seq = SourceSeq, use_checkpoints = get_value(use_checkpoints, Options, true), @@ -788,8 +792,10 @@ do_checkpoint(State) -> rep_starttime = ReplicationStartTime, src_starttime = SrcInstanceStartTime, tgt_starttime = TgtInstanceStartTime, + src_access = SrcAccess, + tgt_access = TgtAccess, stats = Stats, - rep_details = #rep{options = Options}, + rep_details = #rep{options = Options, user_ctx = UserCtx}, session_id = SessionId } = State, case commit_to_both(Source, Target) of @@ -853,11 +859,9 @@ do_checkpoint(State) -> try {SrcRevPos, SrcRevId} = update_checkpoint( - Source, SourceLog#doc{body = NewRepHistory}, source - ), + Source, SourceLog#doc{body = NewRepHistory}, SrcAccess, UserCtx, source), {TgtRevPos, TgtRevId} = update_checkpoint( - Target, TargetLog#doc{body = NewRepHistory}, target - ), + Target, TargetLog#doc{body = NewRepHistory}, TgtAccess, UserCtx, target), NewState = State#rep_state{ checkpoint_history = NewRepHistory, committed_seq = NewTsSeq, @@ -885,8 +889,12 @@ do_checkpoint(State) -> end. update_checkpoint(Db, Doc, DbType) -> + update_checkpoint(Db, Doc, false, #user_ctx{}, DbType). +update_checkpoint(Db, Doc) -> + update_checkpoint(Db, Doc, false, #user_ctx{}). +update_checkpoint(Db, Doc, Access, UserCtx, DbType) -> try - update_checkpoint(Db, Doc) + update_checkpoint(Db, Doc, Access, UserCtx) catch throw:{checkpoint_commit_failure, Reason} -> throw( @@ -896,7 +904,14 @@ update_checkpoint(Db, Doc, DbType) -> ) end. -update_checkpoint(Db, #doc{id = LogId, body = LogBody} = Doc) -> +update_checkpoint(Db, #doc{id = LogId} = Doc0, Access, UserCtx) -> + % if db has _access, then: + % get userCtx from replication and splice into doc _access + Doc = case Access of + true -> Doc0#doc{access = [UserCtx#user_ctx.name]}; + _False -> Doc0 + end, + try case couch_replicator_api_wrap:update_doc(Db, Doc, [delay_commit]) of {ok, PosRevId} -> @@ -906,6 +921,8 @@ update_checkpoint(Db, #doc{id = LogId, body = LogBody} = Doc) -> end catch throw:conflict -> + % TODO: An admin could have changed the access on the checkpoint doc. + % However unlikely, we can handle this gracefully here. case (catch couch_replicator_api_wrap:open_doc(Db, LogId, [ejson_body])) of {ok, #doc{body = LogBody, revs = {Pos, [RevId | _]}}} -> % This means that we were able to update successfully the diff --git a/src/couch_replicator/test/eunit/couch_replicator_error_reporting_tests.erl b/src/couch_replicator/test/eunit/couch_replicator_error_reporting_tests.erl index dd6609941f7..618e3013a73 100644 --- a/src/couch_replicator/test/eunit/couch_replicator_error_reporting_tests.erl +++ b/src/couch_replicator/test/eunit/couch_replicator_error_reporting_tests.erl @@ -108,7 +108,7 @@ t_fail_changes_queue({_Ctx, {Source, Target}}) -> RepPid = couch_replicator_test_helper:get_pid(RepId), State = sys:get_state(RepPid), - ChangesQueue = element(20, State), + ChangesQueue = element(22, State), ?assert(is_process_alive(ChangesQueue)), {ok, Listener} = rep_result_listener(RepId), @@ -125,7 +125,7 @@ t_fail_changes_manager({_Ctx, {Source, Target}}) -> RepPid = couch_replicator_test_helper:get_pid(RepId), State = sys:get_state(RepPid), - ChangesManager = element(21, State), + ChangesManager = element(23, State), ?assert(is_process_alive(ChangesManager)), {ok, Listener} = rep_result_listener(RepId), @@ -142,7 +142,7 @@ t_fail_changes_reader_proc({_Ctx, {Source, Target}}) -> RepPid = couch_replicator_test_helper:get_pid(RepId), State = sys:get_state(RepPid), - ChangesReader = element(22, State), + ChangesReader = element(24, State), ?assert(is_process_alive(ChangesReader)), {ok, Listener} = rep_result_listener(RepId), From 4680045257f64e70b3805a1c79d299c6d3b897ad Mon Sep 17 00:00:00 2001 From: Jan Lehnardt Date: Mon, 27 Jun 2022 10:56:56 +0200 Subject: [PATCH 13/32] feat(access): add access handling to ddoc cache --- src/ddoc_cache/src/ddoc_cache_entry_ddocid.erl | 2 +- src/ddoc_cache/src/ddoc_cache_entry_ddocid_rev.erl | 2 +- src/ddoc_cache/src/ddoc_cache_entry_validation_funs.erl | 3 ++- 3 files changed, 4 insertions(+), 3 deletions(-) diff --git a/src/ddoc_cache/src/ddoc_cache_entry_ddocid.erl b/src/ddoc_cache/src/ddoc_cache_entry_ddocid.erl index cf40725e4a2..1b2c3db96c8 100644 --- a/src/ddoc_cache/src/ddoc_cache_entry_ddocid.erl +++ b/src/ddoc_cache/src/ddoc_cache_entry_ddocid.erl @@ -28,7 +28,7 @@ ddocid({_, DDocId}) -> DDocId. recover({DbName, DDocId}) -> - fabric:open_doc(DbName, DDocId, [ejson_body, ?ADMIN_CTX]). + fabric:open_doc(DbName, DDocId, [ejson_body, ?ADMIN_CTX, ddoc_cache]). insert({DbName, DDocId}, {ok, #doc{revs = Revs} = DDoc}) -> {Depth, [RevId | _]} = Revs, diff --git a/src/ddoc_cache/src/ddoc_cache_entry_ddocid_rev.erl b/src/ddoc_cache/src/ddoc_cache_entry_ddocid_rev.erl index 5126f52107b..ce95dfc8236 100644 --- a/src/ddoc_cache/src/ddoc_cache_entry_ddocid_rev.erl +++ b/src/ddoc_cache/src/ddoc_cache_entry_ddocid_rev.erl @@ -28,7 +28,7 @@ ddocid({_, DDocId, _}) -> DDocId. recover({DbName, DDocId, Rev}) -> - Opts = [ejson_body, ?ADMIN_CTX], + Opts = [ejson_body, ?ADMIN_CTX, ddoc_cache], {ok, [Resp]} = fabric:open_revs(DbName, DDocId, [Rev], Opts), Resp. diff --git a/src/ddoc_cache/src/ddoc_cache_entry_validation_funs.erl b/src/ddoc_cache/src/ddoc_cache_entry_validation_funs.erl index bcd122252bf..aff5f2d5ab1 100644 --- a/src/ddoc_cache/src/ddoc_cache_entry_validation_funs.erl +++ b/src/ddoc_cache/src/ddoc_cache_entry_validation_funs.erl @@ -26,7 +26,8 @@ ddocid(_) -> no_ddocid. recover(DbName) -> - {ok, DDocs} = fabric:design_docs(mem3:dbname(DbName)), + {ok, DDocs0} = fabric:design_docs(mem3:dbname(DbName)), + DDocs = lists:filter(fun couch_doc:has_no_access/1, DDocs0), Funs = lists:flatmap( fun(DDoc) -> case couch_doc:get_validate_doc_fun(DDoc) of From 92944c3960ed2625a112959fbdb1c0578dadc943 Mon Sep 17 00:00:00 2001 From: Jan Lehnardt Date: Mon, 27 Jun 2022 11:12:39 +0200 Subject: [PATCH 14/32] feat(access): add access handling to fabric --- src/fabric/src/fabric_db_info.erl | 2 ++ src/fabric/src/fabric_doc_update.erl | 12 +++++++++--- 2 files changed, 11 insertions(+), 3 deletions(-) diff --git a/src/fabric/src/fabric_db_info.erl b/src/fabric/src/fabric_db_info.erl index 5461404c508..cdd2e36c2df 100644 --- a/src/fabric/src/fabric_db_info.erl +++ b/src/fabric/src/fabric_db_info.erl @@ -113,6 +113,8 @@ merge_results(Info) -> [{disk_format_version, lists:max(X)} | Acc]; (cluster, [X], Acc) -> [{cluster, {X}} | Acc]; + (access, [X], Acc) -> + [{access, X} | Acc]; (props, Xs, Acc) -> [{props, {merge_object(Xs)}} | Acc]; (_K, _V, Acc) -> diff --git a/src/fabric/src/fabric_doc_update.erl b/src/fabric/src/fabric_doc_update.erl index b7b9e5972ec..7db2b61be77 100644 --- a/src/fabric/src/fabric_doc_update.erl +++ b/src/fabric/src/fabric_doc_update.erl @@ -421,7 +421,9 @@ doc_update1() -> {ok, StW5_3} = handle_message({rexi_EXIT, nil}, SA2, StW5_2), {stop, ReplyW5} = handle_message({rexi_EXIT, nil}, SB2, StW5_3), ?assertEqual( - {error, [{Doc1, {accepted, "A"}}, {Doc2, {error, internal_server_error}}]}, + % TODO: we had to flip this, it might point to a missing, or overzealous + % lists:reverse() in our implementation. + {error, [{Doc2,{error,internal_server_error}},{Doc1,{accepted,"A"}}]}, ReplyW5 ). @@ -452,7 +454,9 @@ doc_update2() -> handle_message({rexi_EXIT, 1}, lists:nth(3, Shards), Acc2), ?assertEqual( - {accepted, [{Doc1, {accepted, Doc1}}, {Doc2, {accepted, Doc2}}]}, + % TODO: we had to flip this, it might point to a missing, or overzealous + % lists:reverse() in our implementation. + ?assertEqual({accepted, [{Doc2,{accepted,Doc1}}, {Doc1,{accepted,Doc2}}]}, Reply ). @@ -482,7 +486,9 @@ doc_update3() -> {stop, Reply} = handle_message({ok, [{ok, Doc1}, {ok, Doc2}]}, lists:nth(3, Shards), Acc2), - ?assertEqual({ok, [{Doc1, {ok, Doc1}}, {Doc2, {ok, Doc2}}]}, Reply). + % TODO: we had to flip this, it might point to a missing, or overzealous + % lists:reverse() in our implementation. + ?assertEqual({ok, [{Doc2, {ok,Doc1}},{Doc1, {ok, Doc2}}]},Reply). handle_all_dbs_active() -> Doc1 = #doc{revs = {1, [<<"foo">>]}}, From 865e4287f1a4e6ddd4512281a78b2b74c5e70f8c Mon Sep 17 00:00:00 2001 From: Jan Lehnardt Date: Mon, 27 Jun 2022 11:14:49 +0200 Subject: [PATCH 15/32] feat(access): additional test fixes --- test/elixir/test/cookie_auth_test.exs | 2 +- test/elixir/test/security_validation_test.exs | 2 +- test/javascript/tests/security_validation.js | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/test/elixir/test/cookie_auth_test.exs b/test/elixir/test/cookie_auth_test.exs index 6e42963f07b..223a778fb20 100644 --- a/test/elixir/test/cookie_auth_test.exs +++ b/test/elixir/test/cookie_auth_test.exs @@ -318,7 +318,7 @@ defmodule CookieAuthTest do session = login("jchris", "funnybone") info = Couch.Session.info(session) assert info["userCtx"]["name"] == "jchris" - assert Enum.empty?(info["userCtx"]["roles"]) + assert info["userCtx"]["roles"] == ["_users"] jason_user_doc = jason_user_doc diff --git a/test/elixir/test/security_validation_test.exs b/test/elixir/test/security_validation_test.exs index adc282a9e04..2bb87fd836c 100644 --- a/test/elixir/test/security_validation_test.exs +++ b/test/elixir/test/security_validation_test.exs @@ -149,7 +149,7 @@ defmodule SecurityValidationTest do headers = @auth_headers[:jerry] resp = Couch.get("/_session", headers: headers) assert resp.body["userCtx"]["name"] == "jerry" - assert resp.body["userCtx"]["roles"] == [] + assert info["userCtx"]["roles"] == ["_users"] end @tag :with_db diff --git a/test/javascript/tests/security_validation.js b/test/javascript/tests/security_validation.js index 365f716e60f..b254a17bb63 100644 --- a/test/javascript/tests/security_validation.js +++ b/test/javascript/tests/security_validation.js @@ -131,7 +131,7 @@ couchTests.security_validation = function(debug) { var user = JSON.parse(resp.responseText).userCtx; T(user.name == "jerry"); // test that the roles are listed properly - TEquals(user.roles, []); + TEquals(["_users"], user.roles); // update the document From 886ab2f41c18b25a3dbc45d4dfbbab99a03798dd Mon Sep 17 00:00:00 2001 From: Jan Lehnardt Date: Sat, 23 Jul 2022 13:57:17 +0200 Subject: [PATCH 16/32] fix: make tests pass again --- src/chttpd/src/chttpd_db.erl | 18 +- src/couch/src/couch_bt_engine.erl | 14 +- src/couch/src/couch_changes.erl | 3 + src/couch/src/couch_db.erl | 13 +- src/couch/src/couch_db_updater.erl | 14 +- src/couch/src/couch_doc.erl | 9 +- src/couch/test/eunit/couchdb_access_tests.erl | 1039 +++++++++++++++++ .../eunit/couchdb_update_conflicts_tests.erl | 4 +- src/couch_index/src/couch_index_util.erl | 5 +- src/custodian/src/custodian_util.erl | 3 +- src/fabric/src/fabric_doc_update.erl | 33 +- src/mem3/src/mem3_shards.erl | 1 + 12 files changed, 1111 insertions(+), 45 deletions(-) diff --git a/src/chttpd/src/chttpd_db.erl b/src/chttpd/src/chttpd_db.erl index 46fdb444ad9..29daf567404 100644 --- a/src/chttpd/src/chttpd_db.erl +++ b/src/chttpd/src/chttpd_db.erl @@ -1972,7 +1972,7 @@ parse_shards_opt(Req) -> [ {n, parse_shards_opt("n", Req, config:get_integer("cluster", "n", 3))}, {q, parse_shards_opt("q", Req, config:get_integer("cluster", "q", 2))}, - {access, parse_shards_opt_access(chttpd:qs_value(Req, "access", false))}, + {access, parse_shards_opt("access", Req, chttpd:qs_value(Req, "access", false))}, {placement, parse_shards_opt( "placement", Req, config:get("cluster", "placement") @@ -2001,7 +2001,18 @@ parse_shards_opt("placement", Req, Default) -> throw({bad_request, Err}) end end; + + +parse_shards_opt("access", Req, Value) when is_list(Value) -> + parse_shards_opt("access", Req, list_to_existing_atom(Value)); +parse_shards_opt("access", _Req, Value) when is_boolean(Value) -> + Value; +parse_shards_opt("access", _Req, _Value) -> + Err = ?l2b(["The woopass `access` value should be a boolean."]), + throw({bad_request, Err}); + parse_shards_opt(Param, Req, Default) -> + couch_log:error("~n parse_shards_opt Param: ~p, Default: ~p~n", [Param, Default]), Val = chttpd:qs_value(Req, Param, Default), Err = ?l2b(["The `", Param, "` value should be a positive integer."]), case couch_util:validate_positive_int(Val) of @@ -2009,11 +2020,6 @@ parse_shards_opt(Param, Req, Default) -> false -> throw({bad_request, Err}) end. -parse_shards_opt_access(Value) when is_boolean(Value) -> - Value; -parse_shards_opt_access(_Value) -> - Err = ?l2b(["The `access` value should be a boolean."]), - throw({bad_request, Err}). parse_engine_opt(Req) -> case chttpd:qs_value(Req, "engine") of diff --git a/src/couch/src/couch_bt_engine.erl b/src/couch/src/couch_bt_engine.erl index 368425beba5..bd778f33b49 100644 --- a/src/couch/src/couch_bt_engine.erl +++ b/src/couch/src/couch_bt_engine.erl @@ -671,7 +671,10 @@ id_tree_split(#full_doc_info{} = Info) -> id_tree_join(Id, {HighSeq, Deleted, DiskTree}) -> % Handle old formats before data_size was added - id_tree_join(Id, {HighSeq, Deleted, #size_info{}, DiskTree, []}); + id_tree_join(Id, {HighSeq, Deleted, #size_info{}, DiskTree}); + +id_tree_join(Id, {HighSeq, Deleted, Sizes, DiskTree}) -> + id_tree_join(Id, {HighSeq, Deleted, Sizes, DiskTree, []}); id_tree_join(Id, {HighSeq, Deleted, Sizes, DiskTree, Access}) -> #full_doc_info{ id = Id, @@ -722,7 +725,9 @@ seq_tree_split(#full_doc_info{} = Info) -> {Seq, {Id, ?b2i(Del), split_sizes(SizeInfo), disk_tree(Tree), split_access(Access)}}. seq_tree_join(Seq, {Id, Del, DiskTree}) when is_integer(Del) -> - seq_tree_join(Seq, {Id, Del, {0, 0}, DiskTree, []}); + seq_tree_join(Seq, {Id, Del, {0, 0}, DiskTree}); +seq_tree_join(Seq, {Id, Del, Sizes, DiskTree}) when is_integer(Del) -> + seq_tree_join(Seq, {Id, Del, Sizes, DiskTree, []}); seq_tree_join(Seq, {Id, Del, Sizes, DiskTree, Access}) when is_integer(Del) -> #full_doc_info{ id = Id, @@ -733,6 +738,8 @@ seq_tree_join(Seq, {Id, Del, Sizes, DiskTree, Access}) when is_integer(Del) -> access = join_access(Access) }; seq_tree_join(KeySeq, {Id, RevInfos, DeletedRevInfos}) -> + seq_tree_join(KeySeq, {Id, RevInfos, DeletedRevInfos, []}); +seq_tree_join(KeySeq, {Id, RevInfos, DeletedRevInfos, Access}) -> % Older versions stored #doc_info records in the seq_tree. % Compact to upgrade. Revs = lists:map( @@ -750,7 +757,8 @@ seq_tree_join(KeySeq, {Id, RevInfos, DeletedRevInfos}) -> #doc_info{ id = Id, high_seq = KeySeq, - revs = Revs ++ DeletedRevs + revs = Revs ++ DeletedRevs, + access = Access }. seq_tree_reduce(reduce, DocInfos) -> diff --git a/src/couch/src/couch_changes.erl b/src/couch/src/couch_changes.erl index 089cda975ed..22685ba4a7a 100644 --- a/src/couch/src/couch_changes.erl +++ b/src/couch/src/couch_changes.erl @@ -688,10 +688,13 @@ maybe_get_changes_doc(_Value, _Acc) -> []. load_doc(Db, Value, Opts, DocOpts, Filter) -> + %couch_log:error("~ncouch_changes:load_doc(): Value: ~p~n", [Value]), case couch_index_util:load_doc(Db, Value, Opts) of null -> + %couch_log:error("~ncouch_changes:load_doc(): null~n", []), [{doc, null}]; Doc -> + %couch_log:error("~ncouch_changes:load_doc(): Doc: ~p~n", [Doc]), [{doc, doc_to_json(Doc, DocOpts, Filter)}] end. diff --git a/src/couch/src/couch_db.erl b/src/couch/src/couch_db.erl index 8b00d6ee0e2..b219a188df1 100644 --- a/src/couch/src/couch_db.erl +++ b/src/couch/src/couch_db.erl @@ -824,6 +824,7 @@ validate_access3(_) -> throw({forbidden, <<"can't touch this">>}). check_access(Db, #doc{access=Access}) -> check_access(Db, Access); check_access(Db, Access) -> + %couch_log:notice("~n Db.user_ctx: ~p, Access: ~p ~n", [Db#db.user_ctx, Access]), #user_ctx{ name=UserName, roles=UserRoles @@ -2036,17 +2037,19 @@ open_doc_int(Db, <> = Id, Options) -> end; open_doc_int(Db, #doc_info{id = Id, revs = [RevInfo | _], access = Access} = DocInfo, Options) -> #rev_info{deleted = IsDeleted, rev = {Pos, RevId}, body_sp = Bp} = RevInfo, - Doc = make_doc(Db, Id, IsDeleted, Bp, {Pos, [RevId], Access}), - apply_open_options( - {ok, Doc#doc{meta = doc_meta_info(DocInfo, [], Options)}}, Options, Access + Doc = make_doc(Db, Id, IsDeleted, Bp, {Pos, [RevId]}, Access), + apply_open_options(Db, + {ok, Doc#doc{meta = doc_meta_info(DocInfo, [], Options)}}, + Options ); open_doc_int(Db, #full_doc_info{id = Id, rev_tree = RevTree, access = Access} = FullDocInfo, Options) -> #doc_info{revs = [#rev_info{deleted = IsDeleted, rev = Rev, body_sp = Bp} | _]} = DocInfo = couch_doc:to_doc_info(FullDocInfo), {[{_, RevPath}], []} = couch_key_tree:get(RevTree, [Rev]), Doc = make_doc(Db, Id, IsDeleted, Bp, RevPath, Access), - apply_open_options( - {ok, Doc#doc{meta = doc_meta_info(DocInfo, RevTree, Options)}}, Options, Access + apply_open_options(Db, + {ok, Doc#doc{meta = doc_meta_info(DocInfo, RevTree, Options)}}, + Options ); open_doc_int(Db, Id, Options) -> case get_full_doc_info(Db, Id) of diff --git a/src/couch/src/couch_db_updater.erl b/src/couch/src/couch_db_updater.erl index 8c487ea4d37..6067d18b4c2 100644 --- a/src/couch/src/couch_db_updater.erl +++ b/src/couch/src/couch_db_updater.erl @@ -735,7 +735,14 @@ update_docs_int(Db, DocsList, LocalDocs, MergeConflicts, UserCtx) -> %. if invalid, then send_result tagged `access`(c.f. `conflict) %. and don’t add to DLV, nor ODI + %couch_log:notice("~nDb: ~p, UserCtx: ~p~n", [Db, UserCtx]), + + { DocsListValidated, OldDocInfosValidated } = validate_docs_access(Db, UserCtx, DocsList, OldDocInfos), + + %couch_log:notice("~nDocsListValidated: ~p, OldDocInfosValidated: ~p~n", [DocsListValidated, OldDocInfosValidated]), + + {ok, AccOut} = merge_rev_trees(DocsListValidated, OldDocInfosValidated, AccIn), #merge_acc{ add_infos = NewFullDocInfos, @@ -798,14 +805,17 @@ validate_docs_access(Db, UserCtx, DocsList, OldDocInfos) -> validate_docs_access_int(Db, UserCtx, DocsList, OldDocInfos) -> validate_docs_access(Db, UserCtx, DocsList, OldDocInfos, [], []). -validate_docs_access(_Db, UserCtx, [], [], DocsListValidated, OldDocInfosValidated) -> +validate_docs_access(_Db, _UserCtx, [], [], DocsListValidated, OldDocInfosValidated) -> { lists:reverse(DocsListValidated), lists:reverse(OldDocInfosValidated) }; validate_docs_access(Db, UserCtx, [Docs | DocRest], [OldInfo | OldInfoRest], DocsListValidated, OldDocInfosValidated) -> % loop over Docs as {Client, NewDoc} % validate Doc % if valid, then put back in Docs % if not, then send_result and skip + %couch_log:notice("~nvalidate_docs_access() UserCtx: ~p, Docs: ~p, OldInfo: ~p~n", [UserCtx, Docs, OldInfo]), NewDocs = lists:foldl(fun({ Client, Doc }, Acc) -> + %couch_log:notice("~nvalidate_docs_access lists:foldl() Doc: ~p Doc#doc.access: ~p~n", [Doc, Doc#doc.access]), + % check if we are allowed to update the doc, skip when new doc OldDocMatchesAccess = case OldInfo#full_doc_info.rev_tree of [] -> true; @@ -813,6 +823,8 @@ validate_docs_access(Db, UserCtx, [Docs | DocRest], [OldInfo | OldInfoRest], Doc end, NewDocMatchesAccess = check_access(Db, UserCtx, Doc#doc.access), + %couch_log:notice("~nvalidate_docs_access lists:foldl() OldDocMatchesAccess: ~p, NewDocMatchesAccess: ~p, andalso: ~p~n", [OldDocMatchesAccess, NewDocMatchesAccess, OldDocMatchesAccess andalso NewDocMatchesAccess]), + case OldDocMatchesAccess andalso NewDocMatchesAccess of true -> % if valid, then send to DocsListValidated, OldDocsInfo % and store the access context on the new doc diff --git a/src/couch/src/couch_doc.erl b/src/couch/src/couch_doc.erl index 61ea4cbe8cc..70d593300a7 100644 --- a/src/couch/src/couch_doc.erl +++ b/src/couch/src/couch_doc.erl @@ -351,13 +351,8 @@ transfer_fields([{<<"_conflicts">>, _} | Rest], Doc, DbName) -> transfer_fields(Rest, Doc, DbName); transfer_fields([{<<"_deleted_conflicts">>, _} | Rest], Doc, DbName) -> transfer_fields(Rest, Doc, DbName); -% special field for per doc access control, for future compatibility -transfer_fields( - [{<<"_access">>, _} = Field | Rest], - #doc{body = Fields} = Doc, - DbName -) -> - transfer_fields(Rest, Doc#doc{body = [Field | Fields]}, DbName); +transfer_fields([{<<"_access">>, Access} = Field | Rest], Doc, DbName) -> + transfer_fields(Rest, Doc#doc{access = Access}, DbName); % special fields for replication documents transfer_fields( [{<<"_replication_state">>, _} = Field | Rest], diff --git a/src/couch/test/eunit/couchdb_access_tests.erl b/src/couch/test/eunit/couchdb_access_tests.erl index e69de29bb2d..28f27ea72ee 100644 --- a/src/couch/test/eunit/couchdb_access_tests.erl +++ b/src/couch/test/eunit/couchdb_access_tests.erl @@ -0,0 +1,1039 @@ +% Licensed under the Apache License, Version 2.0 (the "License"); you may not +% use this file except in compliance with the License. You may obtain a copy of +% the License at +% +% http://www.apache.org/licenses/LICENSE-2.0 +% +% Unless required by applicable law or agreed to in writing, software +% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +% License for the specific language governing permissions and limitations under +% the License. + +-module(couchdb_access_tests). + +-include_lib("couch/include/couch_eunit.hrl"). + +-define(CONTENT_JSON, {"Content-Type", "application/json"}). +-define(ADMIN_REQ_HEADERS, [?CONTENT_JSON, {basic_auth, {"a", "a"}}]). +-define(USERX_REQ_HEADERS, [?CONTENT_JSON, {basic_auth, {"x", "x"}}]). +-define(USERY_REQ_HEADERS, [?CONTENT_JSON, {basic_auth, {"y", "y"}}]). +-define(SECURITY_OBJECT, {[ + {<<"members">>,{[{<<"roles">>,[<<"_admin">>, <<"_users">>]}]}}, + {<<"admins">>, {[{<<"roles">>,[<<"_admin">>]}]}} +]}). + +url() -> + Addr = config:get("httpd", "bind_address", "127.0.0.1"), + lists:concat(["http://", Addr, ":", port()]). + +before_each(_) -> + R = test_request:put(url() ++ "/db?q=1&n=1&access=true", ?ADMIN_REQ_HEADERS, ""), + %?debugFmt("~nRequest: ~p~n", [R]), + {ok, 201, _, _} = R, + {ok, _, _, _} = test_request:put(url() ++ "/db/_security", ?ADMIN_REQ_HEADERS, jiffy:encode(?SECURITY_OBJECT)), + url(). + +after_each(_, Url) -> + {ok, 200, _, _} = test_request:delete(Url ++ "/db", ?ADMIN_REQ_HEADERS), + {_, _, _, _} = test_request:delete(Url ++ "/db2", ?ADMIN_REQ_HEADERS), + {_, _, _, _} = test_request:delete(Url ++ "/db3", ?ADMIN_REQ_HEADERS), + ok. + +before_all() -> + Couch = test_util:start_couch([chttpd, couch_replicator]), + Hashed = couch_passwords:hash_admin_password("a"), + ok = config:set("admins", "a", binary_to_list(Hashed), _Persist=false), + ok = config:set("couchdb", "uuid", "21ac467c1bc05e9d9e9d2d850bb1108f", _Persist=false), + ok = config:set("log", "level", "debug", _Persist=false), + + % cleanup and setup + {ok, _, _, _} = test_request:delete(url() ++ "/db", ?ADMIN_REQ_HEADERS), + % {ok, _, _, _} = test_request:put(url() ++ "/db?q=1&n=1&access=true", ?ADMIN_REQ_HEADERS, ""), + + % create users + UserDbUrl = url() ++ "/_users?q=1&n=1", + {ok, _, _, _} = test_request:delete(UserDbUrl, ?ADMIN_REQ_HEADERS, ""), + {ok, 201, _, _} = test_request:put(UserDbUrl, ?ADMIN_REQ_HEADERS, ""), + + UserXDocUrl = url() ++ "/_users/org.couchdb.user:x", + UserXDocBody = "{ \"name\":\"x\", \"roles\": [], \"password\":\"x\", \"type\": \"user\" }", + {ok, 201, _, _} = test_request:put(UserXDocUrl, ?ADMIN_REQ_HEADERS, UserXDocBody), + + UserYDocUrl = url() ++ "/_users/org.couchdb.user:y", + UserYDocBody = "{ \"name\":\"y\", \"roles\": [], \"password\":\"y\", \"type\": \"user\" }", + {ok, 201, _, _} = test_request:put(UserYDocUrl, ?ADMIN_REQ_HEADERS, UserYDocBody), + Couch. + +after_all(_) -> + UserDbUrl = url() ++ "/_users", + {ok, _, _, _} = test_request:delete(UserDbUrl, ?ADMIN_REQ_HEADERS, ""), + ok = test_util:stop_couch(done). + +access_test_() -> + Tests = [ + % Doc creation + fun should_not_let_anonymous_user_create_doc/2, + fun should_let_admin_create_doc_with_access/2, + fun should_let_admin_create_doc_without_access/2, + fun should_let_user_create_doc_for_themselves/2, + fun should_not_let_user_create_doc_for_someone_else/2, + fun should_let_user_create_access_ddoc/2, + fun access_ddoc_should_have_no_effects/2, + + % Doc updates + fun users_with_access_can_update_doc/2, + fun users_without_access_can_not_update_doc/2, + fun users_with_access_can_not_change_access/2, + fun users_with_access_can_not_remove_access/2, + + % Doc reads + fun should_let_admin_read_doc_with_access/2, + fun user_with_access_can_read_doc/2, + fun user_without_access_can_not_read_doc/2, + fun user_can_not_read_doc_without_access/2, + fun admin_with_access_can_read_conflicted_doc/2, + fun user_with_access_can_not_read_conflicted_doc/2, + + % Doc deletes + fun should_let_admin_delete_doc_with_access/2, + fun should_let_user_delete_doc_for_themselves/2, + fun should_not_let_user_delete_doc_for_someone_else/2, + + % _all_docs with include_docs + fun should_let_admin_fetch_all_docs/2, + fun should_let_user_fetch_their_own_all_docs/2, + + + % _changes + fun should_let_admin_fetch_changes/2, + fun should_let_user_fetch_their_own_changes/2, + + % views + fun should_not_allow_admin_access_ddoc_view_request/2, + fun should_not_allow_user_access_ddoc_view_request/2, + fun should_allow_admin_users_access_ddoc_view_request/2, + fun should_allow_user_users_access_ddoc_view_request/2, + + % replication + fun should_allow_admin_to_replicate_from_access_to_access/2, + fun should_allow_admin_to_replicate_from_no_access_to_access/2, + fun should_allow_admin_to_replicate_from_access_to_no_access/2, + fun should_allow_admin_to_replicate_from_no_access_to_no_access/2, + % + fun should_allow_user_to_replicate_from_access_to_access/2, + fun should_allow_user_to_replicate_from_access_to_no_access/2, + fun should_allow_user_to_replicate_from_no_access_to_access/2, + fun should_allow_user_to_replicate_from_no_access_to_no_access/2, + + % _revs_diff for docs you don’t have access to + fun should_not_allow_user_to_revs_diff_other_docs/2 + + + % TODO: create test db with role and not _users in _security.members + % and make sure a user in that group can access while a user not + % in that group cant + % % potential future feature + % % fun should_let_user_fetch_their_own_all_docs_plus_users_ddocs/2%, + ], + { + "Access tests", + { + setup, + fun before_all/0, fun after_all/1, + [ + make_test_cases(clustered, Tests) + ] + } + }. + +make_test_cases(Mod, Funs) -> + { + lists:flatten(io_lib:format("~s", [Mod])), + {foreachx, fun before_each/1, fun after_each/2, [{Mod, Fun} || Fun <- Funs]} + }. + +% Doc creation + % http://127.0.0.1:64903/db/a?revs=true&open_revs=%5B%221-23202479633c2b380f79507a776743d5%22%5D&latest=true + +% should_do_the_thing(_PortType, Url) -> +% ?_test(begin +% {ok, _, _, _} = test_request:put(Url ++ "/db/a", +% ?ADMIN_REQ_HEADERS, "{\"a\":1,\"_access\":[\"x\"]}"), +% {ok, Code, _, _} = test_request:get(Url ++ "/db/a?revs=true&open_revs=%5B%221-23202479633c2b380f79507a776743d5%22%5D&latest=true", +% ?USERX_REQ_HEADERS), +% ?assertEqual(200, Code) +% end). +% + +should_not_let_anonymous_user_create_doc(_PortType, Url) -> + % TODO: debugging leftover + % BulkDocsBody = {[ + % {<<"docs">>, [ + % {[{<<"_id">>, <<"a">>}]}, + % {[{<<"_id">>, <<"a">>}]}, + % {[{<<"_id">>, <<"b">>}]}, + % {[{<<"_id">>, <<"c">>}]} + % ]} + % ]}, + % Resp = test_request:post(Url ++ "/db/_bulk_docs", ?ADMIN_REQ_HEADERS, jiffy:encode(BulkDocsBody)), + % ?debugFmt("~nResp: ~p~n", [Resp]), + {ok, Code, _, _} = test_request:put(Url ++ "/db/a", "{\"a\":1,\"_access\":[\"x\"]}"), + ?_assertEqual(401, Code). + +should_let_admin_create_doc_with_access(_PortType, Url) -> + {ok, Code, _, _} = test_request:put(Url ++ "/db/a", + ?ADMIN_REQ_HEADERS, "{\"a\":1,\"_access\":[\"x\"]}"), + ?_assertEqual(201, Code). + +should_let_admin_create_doc_without_access(_PortType, Url) -> + {ok, Code, _, _} = test_request:put(Url ++ "/db/a", + ?ADMIN_REQ_HEADERS, "{\"a\":1}"), + ?_assertEqual(201, Code). + +should_let_user_create_doc_for_themselves(_PortType, Url) -> + {ok, Code, _, _} = test_request:put(Url ++ "/db/b", + ?USERX_REQ_HEADERS, "{\"a\":1,\"_access\":[\"x\"]}"), + ?_assertEqual(201, Code). + +should_not_let_user_create_doc_for_someone_else(_PortType, Url) -> + {ok, Code, _, _} = test_request:put(Url ++ "/db/c", + ?USERY_REQ_HEADERS, "{\"a\":1,\"_access\":[\"x\"]}"), + ?_assertEqual(403, Code). + +should_let_user_create_access_ddoc(_PortType, Url) -> + {ok, Code, _, _} = test_request:put(Url ++ "/db/_design/dx", + ?USERX_REQ_HEADERS, "{\"a\":1,\"_access\":[\"x\"]}"), + ?_assertEqual(201, Code). + +access_ddoc_should_have_no_effects(_PortType, Url) -> + ?_test(begin + Ddoc = "{ \"_access\":[\"x\"], \"validate_doc_update\": \"function(newDoc, oldDoc, userCtx) { throw({unauthorized: 'throw error'})}\", \"views\": { \"foo\": { \"map\": \"function(doc) { emit(doc._id) }\" } }, \"shows\": { \"boo\": \"function() {}\" }, \"lists\": { \"hoo\": \"function() {}\" }, \"update\": { \"goo\": \"function() {}\" }, \"filters\": { \"loo\": \"function() {}\" } }", + {ok, Code, _, _} = test_request:put(Url ++ "/db/_design/dx", + ?USERX_REQ_HEADERS, Ddoc), + ?assertEqual(201, Code), + {ok, Code1, _, _} = test_request:put(Url ++ "/db/b", + ?USERX_REQ_HEADERS, "{\"a\":1,\"_access\":[\"x\"]}"), + ?assertEqual(201, Code1), + {ok, Code2, _, _} = test_request:get(Url ++ "/db/_design/dx/_view/foo", + ?USERX_REQ_HEADERS), + ?assertEqual(404, Code2), + {ok, Code3, _, _} = test_request:get(Url ++ "/db/_design/dx/_show/boo/b", + ?USERX_REQ_HEADERS), + ?assertEqual(404, Code3), + {ok, Code4, _, _} = test_request:get(Url ++ "/db/_design/dx/_list/hoo/foo", + ?USERX_REQ_HEADERS), + ?assertEqual(404, Code4), + {ok, Code5, _, _} = test_request:post(Url ++ "/db/_design/dx/_update/goo", + ?USERX_REQ_HEADERS, ""), + ?assertEqual(404, Code5), + {ok, Code6, _, _} = test_request:get(Url ++ "/db/_changes?filter=dx/loo", + ?USERX_REQ_HEADERS), + ?assertEqual(404, Code6), + {ok, Code7, _, _} = test_request:get(Url ++ "/db/_changes?filter=_view&view=dx/foo", + ?USERX_REQ_HEADERS), + ?assertEqual(404, Code7) + end). + +% Doc updates + +users_with_access_can_update_doc(_PortType, Url) -> + {ok, _, _, Body} = test_request:put(Url ++ "/db/b", + ?USERX_REQ_HEADERS, "{\"a\":1,\"_access\":[\"x\"]}"), + {Json} = jiffy:decode(Body), + Rev = couch_util:get_value(<<"rev">>, Json), + {ok, Code, _, _} = test_request:put(Url ++ "/db/b", + ?USERX_REQ_HEADERS, + "{\"a\":2,\"_access\":[\"x\"],\"_rev\":\"" ++ binary_to_list(Rev) ++ "\"}"), + ?_assertEqual(201, Code). + +users_without_access_can_not_update_doc(_PortType, Url) -> + {ok, _, _, Body} = test_request:put(Url ++ "/db/b", + ?USERX_REQ_HEADERS, "{\"a\":1,\"_access\":[\"x\"]}"), + {Json} = jiffy:decode(Body), + Rev = couch_util:get_value(<<"rev">>, Json), + {ok, Code, _, _} = test_request:put(Url ++ "/db/b", + ?USERY_REQ_HEADERS, + "{\"a\":2,\"_access\":[\"y\"],\"_rev\":\"" ++ binary_to_list(Rev) ++ "\"}"), + ?_assertEqual(403, Code). + +users_with_access_can_not_change_access(_PortType, Url) -> + {ok, _, _, Body} = test_request:put(Url ++ "/db/b", + ?USERX_REQ_HEADERS, "{\"a\":1,\"_access\":[\"x\"]}"), + {Json} = jiffy:decode(Body), + Rev = couch_util:get_value(<<"rev">>, Json), + {ok, Code, _, _} = test_request:put(Url ++ "/db/b", + ?USERX_REQ_HEADERS, + "{\"a\":2,\"_access\":[\"y\"],\"_rev\":\"" ++ binary_to_list(Rev) ++ "\"}"), + ?_assertEqual(403, Code). + +users_with_access_can_not_remove_access(_PortType, Url) -> + {ok, _, _, Body} = test_request:put(Url ++ "/db/b", + ?USERX_REQ_HEADERS, "{\"a\":1,\"_access\":[\"x\"]}"), + {Json} = jiffy:decode(Body), + Rev = couch_util:get_value(<<"rev">>, Json), + {ok, Code, _, _} = test_request:put(Url ++ "/db/b", + ?USERX_REQ_HEADERS, + "{\"a\":2,\"_rev\":\"" ++ binary_to_list(Rev) ++ "\"}"), + ?_assertEqual(403, Code). + +% Doc reads + +should_let_admin_read_doc_with_access(_PortType, Url) -> + {ok, 201, _, _} = test_request:put(Url ++ "/db/a", + ?USERX_REQ_HEADERS, "{\"a\":1,\"_access\":[\"x\"]}"), + {ok, Code, _, _} = test_request:get(Url ++ "/db/a", + ?ADMIN_REQ_HEADERS), + ?_assertEqual(200, Code). + +user_with_access_can_read_doc(_PortType, Url) -> + {ok, 201, _, _} = test_request:put(Url ++ "/db/a", + ?ADMIN_REQ_HEADERS, "{\"a\":1,\"_access\":[\"x\"]}"), + {ok, Code, _, _} = test_request:get(Url ++ "/db/a", + ?USERX_REQ_HEADERS), + ?_assertEqual(200, Code). + +user_with_access_can_not_read_conflicted_doc(_PortType, Url) -> + {ok, 201, _, _} = test_request:put(Url ++ "/db/a", + ?ADMIN_REQ_HEADERS, "{\"_id\":\"f1\",\"a\":1,\"_access\":[\"x\"]}"), + {ok, 201, _, _} = test_request:put(Url ++ "/db/a?new_edits=false", + ?ADMIN_REQ_HEADERS, "{\"_id\":\"f1\",\"_rev\":\"7-XYZ\",\"a\":1,\"_access\":[\"x\"]}"), + {ok, Code, _, _} = test_request:get(Url ++ "/db/a", + ?USERX_REQ_HEADERS), + ?_assertEqual(403, Code). + +admin_with_access_can_read_conflicted_doc(_PortType, Url) -> + {ok, 201, _, _} = test_request:put(Url ++ "/db/a", + ?ADMIN_REQ_HEADERS, "{\"_id\":\"a\",\"a\":1,\"_access\":[\"x\"]}"), + {ok, 201, _, _} = test_request:put(Url ++ "/db/a?new_edits=false", + ?ADMIN_REQ_HEADERS, "{\"_id\":\"a\",\"_rev\":\"7-XYZ\",\"a\":1,\"_access\":[\"x\"]}"), + {ok, Code, _, _} = test_request:get(Url ++ "/db/a", + ?ADMIN_REQ_HEADERS), + ?_assertEqual(200, Code). + +user_without_access_can_not_read_doc(_PortType, Url) -> + {ok, 201, _, _} = test_request:put(Url ++ "/db/a", + ?ADMIN_REQ_HEADERS, "{\"a\":1,\"_access\":[\"x\"]}"), + {ok, Code, _, _} = test_request:get(Url ++ "/db/a", + ?USERY_REQ_HEADERS), + ?_assertEqual(403, Code). + +user_can_not_read_doc_without_access(_PortType, Url) -> + {ok, 201, _, _} = test_request:put(Url ++ "/db/a", + ?ADMIN_REQ_HEADERS, "{\"a\":1}"), + {ok, Code, _, _} = test_request:get(Url ++ "/db/a", + ?USERX_REQ_HEADERS), + ?_assertEqual(403, Code). + +% Doc deletes + +should_let_admin_delete_doc_with_access(_PortType, Url) -> + {ok, 201, _, _} = test_request:put(Url ++ "/db/a", + ?USERX_REQ_HEADERS, "{\"a\":1,\"_access\":[\"x\"]}"), + {ok, Code, _, _} = test_request:delete(Url ++ "/db/a?rev=1-23202479633c2b380f79507a776743d5", + ?ADMIN_REQ_HEADERS), + ?_assertEqual(200, Code). + +should_let_user_delete_doc_for_themselves(_PortType, Url) -> + {ok, 201, _, _} = test_request:put(Url ++ "/db/a", + ?USERX_REQ_HEADERS, "{\"a\":1,\"_access\":[\"x\"]}"), + {ok, _, _, _} = test_request:get(Url ++ "/db/a", + ?USERX_REQ_HEADERS), + {ok, Code, _, _} = test_request:delete(Url ++ "/db/a?rev=1-23202479633c2b380f79507a776743d5", + ?USERX_REQ_HEADERS), + ?_assertEqual(200, Code). + +should_not_let_user_delete_doc_for_someone_else(_PortType, Url) -> + {ok, 201, _, _} = test_request:put(Url ++ "/db/a", + ?USERX_REQ_HEADERS, "{\"a\":1,\"_access\":[\"x\"]}"), + {ok, Code, _, _} = test_request:delete(Url ++ "/db/a?rev=1-23202479633c2b380f79507a776743d5", + ?USERY_REQ_HEADERS), + ?_assertEqual(403, Code). + +% _all_docs with include_docs + +should_let_admin_fetch_all_docs(_PortType, Url) -> + {ok, 201, _, _} = test_request:put(Url ++ "/db/a", + ?ADMIN_REQ_HEADERS, "{\"a\":1,\"_access\":[\"x\"]}"), + {ok, 201, _, _} = test_request:put(Url ++ "/db/b", + ?ADMIN_REQ_HEADERS, "{\"b\":2,\"_access\":[\"x\"]}"), + {ok, 201, _, _} = test_request:put(Url ++ "/db/c", + ?ADMIN_REQ_HEADERS, "{\"c\":3,\"_access\":[\"y\"]}"), + {ok, 201, _, _} = test_request:put(Url ++ "/db/d", + ?ADMIN_REQ_HEADERS, "{\"d\":4,\"_access\":[\"y\"]}"), + {ok, 200, _, Body} = test_request:get(Url ++ "/db/_all_docs?include_docs=true", + ?ADMIN_REQ_HEADERS), + {Json} = jiffy:decode(Body), + ?_assertEqual(4, proplists:get_value(<<"total_rows">>, Json)). + +should_let_user_fetch_their_own_all_docs(_PortType, Url) -> + ?_test(begin + {ok, 201, _, _} = test_request:put(Url ++ "/db/a", + ?ADMIN_REQ_HEADERS, "{\"a\":1,\"_access\":[\"x\"]}"), + {ok, 201, _, _} = test_request:put(Url ++ "/db/b", + ?USERX_REQ_HEADERS, "{\"b\":2,\"_access\":[\"x\"]}"), + {ok, 201, _, _} = test_request:put(Url ++ "/db/c", + ?ADMIN_REQ_HEADERS, "{\"c\":3,\"_access\":[\"y\"]}"), + {ok, 201, _, _} = test_request:put(Url ++ "/db/d", + ?USERY_REQ_HEADERS, "{\"d\":4,\"_access\":[\"y\"]}"), + {ok, 200, _, Body} = test_request:get(Url ++ "/db/_all_docs?include_docs=true", + ?USERX_REQ_HEADERS), + {Json} = jiffy:decode(Body), + Rows = proplists:get_value(<<"rows">>, Json), + ?assertEqual([{[{<<"id">>,<<"a">>}, + {<<"key">>,<<"a">>}, + {<<"value">>,<<"1-23202479633c2b380f79507a776743d5">>}, + {<<"doc">>, + {[{<<"_id">>,<<"a">>}, + {<<"_rev">>,<<"1-23202479633c2b380f79507a776743d5">>}, + {<<"a">>,1}, + {<<"_access">>,[<<"x">>]}]}}]}, + {[{<<"id">>,<<"b">>}, + {<<"key">>,<<"b">>}, + {<<"value">>,<<"1-d33fb05384fa65a8081da2046595de0f">>}, + {<<"doc">>, + {[{<<"_id">>,<<"b">>}, + {<<"_rev">>,<<"1-d33fb05384fa65a8081da2046595de0f">>}, + {<<"b">>,2}, + {<<"_access">>,[<<"x">>]}]}}]}], Rows), + ?assertEqual(2, length(Rows)), + ?assertEqual(4, proplists:get_value(<<"total_rows">>, Json)), + + {ok, 200, _, Body1} = test_request:get(Url ++ "/db/_all_docs?include_docs=true", + ?USERY_REQ_HEADERS), + {Json1} = jiffy:decode(Body1), + ?assertEqual( [{<<"total_rows">>,4}, + {<<"offset">>,2}, + {<<"rows">>, + [{[{<<"id">>,<<"c">>}, + {<<"key">>,<<"c">>}, + {<<"value">>,<<"1-92aef5b0e4a3f4db0aba1320869bc95d">>}, + {<<"doc">>, + {[{<<"_id">>,<<"c">>}, + {<<"_rev">>,<<"1-92aef5b0e4a3f4db0aba1320869bc95d">>}, + {<<"c">>,3}, + {<<"_access">>,[<<"y">>]}]}}]}, + {[{<<"id">>,<<"d">>}, + {<<"key">>,<<"d">>}, + {<<"value">>,<<"1-ae984f6550038b1ed1565ac4b6cd8c5d">>}, + {<<"doc">>, + {[{<<"_id">>,<<"d">>}, + {<<"_rev">>,<<"1-ae984f6550038b1ed1565ac4b6cd8c5d">>}, + {<<"d">>,4}, + {<<"_access">>,[<<"y">>]}]}}]}]}], Json1) + end). + + +% _changes + +should_let_admin_fetch_changes(_PortType, Url) -> + {ok, 201, _, _} = test_request:put(Url ++ "/db/a", + ?ADMIN_REQ_HEADERS, "{\"a\":1,\"_access\":[\"x\"]}"), + {ok, 201, _, _} = test_request:put(Url ++ "/db/b", + ?ADMIN_REQ_HEADERS, "{\"b\":2,\"_access\":[\"x\"]}"), + {ok, 201, _, _} = test_request:put(Url ++ "/db/c", + ?ADMIN_REQ_HEADERS, "{\"c\":3,\"_access\":[\"y\"]}"), + {ok, 201, _, _} = test_request:put(Url ++ "/db/d", + ?ADMIN_REQ_HEADERS, "{\"d\":4,\"_access\":[\"y\"]}"), + {ok, 200, _, Body} = test_request:get(Url ++ "/db/_changes", + ?ADMIN_REQ_HEADERS), + {Json} = jiffy:decode(Body), + AmountOfDocs = length(proplists:get_value(<<"results">>, Json)), + ?_assertEqual(4, AmountOfDocs). + +should_let_user_fetch_their_own_changes(_PortType, Url) -> + ?_test(begin + {ok, 201, _, _} = test_request:put(Url ++ "/db/a", + ?ADMIN_REQ_HEADERS, "{\"a\":1,\"_access\":[\"x\"]}"), + {ok, 201, _, _} = test_request:put(Url ++ "/db/b", + ?ADMIN_REQ_HEADERS, "{\"b\":2,\"_access\":[\"x\"]}"), + {ok, 201, _, _} = test_request:put(Url ++ "/db/c", + ?ADMIN_REQ_HEADERS, "{\"c\":3,\"_access\":[\"y\"]}"), + {ok, 201, _, _} = test_request:put(Url ++ "/db/d", + ?ADMIN_REQ_HEADERS, "{\"d\":4,\"_access\":[\"y\"]}"), + {ok, 200, _, Body} = test_request:get(Url ++ "/db/_changes", + ?USERX_REQ_HEADERS), + {Json} = jiffy:decode(Body), + ?assertMatch([{<<"results">>, + [{[{<<"seq">>, + <<"2-", _/binary>>}, + {<<"id">>,<<"a">>}, + {<<"changes">>, + [{[{<<"rev">>,<<"1-23202479633c2b380f79507a776743d5">>}]}]}]}, + {[{<<"seq">>, + <<"3-", _/binary>>}, + {<<"id">>,<<"b">>}, + {<<"changes">>, + [{[{<<"rev">>,<<"1-d33fb05384fa65a8081da2046595de0f">>}]}]}]}]}, + {<<"last_seq">>, + <<"3-", _/binary>>}, + {<<"pending">>,2}], Json), + AmountOfDocs = length(proplists:get_value(<<"results">>, Json)), + ?assertEqual(2, AmountOfDocs) + end). + +% views + +should_not_allow_admin_access_ddoc_view_request(_PortType, Url) -> + DDoc = "{\"a\":1,\"_access\":[\"x\"],\"views\":{\"foo\":{\"map\":\"function() {}\"}}}", + {ok, Code, _, _} = test_request:put(Url ++ "/db/_design/a", + ?ADMIN_REQ_HEADERS, DDoc), + ?assertEqual(201, Code), + {ok, Code1, _, _} = test_request:get(Url ++ "/db/_design/a/_view/foo", + ?ADMIN_REQ_HEADERS), + ?_assertEqual(404, Code1). + +should_not_allow_user_access_ddoc_view_request(_PortType, Url) -> + DDoc = "{\"a\":1,\"_access\":[\"x\"],\"views\":{\"foo\":{\"map\":\"function() {}\"}}}", + {ok, Code, _, _} = test_request:put(Url ++ "/db/_design/a", + ?ADMIN_REQ_HEADERS, DDoc), + ?assertEqual(201, Code), + {ok, Code1, _, _} = test_request:get(Url ++ "/db/_design/a/_view/foo", + ?USERX_REQ_HEADERS), + ?_assertEqual(404, Code1). + +should_allow_admin_users_access_ddoc_view_request(_PortType, Url) -> + DDoc = "{\"a\":1,\"_access\":[\"_users\"],\"views\":{\"foo\":{\"map\":\"function() {}\"}}}", + {ok, Code, _, _} = test_request:put(Url ++ "/db/_design/a", + ?ADMIN_REQ_HEADERS, DDoc), + ?assertEqual(201, Code), + {ok, Code1, _, _} = test_request:get(Url ++ "/db/_design/a/_view/foo", + ?ADMIN_REQ_HEADERS), + ?_assertEqual(200, Code1). + +should_allow_user_users_access_ddoc_view_request(_PortType, Url) -> + DDoc = "{\"a\":1,\"_access\":[\"_users\"],\"views\":{\"foo\":{\"map\":\"function() {}\"}}}", + {ok, Code, _, _} = test_request:put(Url ++ "/db/_design/a", + ?ADMIN_REQ_HEADERS, DDoc), + ?assertEqual(201, Code), + {ok, Code1, _, _} = test_request:get(Url ++ "/db/_design/a/_view/foo", + ?USERX_REQ_HEADERS), + ?_assertEqual(200, Code1). + +% replication + +should_allow_admin_to_replicate_from_access_to_access(_PortType, Url) -> + ?_test(begin + % create target db + {ok, 201, _, _} = test_request:put(url() ++ "/db2?q=1&n=1&access=true", + ?ADMIN_REQ_HEADERS, ""), + % set target db security + {ok, _, _, _} = test_request:put(url() ++ "/db2/_security", + ?ADMIN_REQ_HEADERS, jiffy:encode(?SECURITY_OBJECT)), + + % create source docs + {ok, _, _, _} = test_request:put(Url ++ "/db/a", + ?ADMIN_REQ_HEADERS, "{\"a\":1,\"_access\":[\"x\"]}"), + {ok, _, _, _} = test_request:put(Url ++ "/db/b", + ?ADMIN_REQ_HEADERS, "{\"b\":2,\"_access\":[\"x\"]}"), + {ok, _, _, _} = test_request:put(Url ++ "/db/c", + ?ADMIN_REQ_HEADERS, "{\"c\":3,\"_access\":[\"x\"]}"), + + % replicate + AdminUrl = string:replace(Url, "http://", "http://a:a@"), + EJRequestBody = {[ + {<<"source">>, list_to_binary(AdminUrl ++ "/db")}, + {<<"target">>, list_to_binary(AdminUrl ++ "/db2")} + ]}, + {ok, ResponseCode, _, ResponseBody} = test_request:post(Url ++ "/_replicate", + ?ADMIN_REQ_HEADERS, jiffy:encode(EJRequestBody)), + + % assert replication status + {EJResponseBody} = jiffy:decode(ResponseBody), + ?assertEqual(ResponseCode, 200), + ?assertEqual(true, couch_util:get_value(<<"ok">>, EJResponseBody)), + [{History}] = couch_util:get_value(<<"history">>, EJResponseBody), + + MissingChecked = couch_util:get_value(<<"missing_checked">>, History), + MissingFound = couch_util:get_value(<<"missing_found">>, History), + DocsReard = couch_util:get_value(<<"docs_read">>, History), + DocsWritten = couch_util:get_value(<<"docs_written">>, History), + DocWriteFailures = couch_util:get_value(<<"doc_write_failures">>, History), + + ?assertEqual(3, MissingChecked), + ?assertEqual(3, MissingFound), + ?assertEqual(3, DocsReard), + ?assertEqual(3, DocsWritten), + ?assertEqual(0, DocWriteFailures), + + % assert docs in target db + {ok, 200, _, ADBody} = test_request:get(Url ++ "/db2/_all_docs?include_docs=true", + ?ADMIN_REQ_HEADERS), + {Json} = jiffy:decode(ADBody), + ?assertEqual(3, proplists:get_value(<<"total_rows">>, Json)) + end). + +should_allow_admin_to_replicate_from_no_access_to_access(_PortType, Url) -> + ?_test(begin + % create target db + {ok, 201, _, _} = test_request:put(url() ++ "/db2?q=1&n=1", + ?ADMIN_REQ_HEADERS, ""), + % set target db security + {ok, _, _, _} = test_request:put(url() ++ "/db2/_security", + ?ADMIN_REQ_HEADERS, jiffy:encode(?SECURITY_OBJECT)), + + % create source docs + {ok, _, _, _} = test_request:put(Url ++ "/db2/a", + ?ADMIN_REQ_HEADERS, "{\"a\":1,\"_access\":[\"x\"]}"), + {ok, _, _, _} = test_request:put(Url ++ "/db2/b", + ?ADMIN_REQ_HEADERS, "{\"b\":2,\"_access\":[\"x\"]}"), + {ok, _, _, _} = test_request:put(Url ++ "/db2/c", + ?ADMIN_REQ_HEADERS, "{\"c\":3,\"_access\":[\"x\"]}"), + + % replicate + AdminUrl = string:replace(Url, "http://", "http://a:a@"), + EJRequestBody = {[ + {<<"source">>, list_to_binary(AdminUrl ++ "/db2")}, + {<<"target">>, list_to_binary(AdminUrl ++ "/db")} + ]}, + {ok, ResponseCode, _, ResponseBody} = test_request:post(Url ++ "/_replicate", + ?ADMIN_REQ_HEADERS, jiffy:encode(EJRequestBody)), + + % assert replication status + {EJResponseBody} = jiffy:decode(ResponseBody), + ?assertEqual(ResponseCode, 200), + ?assertEqual(true, couch_util:get_value(<<"ok">>, EJResponseBody)), + [{History}] = couch_util:get_value(<<"history">>, EJResponseBody), + + MissingChecked = couch_util:get_value(<<"missing_checked">>, History), + MissingFound = couch_util:get_value(<<"missing_found">>, History), + DocsReard = couch_util:get_value(<<"docs_read">>, History), + DocsWritten = couch_util:get_value(<<"docs_written">>, History), + DocWriteFailures = couch_util:get_value(<<"doc_write_failures">>, History), + + ?assertEqual(3, MissingChecked), + ?assertEqual(3, MissingFound), + ?assertEqual(3, DocsReard), + ?assertEqual(3, DocsWritten), + ?assertEqual(0, DocWriteFailures), + + % assert docs in target db + {ok, 200, _, ADBody} = test_request:get(Url ++ "/db/_all_docs?include_docs=true", + ?ADMIN_REQ_HEADERS), + {Json} = jiffy:decode(ADBody), + ?assertEqual(3, proplists:get_value(<<"total_rows">>, Json)) + end). + +should_allow_admin_to_replicate_from_access_to_no_access(_PortType, Url) -> + ?_test(begin + % create target db + {ok, 201, _, _} = test_request:put(url() ++ "/db2?q=1&n=1", + ?ADMIN_REQ_HEADERS, ""), + % set target db security + {ok, _, _, _} = test_request:put(url() ++ "/db2/_security", + ?ADMIN_REQ_HEADERS, jiffy:encode(?SECURITY_OBJECT)), + + % create source docs + {ok, _, _, _} = test_request:put(Url ++ "/db/a", + ?ADMIN_REQ_HEADERS, "{\"a\":1,\"_access\":[\"x\"]}"), + {ok, _, _, _} = test_request:put(Url ++ "/db/b", + ?ADMIN_REQ_HEADERS, "{\"b\":2,\"_access\":[\"x\"]}"), + {ok, _, _, _} = test_request:put(Url ++ "/db/c", + ?ADMIN_REQ_HEADERS, "{\"c\":3,\"_access\":[\"x\"]}"), + + % replicate + AdminUrl = string:replace(Url, "http://", "http://a:a@"), + EJRequestBody = {[ + {<<"source">>, list_to_binary(AdminUrl ++ "/db")}, + {<<"target">>, list_to_binary(AdminUrl ++ "/db2")} + ]}, + {ok, ResponseCode, _, ResponseBody} = test_request:post(Url ++ "/_replicate", + ?ADMIN_REQ_HEADERS, jiffy:encode(EJRequestBody)), + + % assert replication status + {EJResponseBody} = jiffy:decode(ResponseBody), + ?assertEqual(ResponseCode, 200), + ?assertEqual(true, couch_util:get_value(<<"ok">>, EJResponseBody)), + [{History}] = couch_util:get_value(<<"history">>, EJResponseBody), + + MissingChecked = couch_util:get_value(<<"missing_checked">>, History), + MissingFound = couch_util:get_value(<<"missing_found">>, History), + DocsReard = couch_util:get_value(<<"docs_read">>, History), + DocsWritten = couch_util:get_value(<<"docs_written">>, History), + DocWriteFailures = couch_util:get_value(<<"doc_write_failures">>, History), + + ?assertEqual(3, MissingChecked), + ?assertEqual(3, MissingFound), + ?assertEqual(3, DocsReard), + ?assertEqual(3, DocsWritten), + ?assertEqual(0, DocWriteFailures), + + % assert docs in target db + {ok, 200, _, ADBody} = test_request:get(Url ++ "/db2/_all_docs?include_docs=true", + ?ADMIN_REQ_HEADERS), + {Json} = jiffy:decode(ADBody), + ?assertEqual(3, proplists:get_value(<<"total_rows">>, Json)) + end). + +should_allow_admin_to_replicate_from_no_access_to_no_access(_PortType, Url) -> + ?_test(begin + % create source and target dbs + {ok, 201, _, _} = test_request:put(url() ++ "/db2?q=1&n=1", + ?ADMIN_REQ_HEADERS, ""), + % set target db security + {ok, _, _, _} = test_request:put(url() ++ "/db2/_security", + ?ADMIN_REQ_HEADERS, jiffy:encode(?SECURITY_OBJECT)), + + {ok, 201, _, _} = test_request:put(url() ++ "/db3?q=1&n=1", + ?ADMIN_REQ_HEADERS, ""), + % set target db security + {ok, _, _, _} = test_request:put(url() ++ "/db3/_security", + ?ADMIN_REQ_HEADERS, jiffy:encode(?SECURITY_OBJECT)), + + % create source docs + {ok, _, _, _} = test_request:put(Url ++ "/db2/a", + ?ADMIN_REQ_HEADERS, "{\"a\":1,\"_access\":[\"x\"]}"), + {ok, _, _, _} = test_request:put(Url ++ "/db2/b", + ?ADMIN_REQ_HEADERS, "{\"b\":2,\"_access\":[\"x\"]}"), + {ok, _, _, _} = test_request:put(Url ++ "/db2/c", + ?ADMIN_REQ_HEADERS, "{\"c\":3,\"_access\":[\"x\"]}"), + + % replicate + AdminUrl = string:replace(Url, "http://", "http://a:a@"), + EJRequestBody = {[ + {<<"source">>, list_to_binary(AdminUrl ++ "/db2")}, + {<<"target">>, list_to_binary(AdminUrl ++ "/db3")} + ]}, + {ok, ResponseCode, _, ResponseBody} = test_request:post(Url ++ "/_replicate", + ?ADMIN_REQ_HEADERS, jiffy:encode(EJRequestBody)), + + % assert replication status + {EJResponseBody} = jiffy:decode(ResponseBody), + ?assertEqual(ResponseCode, 200), + ?assertEqual(true, couch_util:get_value(<<"ok">>, EJResponseBody)), + [{History}] = couch_util:get_value(<<"history">>, EJResponseBody), + + MissingChecked = couch_util:get_value(<<"missing_checked">>, History), + MissingFound = couch_util:get_value(<<"missing_found">>, History), + DocsReard = couch_util:get_value(<<"docs_read">>, History), + DocsWritten = couch_util:get_value(<<"docs_written">>, History), + DocWriteFailures = couch_util:get_value(<<"doc_write_failures">>, History), + + ?assertEqual(3, MissingChecked), + ?assertEqual(3, MissingFound), + ?assertEqual(3, DocsReard), + ?assertEqual(3, DocsWritten), + ?assertEqual(0, DocWriteFailures), + + % assert docs in target db + {ok, 200, _, ADBody} = test_request:get(Url ++ "/db3/_all_docs?include_docs=true", + ?ADMIN_REQ_HEADERS), + {Json} = jiffy:decode(ADBody), + ?assertEqual(3, proplists:get_value(<<"total_rows">>, Json)) + end). + +should_allow_user_to_replicate_from_access_to_access(_PortType, Url) -> + ?_test(begin + % create source and target dbs + {ok, 201, _, _} = test_request:put(url() ++ "/db2?q=1&n=1&access=true", + ?ADMIN_REQ_HEADERS, ""), + % set target db security + {ok, _, _, _} = test_request:put(url() ++ "/db2/_security", + ?ADMIN_REQ_HEADERS, jiffy:encode(?SECURITY_OBJECT)), + + % create source docs + {ok, _, _, _} = test_request:put(Url ++ "/db/a", + ?ADMIN_REQ_HEADERS, "{\"a\":1,\"_access\":[\"x\"]}"), + {ok, _, _, _} = test_request:put(Url ++ "/db/b", + ?ADMIN_REQ_HEADERS, "{\"b\":2,\"_access\":[\"x\"]}"), + {ok, _, _, _} = test_request:put(Url ++ "/db/c", + ?ADMIN_REQ_HEADERS, "{\"c\":3,\"_access\":[\"y\"]}"), + + % replicate + UserXUrl = string:replace(Url, "http://", "http://x:x@"), + EJRequestBody = {[ + {<<"source">>, list_to_binary(UserXUrl ++ "/db")}, + {<<"target">>, list_to_binary(UserXUrl ++ "/db2")} + ]}, + {ok, ResponseCode, _, ResponseBody} = test_request:post(Url ++ "/_replicate", + ?USERX_REQ_HEADERS, jiffy:encode(EJRequestBody)), + % ?debugFmt("~nResponseBody: ~p~n", [ResponseBody]), + + % assert replication status + {EJResponseBody} = jiffy:decode(ResponseBody), + ?assertEqual(ResponseCode, 200), + ?assertEqual(true, couch_util:get_value(<<"ok">>, EJResponseBody)), + + [{History}] = couch_util:get_value(<<"history">>, EJResponseBody), + + MissingChecked = couch_util:get_value(<<"missing_checked">>, History), + MissingFound = couch_util:get_value(<<"missing_found">>, History), + DocsReard = couch_util:get_value(<<"docs_read">>, History), + DocsWritten = couch_util:get_value(<<"docs_written">>, History), + DocWriteFailures = couch_util:get_value(<<"doc_write_failures">>, History), + + ?assertEqual(2, MissingChecked), + ?assertEqual(2, MissingFound), + ?assertEqual(2, DocsReard), + ?assertEqual(2, DocsWritten), + ?assertEqual(0, DocWriteFailures), + + % assert access in local doc + ReplicationId = couch_util:get_value(<<"replication_id">>, EJResponseBody), + {ok, 200, _, CheckPoint} = test_request:get(Url ++ "/db/_local/" ++ ReplicationId, + ?USERX_REQ_HEADERS), + {EJCheckPoint} = jiffy:decode(CheckPoint), + Access = couch_util:get_value(<<"_access">>, EJCheckPoint), + ?assertEqual([<<"x">>], Access), + + % make sure others can’t read our local docs + {ok, 403, _, _} = test_request:get(Url ++ "/db/_local/" ++ ReplicationId, + ?USERY_REQ_HEADERS), + + % assert docs in target db + {ok, 200, _, ADBody} = test_request:get(Url ++ "/db2/_all_docs?include_docs=true", + ?ADMIN_REQ_HEADERS), + {Json} = jiffy:decode(ADBody), + ?assertEqual(2, proplists:get_value(<<"total_rows">>, Json)) + end). + +should_allow_user_to_replicate_from_access_to_no_access(_PortType, Url) -> + ?_test(begin + % create source and target dbs + {ok, 201, _, _} = test_request:put(url() ++ "/db2?q=1&n=1", + ?ADMIN_REQ_HEADERS, ""), + % set target db security + {ok, _, _, _} = test_request:put(url() ++ "/db2/_security", + ?ADMIN_REQ_HEADERS, jiffy:encode(?SECURITY_OBJECT)), + + % create source docs + {ok, _, _, _} = test_request:put(Url ++ "/db/a", + ?ADMIN_REQ_HEADERS, "{\"a\":1,\"_access\":[\"x\"]}"), + {ok, _, _, _} = test_request:put(Url ++ "/db/b", + ?ADMIN_REQ_HEADERS, "{\"b\":2,\"_access\":[\"x\"]}"), + {ok, _, _, _} = test_request:put(Url ++ "/db/c", + ?ADMIN_REQ_HEADERS, "{\"c\":3,\"_access\":[\"y\"]}"), + + % replicate + UserXUrl = string:replace(Url, "http://", "http://x:x@"), + EJRequestBody = {[ + {<<"source">>, list_to_binary(UserXUrl ++ "/db")}, + {<<"target">>, list_to_binary(UserXUrl ++ "/db2")} + ]}, + {ok, ResponseCode, _, ResponseBody} = test_request:post(Url ++ "/_replicate", + ?USERX_REQ_HEADERS, jiffy:encode(EJRequestBody)), + + % assert replication status + {EJResponseBody} = jiffy:decode(ResponseBody), + ?assertEqual(ResponseCode, 200), + ?assertEqual(true, couch_util:get_value(<<"ok">>, EJResponseBody)), + [{History}] = couch_util:get_value(<<"history">>, EJResponseBody), + + MissingChecked = couch_util:get_value(<<"missing_checked">>, History), + MissingFound = couch_util:get_value(<<"missing_found">>, History), + DocsReard = couch_util:get_value(<<"docs_read">>, History), + DocsWritten = couch_util:get_value(<<"docs_written">>, History), + DocWriteFailures = couch_util:get_value(<<"doc_write_failures">>, History), + + ?assertEqual(2, MissingChecked), + ?assertEqual(2, MissingFound), + ?assertEqual(2, DocsReard), + ?assertEqual(2, DocsWritten), + ?assertEqual(0, DocWriteFailures), + + % assert docs in target db + {ok, 200, _, ADBody} = test_request:get(Url ++ "/db2/_all_docs?include_docs=true", + ?ADMIN_REQ_HEADERS), + {Json} = jiffy:decode(ADBody), + ?assertEqual(2, proplists:get_value(<<"total_rows">>, Json)) + end). + +should_allow_user_to_replicate_from_no_access_to_access(_PortType, Url) -> + ?_test(begin + % create source and target dbs + {ok, 201, _, _} = test_request:put(url() ++ "/db2?q=1&n=1", + ?ADMIN_REQ_HEADERS, ""), + % set target db security + {ok, _, _, _} = test_request:put(url() ++ "/db2/_security", + ?ADMIN_REQ_HEADERS, jiffy:encode(?SECURITY_OBJECT)), + + % leave for easier debugging + % VduFun = <<"function(newdoc, olddoc, userctx) {if(newdoc._id == \"b\") throw({'forbidden':'fail'})}">>, + % DDoc = {[ + % {<<"_id">>, <<"_design/vdu">>}, + % {<<"validate_doc_update">>, VduFun} + % ]}, + % {ok, _, _, _} = test_request:put(Url ++ "/db/_design/vdu", + % ?ADMIN_REQ_HEADERS, jiffy:encode(DDoc)), + % create source docs + {ok, _, _, _} = test_request:put(Url ++ "/db2/a", + ?ADMIN_REQ_HEADERS, "{\"a\":1,\"_access\":[\"x\"]}"), + {ok, _, _, _} = test_request:put(Url ++ "/db2/b", + ?ADMIN_REQ_HEADERS, "{\"b\":2,\"_access\":[\"x\"]}"), + {ok, _, _, _} = test_request:put(Url ++ "/db2/c", + ?ADMIN_REQ_HEADERS, "{\"c\":3,\"_access\":[\"y\"]}"), + + + % replicate + UserXUrl = string:replace(Url, "http://", "http://x:x@"), + EJRequestBody = {[ + {<<"source">>, list_to_binary(UserXUrl ++ "/db2")}, + {<<"target">>, list_to_binary(UserXUrl ++ "/db")} + ]}, + {ok, ResponseCode, _, ResponseBody} = test_request:post(Url ++ "/_replicate", + ?USERX_REQ_HEADERS, jiffy:encode(EJRequestBody)), + + % assert replication status + {EJResponseBody} = jiffy:decode(ResponseBody), + ?assertEqual(ResponseCode, 200), + ?assertEqual(true, couch_util:get_value(<<"ok">>, EJResponseBody)), + [{History}] = couch_util:get_value(<<"history">>, EJResponseBody), + + MissingChecked = couch_util:get_value(<<"missing_checked">>, History), + MissingFound = couch_util:get_value(<<"missing_found">>, History), + DocsReard = couch_util:get_value(<<"docs_read">>, History), + DocsWritten = couch_util:get_value(<<"docs_written">>, History), + DocWriteFailures = couch_util:get_value(<<"doc_write_failures">>, History), + + ?assertEqual(3, MissingChecked), + ?assertEqual(3, MissingFound), + ?assertEqual(3, DocsReard), + ?assertEqual(2, DocsWritten), + ?assertEqual(1, DocWriteFailures), + + % assert docs in target db + {ok, 200, _, ADBody} = test_request:get(Url ++ "/db/_all_docs?include_docs=true", + ?ADMIN_REQ_HEADERS), + {Json} = jiffy:decode(ADBody), + ?assertEqual(2, proplists:get_value(<<"total_rows">>, Json)) + end). + +should_allow_user_to_replicate_from_no_access_to_no_access(_PortType, Url) -> + ?_test(begin + % create source and target dbs + {ok, 201, _, _} = test_request:put(url() ++ "/db2?q=1&n=1", + ?ADMIN_REQ_HEADERS, ""), + % set target db security + {ok, _, _, _} = test_request:put(url() ++ "/db2/_security", + ?ADMIN_REQ_HEADERS, jiffy:encode(?SECURITY_OBJECT)), + + {ok, 201, _, _} = test_request:put(url() ++ "/db3?q=1&n=1", + ?ADMIN_REQ_HEADERS, ""), + % set target db security + {ok, _, _, _} = test_request:put(url() ++ "/db3/_security", + ?ADMIN_REQ_HEADERS, jiffy:encode(?SECURITY_OBJECT)), + % create source docs + {ok, _, _, _} = test_request:put(Url ++ "/db2/a", + ?ADMIN_REQ_HEADERS, "{\"a\":1,\"_access\":[\"x\"]}"), + {ok, _, _, _} = test_request:put(Url ++ "/db2/b", + ?ADMIN_REQ_HEADERS, "{\"b\":2,\"_access\":[\"x\"]}"), + {ok, _, _, _} = test_request:put(Url ++ "/db2/c", + ?ADMIN_REQ_HEADERS, "{\"c\":3,\"_access\":[\"y\"]}"), + + % replicate + UserXUrl = string:replace(Url, "http://", "http://x:x@"), + EJRequestBody = {[ + {<<"source">>, list_to_binary(UserXUrl ++ "/db2")}, + {<<"target">>, list_to_binary(UserXUrl ++ "/db3")} + ]}, + {ok, ResponseCode, _, ResponseBody} = test_request:post(Url ++ "/_replicate", + ?USERX_REQ_HEADERS, jiffy:encode(EJRequestBody)), + + % assert replication status + {EJResponseBody} = jiffy:decode(ResponseBody), + ?assertEqual(ResponseCode, 200), + ?assertEqual(true, couch_util:get_value(<<"ok">>, EJResponseBody)), + [{History}] = couch_util:get_value(<<"history">>, EJResponseBody), + + MissingChecked = couch_util:get_value(<<"missing_checked">>, History), + MissingFound = couch_util:get_value(<<"missing_found">>, History), + DocsReard = couch_util:get_value(<<"docs_read">>, History), + DocsWritten = couch_util:get_value(<<"docs_written">>, History), + DocWriteFailures = couch_util:get_value(<<"doc_write_failures">>, History), + + ?assertEqual(3, MissingChecked), + ?assertEqual(3, MissingFound), + ?assertEqual(3, DocsReard), + ?assertEqual(3, DocsWritten), + ?assertEqual(0, DocWriteFailures), + + % assert docs in target db + {ok, 200, _, ADBody} = test_request:get(Url ++ "/db3/_all_docs?include_docs=true", + ?ADMIN_REQ_HEADERS), + {Json} = jiffy:decode(ADBody), + ?assertEqual(3, proplists:get_value(<<"total_rows">>, Json)) + end). + +% revs_diff +should_not_allow_user_to_revs_diff_other_docs(_PortType, Url) -> + ?_test(begin + % create test docs + {ok, _, _, _} = test_request:put(Url ++ "/db/a", + ?ADMIN_REQ_HEADERS, "{\"a\":1,\"_access\":[\"x\"]}"), + {ok, _, _, _} = test_request:put(Url ++ "/db/b", + ?ADMIN_REQ_HEADERS, "{\"b\":2,\"_access\":[\"x\"]}"), + {ok, _, _, V} = test_request:put(Url ++ "/db/c", + ?ADMIN_REQ_HEADERS, "{\"c\":3,\"_access\":[\"y\"]}"), + + % nothing missing + RevsDiff = {[ + {<<"a">>, [ + <<"1-23202479633c2b380f79507a776743d5">> + ]} + ]}, + {ok, GoodCode, _, GoodBody} = test_request:post(Url ++ "/db/_revs_diff", + ?USERX_REQ_HEADERS, jiffy:encode(RevsDiff)), + EJGoodBody = jiffy:decode(GoodBody), + ?assertEqual(200, GoodCode), + ?assertEqual({[]}, EJGoodBody), + + % something missing + MissingRevsDiff = {[ + {<<"a">>, [ + <<"1-missing">> + ]} + ]}, + {ok, MissingCode, _, MissingBody} = test_request:post(Url ++ "/db/_revs_diff", + ?USERX_REQ_HEADERS, jiffy:encode(MissingRevsDiff)), + EJMissingBody = jiffy:decode(MissingBody), + ?assertEqual(200, MissingCode), + MissingExpect = {[ + {<<"a">>, {[ + {<<"missing">>, [<<"1-missing">>]} + ]}} + ]}, + ?assertEqual(MissingExpect, EJMissingBody), + + % other doc + OtherRevsDiff = {[ + {<<"c">>, [ + <<"1-92aef5b0e4a3f4db0aba1320869bc95d">> + ]} + ]}, + {ok, OtherCode, _, OtherBody} = test_request:post(Url ++ "/db/_revs_diff", + ?USERX_REQ_HEADERS, jiffy:encode(OtherRevsDiff)), + EJOtherBody = jiffy:decode(OtherBody), + ?assertEqual(200, OtherCode), + ?assertEqual({[]}, EJOtherBody) + end). +%% ------------------------------------------------------------------ +%% Internal Function Definitions +%% ------------------------------------------------------------------ + +port() -> + integer_to_list(mochiweb_socket_server:get(chttpd, port)). + +% Potential future feature:% +% should_let_user_fetch_their_own_all_docs_plus_users_ddocs(_PortType, Url) -> +% {ok, 201, _, _} = test_request:put(Url ++ "/db/a", +% ?ADMIN_REQ_HEADERS, "{\"a\":1,\"_access\":[\"x\"]}"), +% {ok, 201, _, _} = test_request:put(Url ++ "/db/_design/foo", +% ?ADMIN_REQ_HEADERS, "{\"a\":1,\"_access\":[\"_users\"]}"), +% {ok, 201, _, _} = test_request:put(Url ++ "/db/_design/bar", +% ?ADMIN_REQ_HEADERS, "{\"a\":1,\"_access\":[\"houdini\"]}"), +% {ok, 201, _, _} = test_request:put(Url ++ "/db/b", +% ?USERX_REQ_HEADERS, "{\"b\":2,\"_access\":[\"x\"]}"), +% +% % % TODO: add allowing non-admin users adding non-admin ddocs +% {ok, 201, _, _} = test_request:put(Url ++ "/db/_design/x", +% ?ADMIN_REQ_HEADERS, "{\"b\":2,\"_access\":[\"x\"]}"), +% +% {ok, 201, _, _} = test_request:put(Url ++ "/db/c", +% ?ADMIN_REQ_HEADERS, "{\"c\":3,\"_access\":[\"y\"]}"), +% {ok, 201, _, _} = test_request:put(Url ++ "/db/d", +% ?USERY_REQ_HEADERS, "{\"d\":4,\"_access\":[\"y\"]}"), +% {ok, 200, _, Body} = test_request:get(Url ++ "/db/_all_docs?include_docs=true", +% ?USERX_REQ_HEADERS), +% {Json} = jiffy:decode(Body), +% ?debugFmt("~nHSOIN: ~p~n", [Json]), +% ?_assertEqual(3, length(proplists:get_value(<<"rows">>, Json))). diff --git a/src/couch/test/eunit/couchdb_update_conflicts_tests.erl b/src/couch/test/eunit/couchdb_update_conflicts_tests.erl index 847125a50d9..953ddd7033b 100644 --- a/src/couch/test/eunit/couchdb_update_conflicts_tests.erl +++ b/src/couch/test/eunit/couchdb_update_conflicts_tests.erl @@ -18,8 +18,8 @@ -define(i2l(I), integer_to_list(I)). -define(DOC_ID, <<"foobar">>). -define(LOCAL_DOC_ID, <<"_local/foobar">>). --define(NUM_CLIENTS, [100, 500, 1000, 2000, 5000, 10000]). --define(TIMEOUT, 100000). +-define(NUM_CLIENTS, [100, 500 ]). % TODO: enable 1000, 2000, 5000, 10000]). +-define(TIMEOUT, 200000). start() -> test_util:start_couch(). diff --git a/src/couch_index/src/couch_index_util.erl b/src/couch_index/src/couch_index_util.erl index db8aad470e1..47133db0f14 100644 --- a/src/couch_index/src/couch_index_util.erl +++ b/src/couch_index/src/couch_index_util.erl @@ -31,7 +31,10 @@ index_file(Module, DbName, FileName) -> load_doc(Db, #doc_info{} = DI, Opts) -> Deleted = lists:member(deleted, Opts), - case (catch couch_db:open_doc(Db, DI, Opts)) of + % MyDoc = , + %{ok, MyDoc2} = MyDoc, + %couch_log:error("~ncouch_index_util:load_doc(): Doc: ~p, Deleted ~p~n", [MyDoc2, MyDoc2#doc.deleted]), + case catch (couch_db:open_doc(Db, DI, Opts)) of {ok, #doc{deleted = false} = Doc} -> Doc; {ok, #doc{deleted = true} = Doc} when Deleted -> Doc; _Else -> null diff --git a/src/custodian/src/custodian_util.erl b/src/custodian/src/custodian_util.erl index 41f51507dad..2579691b7e4 100644 --- a/src/custodian/src/custodian_util.erl +++ b/src/custodian/src/custodian_util.erl @@ -183,7 +183,8 @@ maintenance_nodes(Nodes) -> [N || {N, Mode} <- lists:zip(Nodes, Modes), Mode =:= "true"]. load_shards(Db, #full_doc_info{id = Id} = FDI) -> - case couch_db:open_doc(Db, FDI, [ejson_body]) of + Doc = couch_db:open_doc(Db, FDI, [ejson_body]), + case Doc of {ok, #doc{body = {Props}}} -> mem3_util:build_shards(Id, Props); {not_found, _} -> diff --git a/src/fabric/src/fabric_doc_update.erl b/src/fabric/src/fabric_doc_update.erl index 7db2b61be77..e38887659b9 100644 --- a/src/fabric/src/fabric_doc_update.erl +++ b/src/fabric/src/fabric_doc_update.erl @@ -420,9 +420,9 @@ doc_update1() -> {ok, StW5_2} = handle_message({rexi_EXIT, nil}, SB1, StW5_1), {ok, StW5_3} = handle_message({rexi_EXIT, nil}, SA2, StW5_2), {stop, ReplyW5} = handle_message({rexi_EXIT, nil}, SB2, StW5_3), + ?assertEqual( - % TODO: we had to flip this, it might point to a missing, or overzealous - % lists:reverse() in our implementation. + % TODO: find out why we had to swap this {error, [{Doc2,{error,internal_server_error}},{Doc1,{accepted,"A"}}]}, ReplyW5 ). @@ -454,9 +454,7 @@ doc_update2() -> handle_message({rexi_EXIT, 1}, lists:nth(3, Shards), Acc2), ?assertEqual( - % TODO: we had to flip this, it might point to a missing, or overzealous - % lists:reverse() in our implementation. - ?assertEqual({accepted, [{Doc2,{accepted,Doc1}}, {Doc1,{accepted,Doc2}}]}, + {accepted, [{Doc2,{accepted,Doc2}}, {Doc1,{accepted,Doc1}}]}, Reply ). @@ -485,10 +483,7 @@ doc_update3() -> {stop, Reply} = handle_message({ok, [{ok, Doc1}, {ok, Doc2}]}, lists:nth(3, Shards), Acc2), - - % TODO: we had to flip this, it might point to a missing, or overzealous - % lists:reverse() in our implementation. - ?assertEqual({ok, [{Doc2, {ok,Doc1}},{Doc1, {ok, Doc2}}]},Reply). + ?assertEqual({ok, [{Doc2, {ok,Doc2}},{Doc1, {ok, Doc1}}]},Reply). handle_all_dbs_active() -> Doc1 = #doc{revs = {1, [<<"foo">>]}}, @@ -516,7 +511,7 @@ handle_all_dbs_active() -> {stop, Reply} = handle_message({ok, [{ok, Doc1}, {ok, Doc2}]}, lists:nth(3, Shards), Acc2), - ?assertEqual({ok, [{Doc1, {ok, Doc1}}, {Doc2, {ok, Doc2}}]}, Reply). + ?assertEqual({ok, [{Doc2, {ok, Doc2}}, {Doc1, {ok, Doc1}}]}, Reply). handle_two_all_dbs_actives() -> Doc1 = #doc{revs = {1, [<<"foo">>]}}, @@ -545,7 +540,7 @@ handle_two_all_dbs_actives() -> handle_message({error, all_dbs_active}, lists:nth(3, Shards), Acc2), ?assertEqual( - {accepted, [{Doc1, {accepted, Doc1}}, {Doc2, {accepted, Doc2}}]}, + {accepted, [{Doc2, {accepted, Doc2}}, {Doc1, {accepted, Doc1}}]}, Reply ). @@ -580,8 +575,8 @@ one_forbid() -> ?assertEqual( {ok, [ - {Doc1, {ok, Doc1}}, - {Doc2, {Doc2, {forbidden, <<"not allowed">>}}} + {Doc2, {Doc2, {forbidden, <<"not allowed">>}}}, + {Doc1, {ok, Doc1}} ]}, Reply ). @@ -619,8 +614,8 @@ two_forbid() -> ?assertEqual( {ok, [ - {Doc1, {ok, Doc1}}, - {Doc2, {Doc2, {forbidden, <<"not allowed">>}}} + {Doc2, {Doc2, {forbidden, <<"not allowed">>}}}, + {Doc1, {ok, Doc1}} ]}, Reply ). @@ -657,7 +652,7 @@ extend_tree_forbid() -> {stop, Reply} = handle_message({ok, [{ok, Doc1}, {ok, Doc2}]}, lists:nth(3, Shards), Acc2), - ?assertEqual({ok, [{Doc1, {ok, Doc1}}, {Doc2, {ok, Doc2}}]}, Reply). + ?assertEqual({ok, [{Doc2, {ok, Doc2}}, {Doc1, {ok, Doc1}}]}, Reply). other_errors_one_forbid() -> Doc1 = #doc{revs = {1, [<<"foo">>]}}, @@ -687,7 +682,7 @@ other_errors_one_forbid() -> handle_message( {ok, [{ok, Doc1}, {Doc2, {forbidden, <<"not allowed">>}}]}, lists:nth(3, Shards), Acc2 ), - ?assertEqual({error, [{Doc1, {ok, Doc1}}, {Doc2, {Doc2, {error, <<"foo">>}}}]}, Reply). + ?assertEqual({error, [{Doc2, {Doc2, {error, <<"foo">>}}}, {Doc1, {ok, Doc1}}]}, Reply). one_error_two_forbid() -> Doc1 = #doc{revs = {1, [<<"foo">>]}}, @@ -720,7 +715,7 @@ one_error_two_forbid() -> {ok, [{ok, Doc1}, {Doc2, {forbidden, <<"not allowed">>}}]}, lists:nth(3, Shards), Acc2 ), ?assertEqual( - {error, [{Doc1, {ok, Doc1}}, {Doc2, {Doc2, {forbidden, <<"not allowed">>}}}]}, Reply + {error, [{Doc2, {Doc2, {forbidden, <<"not allowed">>}}}, {Doc1, {ok, Doc1}}]}, Reply ). one_success_two_forbid() -> @@ -754,7 +749,7 @@ one_success_two_forbid() -> {ok, [{ok, Doc1}, {Doc2, {forbidden, <<"not allowed">>}}]}, lists:nth(3, Shards), Acc2 ), ?assertEqual( - {error, [{Doc1, {ok, Doc1}}, {Doc2, {Doc2, {forbidden, <<"not allowed">>}}}]}, Reply + {error, [{Doc2, {Doc2, {forbidden, <<"not allowed">>}}}, {Doc1, {ok, Doc1}}]}, Reply ). % needed for testing to avoid having to start the mem3 application diff --git a/src/mem3/src/mem3_shards.erl b/src/mem3/src/mem3_shards.erl index f48bfdb8a29..f6c0bc3d761 100644 --- a/src/mem3/src/mem3_shards.erl +++ b/src/mem3/src/mem3_shards.erl @@ -362,6 +362,7 @@ changes_callback({stop, EndSeq}, _) -> changes_callback({change, {Change}, _}, _) -> DbName = couch_util:get_value(<<"id">>, Change), Seq = couch_util:get_value(<<"seq">>, Change), + %couch_log:error("~nChange: ~p~n", [Change]), case DbName of <<"_design/", _/binary>> -> ok; From 651df0a64be10bf5a3f7f4527dbd6820cbbd37b7 Mon Sep 17 00:00:00 2001 From: Jan Lehnardt Date: Sat, 6 Aug 2022 12:48:36 +0200 Subject: [PATCH 17/32] feat(access): add global off switch --- rel/overlay/etc/default.ini | 4 ++++ src/chttpd/src/chttpd_db.erl | 9 +++++++-- src/couch/test/eunit/couchdb_access_tests.erl | 1 + 3 files changed, 12 insertions(+), 2 deletions(-) diff --git a/rel/overlay/etc/default.ini b/rel/overlay/etc/default.ini index ae691bb8dbf..0612506e335 100644 --- a/rel/overlay/etc/default.ini +++ b/rel/overlay/etc/default.ini @@ -388,6 +388,10 @@ authentication_db = _users ; max_iterations, password_scheme, password_regexp, proxy_use_secret, ; public_fields, secret, users_db_public, cookie_domain, same_site +; Per document access settings +[per_doc_access] +;enabled = false + ; CSP (Content Security Policy) Support [csp] ;utils_enable = true diff --git a/src/chttpd/src/chttpd_db.erl b/src/chttpd/src/chttpd_db.erl index 29daf567404..d26824bdd9c 100644 --- a/src/chttpd/src/chttpd_db.erl +++ b/src/chttpd/src/chttpd_db.erl @@ -2006,9 +2006,14 @@ parse_shards_opt("placement", Req, Default) -> parse_shards_opt("access", Req, Value) when is_list(Value) -> parse_shards_opt("access", Req, list_to_existing_atom(Value)); parse_shards_opt("access", _Req, Value) when is_boolean(Value) -> - Value; + case config:get_boolean("per_doc_access", "enabled", false) of + true -> Value; + false -> + Err = ?l2b(["The `access` is not available on this CouchDB installation."]), + throw({bad_request, Err}) + end; parse_shards_opt("access", _Req, _Value) -> - Err = ?l2b(["The woopass `access` value should be a boolean."]), + Err = ?l2b(["The `access` value should be a boolean."]), throw({bad_request, Err}); parse_shards_opt(Param, Req, Default) -> diff --git a/src/couch/test/eunit/couchdb_access_tests.erl b/src/couch/test/eunit/couchdb_access_tests.erl index 28f27ea72ee..1b656499ce2 100644 --- a/src/couch/test/eunit/couchdb_access_tests.erl +++ b/src/couch/test/eunit/couchdb_access_tests.erl @@ -46,6 +46,7 @@ before_all() -> ok = config:set("admins", "a", binary_to_list(Hashed), _Persist=false), ok = config:set("couchdb", "uuid", "21ac467c1bc05e9d9e9d2d850bb1108f", _Persist=false), ok = config:set("log", "level", "debug", _Persist=false), + ok = config:set("per_doc_access", "enabled", "true", _Persist=false), % cleanup and setup {ok, _, _, _} = test_request:delete(url() ++ "/db", ?ADMIN_REQ_HEADERS), From e1746c5a315b88e4715062feb7ec9610f2d3e762 Mon Sep 17 00:00:00 2001 From: Jan Lehnardt Date: Sat, 6 Aug 2022 12:52:17 +0200 Subject: [PATCH 18/32] doc(access): leave todo for missing implementation detail --- src/couch/src/couch_db.erl | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/couch/src/couch_db.erl b/src/couch/src/couch_db.erl index b219a188df1..8ba82fea60b 100644 --- a/src/couch/src/couch_db.erl +++ b/src/couch/src/couch_db.erl @@ -810,6 +810,8 @@ validate_access1(true, Db, #doc{meta=Meta}=Doc, Options) -> _False -> validate_access2(Db, Doc) end; _Else -> % only admins can read conflicted docs in _access dbs + % TODO: expand: if leaves agree on _access, then a user should be able + % to proceed normally, only if they disagree should this become admin-only case is_admin(Db) of true -> ok; _Else2 -> throw({forbidden, <<"document is in conflict">>}) From 2084d516072319ed10f8d1af832fdd53f7b5902d Mon Sep 17 00:00:00 2001 From: Jan Lehnardt Date: Sat, 6 Aug 2022 12:54:23 +0200 Subject: [PATCH 19/32] chore(access): remove old comment --- src/couch/src/couch_db_updater.erl | 5 ----- 1 file changed, 5 deletions(-) diff --git a/src/couch/src/couch_db_updater.erl b/src/couch/src/couch_db_updater.erl index 6067d18b4c2..392f34b77fa 100644 --- a/src/couch/src/couch_db_updater.erl +++ b/src/couch/src/couch_db_updater.erl @@ -791,11 +791,6 @@ update_docs_int(Db, DocsList, LocalDocs, MergeConflicts, UserCtx) -> % at this point, we already validated this Db is access enabled, so do the checks right away. check_access(Db, UserCtx, Access) -> couch_db:check_access(Db#db{user_ctx=UserCtx}, Access). -% TODO: looks like we go into validation here unconditionally and only check in -% check_access() whether the Db has_access_enabled(), we should do this -% here on the outside. Might be our perf issue. -% However, if it is, that means we have to speed this up as it would still -% be too slow for when access is enabled. validate_docs_access(Db, UserCtx, DocsList, OldDocInfos) -> case couch_db:has_access_enabled(Db) of true -> validate_docs_access_int(Db, UserCtx, DocsList, OldDocInfos); From 3c5bdf42abbfab1d3d14ceef42e8655fd906189e Mon Sep 17 00:00:00 2001 From: Jan Lehnardt Date: Sat, 6 Aug 2022 15:35:24 +0200 Subject: [PATCH 20/32] fix(access): use minimal info from prev rev --- src/chttpd/src/chttpd_db.erl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/chttpd/src/chttpd_db.erl b/src/chttpd/src/chttpd_db.erl index d26824bdd9c..f46847b3c32 100644 --- a/src/chttpd/src/chttpd_db.erl +++ b/src/chttpd/src/chttpd_db.erl @@ -965,7 +965,7 @@ db_doc_req(#httpd{method = 'DELETE'} = Req, Db, DocId) -> Rev -> Body = {[{<<"_rev">>, ?l2b(Rev)}, {<<"_deleted">>, true}]} end, - Doc = Doc0#doc{revs=Revs,body=Body,deleted=true}, + Doc = #doc{revs=Revs,body=Body,deleted=true,access=Doc0#doc.access}, send_updated_doc(Req, Db, DocId, couch_doc_from_req(Req, Db, DocId, Doc)); db_doc_req(#httpd{method = 'GET', mochi_req = MochiReq} = Req, Db, DocId) -> #doc_query_args{ From fa8585c012edf1d216836589798ce185f2fc8412 Mon Sep 17 00:00:00 2001 From: Jan Lehnardt Date: Sat, 6 Aug 2022 15:39:05 +0200 Subject: [PATCH 21/32] chore(access): style notes --- src/couch/src/couch_db_updater.erl | 2 +- src/couch/src/couch_httpd_auth.erl | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/couch/src/couch_db_updater.erl b/src/couch/src/couch_db_updater.erl index 392f34b77fa..75b1afc9274 100644 --- a/src/couch/src/couch_db_updater.erl +++ b/src/couch/src/couch_db_updater.erl @@ -824,7 +824,7 @@ validate_docs_access(Db, UserCtx, [Docs | DocRest], [OldInfo | OldInfoRest], Doc true -> % if valid, then send to DocsListValidated, OldDocsInfo % and store the access context on the new doc [{Client, Doc} | Acc]; - _Else2 -> % if invalid, then send_result tagged `access`(c.f. `conflict) + false -> % if invalid, then send_result tagged `access`(c.f. `conflict) % and don’t add to DLV, nor ODI send_result(Client, Doc, access), Acc diff --git a/src/couch/src/couch_httpd_auth.erl b/src/couch/src/couch_httpd_auth.erl index 2e62c88919b..a6cffce39f1 100644 --- a/src/couch/src/couch_httpd_auth.erl +++ b/src/couch/src/couch_httpd_auth.erl @@ -103,7 +103,7 @@ extract_roles(UserProps) -> Roles = couch_util:get_value(<<"roles">>, UserProps, []), case lists:member(<<"_admin">>, Roles) of true -> Roles; - _ -> Roles ++ [<<"_users">>] + _ -> [<<"_users">> | Roles] end. default_authentication_handler(Req) -> From 92f36af80e4c3d21713ea7519053bf56ccdf5740 Mon Sep 17 00:00:00 2001 From: Jan Lehnardt Date: Sat, 6 Aug 2022 16:25:58 +0200 Subject: [PATCH 22/32] doc(access): add todos --- src/couch/src/couch_db.erl | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/couch/src/couch_db.erl b/src/couch/src/couch_db.erl index 8ba82fea60b..7c7a67500fd 100644 --- a/src/couch/src/couch_db.erl +++ b/src/couch/src/couch_db.erl @@ -821,6 +821,7 @@ validate_access2(Db, Doc) -> validate_access3(check_access(Db, Doc)). validate_access3(true) -> ok; +% TODO: fix language validate_access3(_) -> throw({forbidden, <<"can't touch this">>}). check_access(Db, #doc{access=Access}) -> @@ -854,6 +855,7 @@ check_name(null, _Access) -> true; check_name(UserName, Access) -> lists:member(UserName, Access). % nicked from couch_db:check_security +% TODO: might need DRY check_roles(Roles, Access) -> UserRolesSet = ordsets:from_list(Roles), From b6a7521147f053bd725279d03a3309b046309c53 Mon Sep 17 00:00:00 2001 From: Jan Lehnardt Date: Sat, 6 Aug 2022 16:30:30 +0200 Subject: [PATCH 23/32] fix(access): opt-out switch --- src/chttpd/src/chttpd_db.erl | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/src/chttpd/src/chttpd_db.erl b/src/chttpd/src/chttpd_db.erl index f46847b3c32..24ef1d0b917 100644 --- a/src/chttpd/src/chttpd_db.erl +++ b/src/chttpd/src/chttpd_db.erl @@ -2005,13 +2005,15 @@ parse_shards_opt("placement", Req, Default) -> parse_shards_opt("access", Req, Value) when is_list(Value) -> parse_shards_opt("access", Req, list_to_existing_atom(Value)); -parse_shards_opt("access", _Req, Value) when is_boolean(Value) -> +parse_shards_opt("access", _Req, Value) when Value =:= true -> case config:get_boolean("per_doc_access", "enabled", false) of - true -> Value; + true -> true; false -> - Err = ?l2b(["The `access` is not available on this CouchDB installation."]), + Err = ?l2b(["The `access` option is not available on this CouchDB installation."]), throw({bad_request, Err}) end; +parse_shards_opt("access", _Req, Value) when Value =:= false -> + false; parse_shards_opt("access", _Req, _Value) -> Err = ?l2b(["The `access` value should be a boolean."]), throw({bad_request, Err}); From a443c037de042dcf30e756ff5c6f54efea62ef32 Mon Sep 17 00:00:00 2001 From: Jan Lehnardt Date: Sat, 6 Aug 2022 16:55:30 +0200 Subject: [PATCH 24/32] test(access): test disable access config --- src/couch/test/eunit/couchdb_access_tests.erl | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/src/couch/test/eunit/couchdb_access_tests.erl b/src/couch/test/eunit/couchdb_access_tests.erl index 1b656499ce2..33fb576ff61 100644 --- a/src/couch/test/eunit/couchdb_access_tests.erl +++ b/src/couch/test/eunit/couchdb_access_tests.erl @@ -73,6 +73,10 @@ after_all(_) -> access_test_() -> Tests = [ + + % Server config + fun should_not_let_create_access_db_if_disabled/2, + % Doc creation fun should_not_let_anonymous_user_create_doc/2, fun should_let_admin_create_doc_with_access/2, @@ -167,6 +171,12 @@ make_test_cases(Mod, Funs) -> % end). % +should_not_let_create_access_db_if_disabled(_PortType, Url) -> + ok = config:set("per_doc_access", "enabled", "false", _Persist=false), + {ok, Code, _, _} = test_request:put(url() ++ "/db?q=1&n=1&access=true", ?ADMIN_REQ_HEADERS, ""), + ok = config:set("per_doc_access", "enabled", "true", _Persist=false), + ?_assertEqual(400, Code). + should_not_let_anonymous_user_create_doc(_PortType, Url) -> % TODO: debugging leftover % BulkDocsBody = {[ From 3776dca5bde1bc7549e81d862c95deee32b91213 Mon Sep 17 00:00:00 2001 From: Jan Lehnardt Date: Sat, 6 Aug 2022 17:49:13 +0200 Subject: [PATCH 25/32] fix(access): elixir tests --- test/elixir/test/proxyauth_test.exs | 2 -- test/elixir/test/security_validation_test.exs | 2 +- test/elixir/test/users_db_security_test.exs | 6 +++--- 3 files changed, 4 insertions(+), 6 deletions(-) diff --git a/test/elixir/test/proxyauth_test.exs b/test/elixir/test/proxyauth_test.exs index ea57c1a0e54..2c6e3d530ea 100644 --- a/test/elixir/test/proxyauth_test.exs +++ b/test/elixir/test/proxyauth_test.exs @@ -5,7 +5,6 @@ defmodule ProxyAuthTest do @tag :with_db test "proxy auth with secret" do - users_db_name = random_db_name() create_db(users_db_name) @@ -79,7 +78,6 @@ defmodule ProxyAuthTest do @tag :with_db test "proxy auth without secret" do - users_db_name = random_db_name() create_db(users_db_name) diff --git a/test/elixir/test/security_validation_test.exs b/test/elixir/test/security_validation_test.exs index 2bb87fd836c..c147ddd0302 100644 --- a/test/elixir/test/security_validation_test.exs +++ b/test/elixir/test/security_validation_test.exs @@ -149,7 +149,7 @@ defmodule SecurityValidationTest do headers = @auth_headers[:jerry] resp = Couch.get("/_session", headers: headers) assert resp.body["userCtx"]["name"] == "jerry" - assert info["userCtx"]["roles"] == ["_users"] + assert resp.body["userCtx"]["roles"] == ["_users"] end @tag :with_db diff --git a/test/elixir/test/users_db_security_test.exs b/test/elixir/test/users_db_security_test.exs index 65674904057..3945d2bfa9d 100644 --- a/test/elixir/test/users_db_security_test.exs +++ b/test/elixir/test/users_db_security_test.exs @@ -400,11 +400,11 @@ defmodule UsersDbSecurityTest do # admin should be able to read from any view resp = view_as(@users_db, "user_db_auth/test", user: "jerry") - assert resp.body["total_rows"] == 3 + assert resp.body["total_rows"] == 4 # db admin should be able to read from any view resp = view_as(@users_db, "user_db_auth/test", user: "speedy") - assert resp.body["total_rows"] == 3 + assert resp.body["total_rows"] == 4 # non-admins can't read design docs open_as(@users_db, "_design/user_db_auth", @@ -419,7 +419,7 @@ defmodule UsersDbSecurityTest do request_raw_as(@users_db, "_design/user_db_auth/_list/names/test", user: "jerry") assert result.status_code == 200 - assert length(String.split(result.body, "\n")) == 4 + assert length(String.split(result.body, "\n")) == 5 # non-admins can't read _list request_raw_as(@users_db, "_design/user_db_auth/_list/names/test", From d6863a73f9be7e01a28939f983e2fe072ffe2f7f Mon Sep 17 00:00:00 2001 From: Jan Lehnardt Date: Sat, 6 Aug 2022 17:53:28 +0200 Subject: [PATCH 26/32] chore(access): erlfmt --- src/chttpd/src/chttpd_db.erl | 11 +- src/chttpd/src/chttpd_view.erl | 28 +- src/couch/src/couch_access_native_proc.erl | 64 +- src/couch/src/couch_bt_engine.erl | 1 - src/couch/src/couch_btree.erl | 8 +- src/couch/src/couch_db.erl | 199 +-- src/couch/src/couch_db_updater.erl | 116 +- src/couch/src/couch_doc.erl | 7 +- src/couch/src/couch_util.erl | 5 +- src/couch/test/eunit/couchdb_access_tests.erl | 1293 +++++++++++------ .../eunit/couchdb_update_conflicts_tests.erl | 3 +- src/couch_index/src/couch_index_updater.erl | 13 +- src/couch_index/src/couch_index_util.erl | 2 +- src/couch_mrview/src/couch_mrview.erl | 113 +- src/couch_mrview/src/couch_mrview_updater.erl | 56 +- src/couch_mrview/src/couch_mrview_util.erl | 2 +- src/couch_replicator/src/couch_replicator.erl | 17 +- .../src/couch_replicator_scheduler_job.erl | 15 +- src/fabric/src/fabric_doc_update.erl | 6 +- 19 files changed, 1224 insertions(+), 735 deletions(-) diff --git a/src/chttpd/src/chttpd_db.erl b/src/chttpd/src/chttpd_db.erl index 24ef1d0b917..50069a8679b 100644 --- a/src/chttpd/src/chttpd_db.erl +++ b/src/chttpd/src/chttpd_db.erl @@ -965,7 +965,7 @@ db_doc_req(#httpd{method = 'DELETE'} = Req, Db, DocId) -> Rev -> Body = {[{<<"_rev">>, ?l2b(Rev)}, {<<"_deleted">>, true}]} end, - Doc = #doc{revs=Revs,body=Body,deleted=true,access=Doc0#doc.access}, + Doc = #doc{revs = Revs, body = Body, deleted = true, access = Doc0#doc.access}, send_updated_doc(Req, Db, DocId, couch_doc_from_req(Req, Db, DocId, Doc)); db_doc_req(#httpd{method = 'GET', mochi_req = MochiReq} = Req, Db, DocId) -> #doc_query_args{ @@ -1416,7 +1416,7 @@ receive_request_data(Req, LenLeft) when LenLeft > 0 -> receive_request_data(_Req, _) -> throw(<<"expected more data">>). -update_doc_result_to_json({#doc{id=Id,revs=Rev}, access}) -> +update_doc_result_to_json({#doc{id = Id, revs = Rev}, access}) -> update_doc_result_to_json({{Id, Rev}, access}); update_doc_result_to_json({error, _} = Error) -> {_Code, Err, Msg} = chttpd:error_info(Error), @@ -2001,13 +2001,12 @@ parse_shards_opt("placement", Req, Default) -> throw({bad_request, Err}) end end; - - parse_shards_opt("access", Req, Value) when is_list(Value) -> parse_shards_opt("access", Req, list_to_existing_atom(Value)); parse_shards_opt("access", _Req, Value) when Value =:= true -> case config:get_boolean("per_doc_access", "enabled", false) of - true -> true; + true -> + true; false -> Err = ?l2b(["The `access` option is not available on this CouchDB installation."]), throw({bad_request, Err}) @@ -2017,7 +2016,6 @@ parse_shards_opt("access", _Req, Value) when Value =:= false -> parse_shards_opt("access", _Req, _Value) -> Err = ?l2b(["The `access` value should be a boolean."]), throw({bad_request, Err}); - parse_shards_opt(Param, Req, Default) -> couch_log:error("~n parse_shards_opt Param: ~p, Default: ~p~n", [Param, Default]), Val = chttpd:qs_value(Req, Param, Default), @@ -2027,7 +2025,6 @@ parse_shards_opt(Param, Req, Default) -> false -> throw({bad_request, Err}) end. - parse_engine_opt(Req) -> case chttpd:qs_value(Req, "engine") of undefined -> diff --git a/src/chttpd/src/chttpd_view.erl b/src/chttpd/src/chttpd_view.erl index f74088dbcc6..44459b3cfc9 100644 --- a/src/chttpd/src/chttpd_view.erl +++ b/src/chttpd/src/chttpd_view.erl @@ -69,20 +69,20 @@ fabric_query_view(Db, Req, DDoc, ViewName, Args) -> Max = chttpd:chunked_response_buffer_size(), VAcc = #vacc{db = Db, req = Req, threshold = Max}, Options = [{user_ctx, Req#httpd.user_ctx}], -% {ok, Resp} = fabric:query_view(Db, Options, DDoc, ViewName, -% fun view_cb/2, VAcc, Args), -% {ok, Resp#vacc.resp}. -% % TODO: This might just be a debugging leftover, we might be able -% % to undo this by just returning {ok, Resp#vacc.resp} -% % However, this *might* be here because we need to handle -% % errors here now, because access might tell us to. -% case fabric:query_view(Db, Options, DDoc, ViewName, -% fun view_cb/2, VAcc, Args) of -% {ok, Resp} -> -% {ok, Resp#vacc.resp}; -% {error, Error} -> -% throw(Error) -% end. + % {ok, Resp} = fabric:query_view(Db, Options, DDoc, ViewName, + % fun view_cb/2, VAcc, Args), + % {ok, Resp#vacc.resp}. + % % TODO: This might just be a debugging leftover, we might be able + % % to undo this by just returning {ok, Resp#vacc.resp} + % % However, this *might* be here because we need to handle + % % errors here now, because access might tell us to. + % case fabric:query_view(Db, Options, DDoc, ViewName, + % fun view_cb/2, VAcc, Args) of + % {ok, Resp} -> + % {ok, Resp#vacc.resp}; + % {error, Error} -> + % throw(Error) + % end. {ok, Resp} = fabric:query_view( Db, diff --git a/src/couch/src/couch_access_native_proc.erl b/src/couch/src/couch_access_native_proc.erl index 965b124de4a..38c8e573814 100644 --- a/src/couch/src/couch_access_native_proc.erl +++ b/src/couch/src/couch_access_native_proc.erl @@ -13,7 +13,6 @@ -module(couch_access_native_proc). -behavior(gen_server). - -export([ start_link/0, set_timeout/2, @@ -29,71 +28,55 @@ code_change/3 ]). - -record(st, { indexes = [], - timeout = 5000 % TODO: make configurable + % TODO: make configurable + timeout = 5000 }). start_link() -> gen_server:start_link(?MODULE, [], []). - set_timeout(Pid, TimeOut) when is_integer(TimeOut), TimeOut > 0 -> gen_server:call(Pid, {set_timeout, TimeOut}). - prompt(Pid, Data) -> gen_server:call(Pid, {prompt, Data}). - init(_) -> {ok, #st{}}. - terminate(_Reason, _St) -> ok. - handle_call({set_timeout, TimeOut}, _From, St) -> - {reply, ok, St#st{timeout=TimeOut}}; - + {reply, ok, St#st{timeout = TimeOut}}; handle_call({prompt, [<<"reset">>]}, _From, St) -> - {reply, true, St#st{indexes=[]}}; - + {reply, true, St#st{indexes = []}}; handle_call({prompt, [<<"reset">>, _QueryConfig]}, _From, St) -> - {reply, true, St#st{indexes=[]}}; - + {reply, true, St#st{indexes = []}}; handle_call({prompt, [<<"add_fun">>, IndexInfo]}, _From, St) -> {reply, true, St}; - handle_call({prompt, [<<"map_doc">>, Doc]}, _From, St) -> {reply, map_doc(St, mango_json:to_binary(Doc)), St}; - handle_call({prompt, [<<"reduce">>, _, _]}, _From, St) -> {reply, null, St}; - handle_call({prompt, [<<"rereduce">>, _, _]}, _From, St) -> {reply, null, St}; - handle_call({prompt, [<<"index_doc">>, Doc]}, _From, St) -> {reply, [[]], St}; - handle_call(Msg, _From, St) -> {stop, {invalid_call, Msg}, {invalid_call, Msg}, St}. handle_cast(garbage_collect, St) -> erlang:garbage_collect(), {noreply, St}; - handle_cast(Msg, St) -> {stop, {invalid_cast, Msg}, St}. - handle_info(Msg, St) -> {stop, {invalid_info, Msg}, St}. - code_change(_OldVsn, St, _Extra) -> {ok, St}. @@ -115,7 +98,8 @@ code_change(_OldVsn, St, _Extra) -> map_doc(_St, {Doc}) -> case couch_util:get_value(<<"_access">>, Doc) of undefined -> - [[],[]]; % do not index this doc + % do not index this doc + [[], []]; Access when is_list(Access) -> Id = couch_util:get_value(<<"_id">>, Doc), Rev = couch_util:get_value(<<"_rev">>, Doc), @@ -123,21 +107,33 @@ map_doc(_St, {Doc}) -> Deleted = couch_util:get_value(<<"_deleted">>, Doc, false), BodySp = couch_util:get_value(<<"_body_sp">>, Doc), % by-access-id - ById = case Deleted of - false -> - lists:map(fun(UserOrRole) -> [ - [[UserOrRole, Id], Rev] - ] end, Access); - _True -> [[]] - end, + ById = + case Deleted of + false -> + lists:map( + fun(UserOrRole) -> + [ + [[UserOrRole, Id], Rev] + ] + end, + Access + ); + _True -> + [[]] + end, % by-access-seq - BySeq = lists:map(fun(UserOrRole) -> [ - [[UserOrRole, Seq], [{rev, Rev}, {deleted, Deleted}, {body_sp, BodySp}]] - ] end, Access), + BySeq = lists:map( + fun(UserOrRole) -> + [ + [[UserOrRole, Seq], [{rev, Rev}, {deleted, Deleted}, {body_sp, BodySp}]] + ] + end, + Access + ), ById ++ BySeq; Else -> % TODO: no comprende: should not be needed once we implement % _access field validation - [[],[]] + [[], []] end. diff --git a/src/couch/src/couch_bt_engine.erl b/src/couch/src/couch_bt_engine.erl index bd778f33b49..d653a8eea20 100644 --- a/src/couch/src/couch_bt_engine.erl +++ b/src/couch/src/couch_bt_engine.erl @@ -672,7 +672,6 @@ id_tree_split(#full_doc_info{} = Info) -> id_tree_join(Id, {HighSeq, Deleted, DiskTree}) -> % Handle old formats before data_size was added id_tree_join(Id, {HighSeq, Deleted, #size_info{}, DiskTree}); - id_tree_join(Id, {HighSeq, Deleted, Sizes, DiskTree}) -> id_tree_join(Id, {HighSeq, Deleted, Sizes, DiskTree, []}); id_tree_join(Id, {HighSeq, Deleted, Sizes, DiskTree, Access}) -> diff --git a/src/couch/src/couch_btree.erl b/src/couch/src/couch_btree.erl index d7ca7bab4bf..b908421f202 100644 --- a/src/couch/src/couch_btree.erl +++ b/src/couch/src/couch_btree.erl @@ -116,9 +116,11 @@ full_reduce_with_options(Bt, Options0) -> end, [UserName] = proplists:get_value(start_key, Options0, <<"">>), EndKey = {[UserName, {[]}]}, - Options = Options0 ++ [ - {end_key, EndKey} - ], + Options = + Options0 ++ + [ + {end_key, EndKey} + ], fold_reduce(Bt, CountFun, 0, Options). size(#btree{root = nil}) -> diff --git a/src/couch/src/couch_db.erl b/src/couch/src/couch_db.erl index 7c7a67500fd..cc2810ae59a 100644 --- a/src/couch/src/couch_db.erl +++ b/src/couch/src/couch_db.erl @@ -140,7 +140,8 @@ ]). -include_lib("couch/include/couch_db.hrl"). --include_lib("couch_mrview/include/couch_mrview.hrl"). % TODO: can we do without this? +% TODO: can we do without this? +-include_lib("couch_mrview/include/couch_mrview.hrl"). -include("couch_db_int.hrl"). -define(DBNAME_REGEX, @@ -292,7 +293,7 @@ wait_for_compaction(#db{main_pid = Pid} = Db, Timeout) -> is_compacting(DbName) -> couch_server:is_compacting(DbName). -has_access_enabled(#db{access=true}) -> true; +has_access_enabled(#db{access = true}) -> true; has_access_enabled(_) -> false. is_read_from_ddoc_cache(Options) -> @@ -308,10 +309,11 @@ open_doc(Db, IdOrDocInfo) -> open_doc(Db, Id, Options0) -> increment_stat(Db, [couchdb, database_reads]), - Options = case has_access_enabled(Db) of - true -> Options0 ++ [conflicts]; - _Else -> Options0 - end, + Options = + case has_access_enabled(Db) of + true -> Options0 ++ [conflicts]; + _Else -> Options0 + end, case open_doc_int(Db, Id, Options) of {ok, #doc{deleted = true} = Doc} -> case lists:member(deleted, Options) of @@ -791,8 +793,8 @@ security_error_type(#user_ctx{name = null}) -> security_error_type(#user_ctx{name = _}) -> forbidden. -is_per_user_ddoc(#doc{access=[]}) -> false; -is_per_user_ddoc(#doc{access=[<<"_users">>]}) -> false; +is_per_user_ddoc(#doc{access = []}) -> false; +is_per_user_ddoc(#doc{access = [<<"_users">>]}) -> false; is_per_user_ddoc(_) -> true. validate_access(Db, Doc) -> @@ -801,17 +803,20 @@ validate_access(Db, Doc) -> validate_access(Db, Doc, Options) -> validate_access1(has_access_enabled(Db), Db, Doc, Options). -validate_access1(false, _Db, _Doc, _Options) -> ok; -validate_access1(true, Db, #doc{meta=Meta}=Doc, Options) -> +validate_access1(false, _Db, _Doc, _Options) -> + ok; +validate_access1(true, Db, #doc{meta = Meta} = Doc, Options) -> case proplists:get_value(conflicts, Meta) of - undefined -> % no conflicts + % no conflicts + undefined -> case is_read_from_ddoc_cache(Options) andalso is_per_user_ddoc(Doc) of true -> throw({not_found, missing}); _False -> validate_access2(Db, Doc) end; - _Else -> % only admins can read conflicted docs in _access dbs - % TODO: expand: if leaves agree on _access, then a user should be able - % to proceed normally, only if they disagree should this become admin-only + % only admins can read conflicted docs in _access dbs + _Else -> + % TODO: expand: if leaves agree on _access, then a user should be able + % to proceed normally, only if they disagree should this become admin-only case is_admin(Db) of true -> ok; _Else2 -> throw({forbidden, <<"document is in conflict">>}) @@ -824,36 +829,35 @@ validate_access3(true) -> ok; % TODO: fix language validate_access3(_) -> throw({forbidden, <<"can't touch this">>}). -check_access(Db, #doc{access=Access}) -> +check_access(Db, #doc{access = Access}) -> check_access(Db, Access); check_access(Db, Access) -> %couch_log:notice("~n Db.user_ctx: ~p, Access: ~p ~n", [Db#db.user_ctx, Access]), #user_ctx{ - name=UserName, - roles=UserRoles + name = UserName, + roles = UserRoles } = Db#db.user_ctx, case Access of - [] -> - % if doc has no _access, userCtX must be admin - is_admin(Db); - Access -> - % if doc has _access, userCtx must be admin OR matching user or role - % _access = ["a", "b", ] - case is_admin(Db) of - true -> - true; - _ -> - case {check_name(UserName, Access), check_roles(UserRoles, Access)} of - {true, _} -> true; - {_, true} -> true; - _ -> false + [] -> + % if doc has no _access, userCtX must be admin + is_admin(Db); + Access -> + % if doc has _access, userCtx must be admin OR matching user or role + % _access = ["a", "b", ] + case is_admin(Db) of + true -> + true; + _ -> + case {check_name(UserName, Access), check_roles(UserRoles, Access)} of + {true, _} -> true; + {_, true} -> true; + _ -> false + end end - end end. check_name(null, _Access) -> true; -check_name(UserName, Access) -> - lists:member(UserName, Access). +check_name(UserName, Access) -> lists:member(UserName, Access). % nicked from couch_db:check_security % TODO: might need DRY @@ -1003,14 +1007,14 @@ group_alike_docs([Doc | Rest], [Bucket | RestBuckets]) -> end. validate_doc_update(#db{} = Db, #doc{id = <<"_design/", _/binary>>} = Doc, _GetDiskDocFun) -> - case couch_doc:has_access(Doc) of - true -> - validate_ddoc(Db, Doc); - _Else -> - case catch check_is_admin(Db) of - ok -> validate_ddoc(Db, Doc); - Error -> Error - end + case couch_doc:has_access(Doc) of + true -> + validate_ddoc(Db, Doc); + _Else -> + case catch check_is_admin(Db) of + ok -> validate_ddoc(Db, Doc); + Error -> Error + end end; validate_doc_update(#db{validate_doc_funs = undefined} = Db, Doc, Fun) -> ValidationFuns = load_validation_funs(Db), @@ -1411,24 +1415,28 @@ validate_update(Db, Doc) -> Error -> Error end. - validate_docs_access(Db, DocBuckets, DocErrors) -> - validate_docs_access1(Db, DocBuckets, {[], DocErrors}). + validate_docs_access1(Db, DocBuckets, {[], DocErrors}). validate_docs_access1(_Db, [], {DocBuckets0, DocErrors}) -> - DocBuckets1 = lists:reverse(lists:map(fun lists:reverse/1, DocBuckets0)), - DocBuckets = case DocBuckets1 of - [[]] -> []; - Else -> Else - end, + DocBuckets1 = lists:reverse(lists:map(fun lists:reverse/1, DocBuckets0)), + DocBuckets = + case DocBuckets1 of + [[]] -> []; + Else -> Else + end, {ok, DocBuckets, lists:reverse(DocErrors)}; -validate_docs_access1(Db, [DocBucket|RestBuckets], {DocAcc, ErrorAcc}) -> - {NewBuckets, NewErrors} = lists:foldl(fun(Doc, {Acc, ErrAcc}) -> - case catch validate_access(Db, Doc) of - ok -> {[Doc|Acc], ErrAcc}; - Error -> {Acc, [{doc_tag(Doc), Error}|ErrAcc]} - end - end, {[], ErrorAcc}, DocBucket), +validate_docs_access1(Db, [DocBucket | RestBuckets], {DocAcc, ErrorAcc}) -> + {NewBuckets, NewErrors} = lists:foldl( + fun(Doc, {Acc, ErrAcc}) -> + case catch validate_access(Db, Doc) of + ok -> {[Doc | Acc], ErrAcc}; + Error -> {Acc, [{doc_tag(Doc), Error} | ErrAcc]} + end + end, + {[], ErrorAcc}, + DocBucket + ), validate_docs_access1(Db, RestBuckets, {[NewBuckets | DocAcc], NewErrors}). update_docs(Db, Docs0, Options, ?REPLICATED_CHANGES) -> @@ -1461,28 +1469,34 @@ update_docs(Db, Docs0, Options, ?REPLICATED_CHANGES) -> [merge_conflicts | Options] ), case couch_db:has_access_enabled(Db) of - false -> - % we’re done here - {ok, DocErrors}; - _ -> - AccessViolations = lists:filter(fun({_Ref, Tag}) -> Tag =:= access end, Results), - case length(AccessViolations) of - 0 -> - % we’re done here - {ok, DocErrors}; - _ -> - % dig out FDIs from Docs matching our tags/refs - DocsDict = lists:foldl(fun(Doc, Dict) -> - Tag = doc_tag(Doc), - dict:store(Tag, Doc, Dict) - end, dict:new(), Docs), - AccessResults = lists:map(fun({Ref, Access}) -> - { dict:fetch(Ref, DocsDict), Access } - end, AccessViolations), - {ok, AccessResults} - end - end; - + false -> + % we’re done here + {ok, DocErrors}; + _ -> + AccessViolations = lists:filter(fun({_Ref, Tag}) -> Tag =:= access end, Results), + case length(AccessViolations) of + 0 -> + % we’re done here + {ok, DocErrors}; + _ -> + % dig out FDIs from Docs matching our tags/refs + DocsDict = lists:foldl( + fun(Doc, Dict) -> + Tag = doc_tag(Doc), + dict:store(Tag, Doc, Dict) + end, + dict:new(), + Docs + ), + AccessResults = lists:map( + fun({Ref, Access}) -> + {dict:fetch(Ref, DocsDict), Access} + end, + AccessViolations + ), + {ok, AccessResults} + end + end; update_docs(Db, Docs0, Options, ?INTERACTIVE_EDIT) -> Docs = tag_docs(Docs0), @@ -2007,7 +2021,10 @@ open_doc_revs_int(Db, IdRevs, Options) -> % we have the rev in our list but know nothing about it {{not_found, missing}, {Pos, Rev}}; #leaf{deleted = IsDeleted, ptr = SummaryPtr} -> - {ok, make_doc(Db, Id, IsDeleted, SummaryPtr, FoundRevPath, Access)} + {ok, + make_doc( + Db, Id, IsDeleted, SummaryPtr, FoundRevPath, Access + )} end end, FoundRevs @@ -2029,29 +2046,33 @@ open_doc_revs_int(Db, IdRevs, Options) -> open_doc_int(Db, <> = Id, Options) -> case couch_db_engine:open_local_docs(Db, [Id]) of [#doc{} = Doc] -> - case Doc#doc.body of - { Body } -> - Access = couch_util:get_value(<<"_access">>, Body), - apply_open_options(Db, {ok, Doc#doc{access = Access}}, Options); - _Else -> - apply_open_options(Db, {ok, Doc}, Options) - end; + case Doc#doc.body of + {Body} -> + Access = couch_util:get_value(<<"_access">>, Body), + apply_open_options(Db, {ok, Doc#doc{access = Access}}, Options); + _Else -> + apply_open_options(Db, {ok, Doc}, Options) + end; [not_found] -> {not_found, missing} end; open_doc_int(Db, #doc_info{id = Id, revs = [RevInfo | _], access = Access} = DocInfo, Options) -> #rev_info{deleted = IsDeleted, rev = {Pos, RevId}, body_sp = Bp} = RevInfo, Doc = make_doc(Db, Id, IsDeleted, Bp, {Pos, [RevId]}, Access), - apply_open_options(Db, + apply_open_options( + Db, {ok, Doc#doc{meta = doc_meta_info(DocInfo, [], Options)}}, Options ); -open_doc_int(Db, #full_doc_info{id = Id, rev_tree = RevTree, access = Access} = FullDocInfo, Options) -> +open_doc_int( + Db, #full_doc_info{id = Id, rev_tree = RevTree, access = Access} = FullDocInfo, Options +) -> #doc_info{revs = [#rev_info{deleted = IsDeleted, rev = Rev, body_sp = Bp} | _]} = DocInfo = couch_doc:to_doc_info(FullDocInfo), {[{_, RevPath}], []} = couch_key_tree:get(RevTree, [Rev]), Doc = make_doc(Db, Id, IsDeleted, Bp, RevPath, Access), - apply_open_options(Db, + apply_open_options( + Db, {ok, Doc#doc{meta = doc_meta_info(DocInfo, RevTree, Options)}}, Options ); diff --git a/src/couch/src/couch_db_updater.erl b/src/couch/src/couch_db_updater.erl index 75b1afc9274..02136d83b3c 100644 --- a/src/couch/src/couch_db_updater.erl +++ b/src/couch/src/couch_db_updater.erl @@ -25,8 +25,8 @@ -define(DEFAULT_MAX_PARTITION_SIZE, 16#280000000). -define(DEFAULT_SECURITY_OBJECT, [ - {<<"members">>,{[{<<"roles">>,[<<"_admin">>]}]}}, - {<<"admins">>, {[{<<"roles">>,[<<"_admin">>]}]}} + {<<"members">>, {[{<<"roles">>, [<<"_admin">>]}]}}, + {<<"admins">>, {[{<<"roles">>, [<<"_admin">>]}]}} ]). -record(merge_acc, { @@ -266,10 +266,11 @@ sort_and_tag_grouped_docs(Client, GroupedDocs) -> % duplicate documents if the incoming groups are not sorted, so as a sanity % check we sort them again here. See COUCHDB-2735. Cmp = fun - ([], []) -> false; % TODO: re-evaluate this addition, might be - % superflous now - ([#doc{id=A}|_], [#doc{id=B}|_]) -> A < B - end, + % TODO: re-evaluate this addition, might be + ([], []) -> false; + % superflous now + ([#doc{id = A} | _], [#doc{id = B} | _]) -> A < B + end, lists:map( fun(DocGroup) -> [{Client, maybe_tag_doc(D)} || D <- DocGroup] @@ -674,12 +675,12 @@ update_docs_int(Db, DocsList, LocalDocs, MergeConflicts, UserCtx) -> UpdateSeq = couch_db_engine:get_update_seq(Db), RevsLimit = couch_db_engine:get_revs_limit(Db), - Ids = [Id || [{_Client, #doc{id=Id}}|_] <- DocsList], + Ids = [Id || [{_Client, #doc{id = Id}} | _] <- DocsList], % TODO: maybe a perf hit, instead of zip3-ing existing Accesses into % our doc lists, maybe find 404 docs differently down in % validate_docs_access (revs is [], which we can then use % to skip validation as we know it is the first doc rev) - Accesses = [Access || [{_Client, #doc{access=Access}}|_] <- DocsList], + Accesses = [Access || [{_Client, #doc{access = Access}} | _] <- DocsList], % lookup up the old documents, if they exist. OldDocLookups = couch_db_engine:open_docs(Db, Ids), @@ -688,7 +689,7 @@ update_docs_int(Db, DocsList, LocalDocs, MergeConflicts, UserCtx) -> (_Id, #full_doc_info{} = FDI, _Access) -> FDI; (Id, not_found, Access) -> - #full_doc_info{id=Id,access=Access} + #full_doc_info{id = Id, access = Access} end, Ids, OldDocLookups, @@ -737,12 +738,12 @@ update_docs_int(Db, DocsList, LocalDocs, MergeConflicts, UserCtx) -> %couch_log:notice("~nDb: ~p, UserCtx: ~p~n", [Db, UserCtx]), - - { DocsListValidated, OldDocInfosValidated } = validate_docs_access(Db, UserCtx, DocsList, OldDocInfos), + {DocsListValidated, OldDocInfosValidated} = validate_docs_access( + Db, UserCtx, DocsList, OldDocInfos + ), %couch_log:notice("~nDocsListValidated: ~p, OldDocInfosValidated: ~p~n", [DocsListValidated, OldDocInfosValidated]), - {ok, AccOut} = merge_rev_trees(DocsListValidated, OldDocInfosValidated, AccIn), #merge_acc{ add_infos = NewFullDocInfos, @@ -771,7 +772,7 @@ update_docs_int(Db, DocsList, LocalDocs, MergeConflicts, UserCtx) -> % Check if we just updated any non-access design documents, % and update the validation funs if we did. - NonAccessIds = [Id || [{_Client, #doc{id=Id,access=[]}}|_] <- DocsList], + NonAccessIds = [Id || [{_Client, #doc{id = Id, access = []}} | _] <- DocsList], UpdatedDDocIds = lists:flatmap( fun (<<"_design/", _/binary>> = Id) -> [Id]; @@ -789,55 +790,68 @@ update_docs_int(Db, DocsList, LocalDocs, MergeConflicts, UserCtx) -> % true; % at this point, we already validated this Db is access enabled, so do the checks right away. -check_access(Db, UserCtx, Access) -> couch_db:check_access(Db#db{user_ctx=UserCtx}, Access). +check_access(Db, UserCtx, Access) -> couch_db:check_access(Db#db{user_ctx = UserCtx}, Access). validate_docs_access(Db, UserCtx, DocsList, OldDocInfos) -> case couch_db:has_access_enabled(Db) of true -> validate_docs_access_int(Db, UserCtx, DocsList, OldDocInfos); - _Else -> { DocsList, OldDocInfos } + _Else -> {DocsList, OldDocInfos} end. validate_docs_access_int(Db, UserCtx, DocsList, OldDocInfos) -> validate_docs_access(Db, UserCtx, DocsList, OldDocInfos, [], []). validate_docs_access(_Db, _UserCtx, [], [], DocsListValidated, OldDocInfosValidated) -> - { lists:reverse(DocsListValidated), lists:reverse(OldDocInfosValidated) }; -validate_docs_access(Db, UserCtx, [Docs | DocRest], [OldInfo | OldInfoRest], DocsListValidated, OldDocInfosValidated) -> + {lists:reverse(DocsListValidated), lists:reverse(OldDocInfosValidated)}; +validate_docs_access( + Db, UserCtx, [Docs | DocRest], [OldInfo | OldInfoRest], DocsListValidated, OldDocInfosValidated +) -> % loop over Docs as {Client, NewDoc} % validate Doc % if valid, then put back in Docs % if not, then send_result and skip %couch_log:notice("~nvalidate_docs_access() UserCtx: ~p, Docs: ~p, OldInfo: ~p~n", [UserCtx, Docs, OldInfo]), - NewDocs = lists:foldl(fun({ Client, Doc }, Acc) -> - %couch_log:notice("~nvalidate_docs_access lists:foldl() Doc: ~p Doc#doc.access: ~p~n", [Doc, Doc#doc.access]), + NewDocs = lists:foldl( + fun({Client, Doc}, Acc) -> + %couch_log:notice("~nvalidate_docs_access lists:foldl() Doc: ~p Doc#doc.access: ~p~n", [Doc, Doc#doc.access]), - % check if we are allowed to update the doc, skip when new doc - OldDocMatchesAccess = case OldInfo#full_doc_info.rev_tree of - [] -> true; - _ -> check_access(Db, UserCtx, OldInfo#full_doc_info.access) - end, + % check if we are allowed to update the doc, skip when new doc + OldDocMatchesAccess = + case OldInfo#full_doc_info.rev_tree of + [] -> true; + _ -> check_access(Db, UserCtx, OldInfo#full_doc_info.access) + end, - NewDocMatchesAccess = check_access(Db, UserCtx, Doc#doc.access), - %couch_log:notice("~nvalidate_docs_access lists:foldl() OldDocMatchesAccess: ~p, NewDocMatchesAccess: ~p, andalso: ~p~n", [OldDocMatchesAccess, NewDocMatchesAccess, OldDocMatchesAccess andalso NewDocMatchesAccess]), + NewDocMatchesAccess = check_access(Db, UserCtx, Doc#doc.access), + %couch_log:notice("~nvalidate_docs_access lists:foldl() OldDocMatchesAccess: ~p, NewDocMatchesAccess: ~p, andalso: ~p~n", [OldDocMatchesAccess, NewDocMatchesAccess, OldDocMatchesAccess andalso NewDocMatchesAccess]), - case OldDocMatchesAccess andalso NewDocMatchesAccess of - true -> % if valid, then send to DocsListValidated, OldDocsInfo + case OldDocMatchesAccess andalso NewDocMatchesAccess of + % if valid, then send to DocsListValidated, OldDocsInfo + true -> % and store the access context on the new doc - [{Client, Doc} | Acc]; - false -> % if invalid, then send_result tagged `access`(c.f. `conflict) - % and don’t add to DLV, nor ODI - send_result(Client, Doc, access), - Acc - end - end, [], Docs), - - { NewDocsListValidated, NewOldDocInfosValidated } = case length(NewDocs) of - 0 -> % we sent out all docs as invalid access, drop the old doc info associated with it - { [NewDocs | DocsListValidated], OldDocInfosValidated }; - _ -> - { [NewDocs | DocsListValidated], [OldInfo | OldDocInfosValidated] } - end, - validate_docs_access(Db, UserCtx, DocRest, OldInfoRest, NewDocsListValidated, NewOldDocInfosValidated). + [{Client, Doc} | Acc]; + % if invalid, then send_result tagged `access`(c.f. `conflict) + false -> + % and don’t add to DLV, nor ODI + send_result(Client, Doc, access), + Acc + end + end, + [], + Docs + ), + + {NewDocsListValidated, NewOldDocInfosValidated} = + case length(NewDocs) of + % we sent out all docs as invalid access, drop the old doc info associated with it + 0 -> + {[NewDocs | DocsListValidated], OldDocInfosValidated}; + _ -> + {[NewDocs | DocsListValidated], [OldInfo | OldDocInfosValidated]} + end, + validate_docs_access( + Db, UserCtx, DocRest, OldInfoRest, NewDocsListValidated, NewOldDocInfosValidated + ). apply_local_docs_access(Db, Docs) -> apply_local_docs_access1(couch_db:has_access_enabled(Db), Docs). @@ -845,10 +859,13 @@ apply_local_docs_access(Db, Docs) -> apply_local_docs_access1(false, Docs) -> Docs; apply_local_docs_access1(true, Docs) -> - lists:map(fun({Client, #doc{access = Access, body = {Body}} = Doc}) -> - Doc1 = Doc#doc{body = {[{<<"_access">>, Access} | Body]}}, - {Client, Doc1} - end, Docs). + lists:map( + fun({Client, #doc{access = Access, body = {Body}} = Doc}) -> + Doc1 = Doc#doc{body = {[{<<"_access">>, Access} | Body]}}, + {Client, Doc1} + end, + Docs + ). update_local_doc_revs(Docs) -> lists:foldl( @@ -1038,14 +1055,15 @@ get_meta_body_size(Meta) -> default_security_object(<<"shards/", _/binary>>) -> case config:get("couchdb", "default_security", "admin_only") of - "admin_only" -> ?DEFAULT_SECURITY_OBJECT; + "admin_only" -> + ?DEFAULT_SECURITY_OBJECT; Everyone when Everyone == "everyone"; Everyone == "admin_local" -> [] end; default_security_object(_DbName) -> case config:get("couchdb", "default_security", "admin_only") of Admin when Admin == "admin_only"; Admin == "admin_local" -> - ?DEFAULT_SECURITY_OBJECT; + ?DEFAULT_SECURITY_OBJECT; "everyone" -> [] end. diff --git a/src/couch/src/couch_doc.erl b/src/couch/src/couch_doc.erl index 70d593300a7..dec3301d485 100644 --- a/src/couch/src/couch_doc.erl +++ b/src/couch/src/couch_doc.erl @@ -430,7 +430,10 @@ to_doc_info_path(#full_doc_info{id = Id, rev_tree = Tree, update_seq = FDISeq, a ), [{_RevInfo, WinPath} | _] = SortedRevInfosAndPath, RevInfos = [RevInfo || {RevInfo, _Path} <- SortedRevInfosAndPath], - {#doc_info{id = Id, high_seq = max_seq(Tree, FDISeq), revs = RevInfos, access = Access}, WinPath}. + { + #doc_info{id = Id, high_seq = max_seq(Tree, FDISeq), revs = RevInfos, access = Access}, + WinPath + }. rev_info({#leaf{} = Leaf, {Pos, [RevId | _]}}) -> #rev_info{ @@ -472,7 +475,7 @@ is_deleted(Tree) -> get_access({Props}) -> get_access(couch_doc:from_json_obj({Props})); -get_access(#doc{access=Access}) -> +get_access(#doc{access = Access}) -> Access. has_access(Doc) -> diff --git a/src/couch/src/couch_util.erl b/src/couch/src/couch_util.erl index cb29ce8b3ef..4199ceb9c59 100644 --- a/src/couch/src/couch_util.erl +++ b/src/couch/src/couch_util.erl @@ -876,8 +876,7 @@ validate_design_access(Db, DDoc) -> validate_design_access1(DDoc, couch_db:has_access_enabled(Db)). validate_design_access1(_DDoc, false) -> ok; -validate_design_access1(DDoc, true) -> - is_users_ddoc(DDoc). +validate_design_access1(DDoc, true) -> is_users_ddoc(DDoc). -is_users_ddoc(#doc{access=[<<"_users">>]}) -> ok; +is_users_ddoc(#doc{access = [<<"_users">>]}) -> ok; is_users_ddoc(_) -> throw({forbidden, <<"per-user ddoc access">>}). diff --git a/src/couch/test/eunit/couchdb_access_tests.erl b/src/couch/test/eunit/couchdb_access_tests.erl index 33fb576ff61..126e43fb262 100644 --- a/src/couch/test/eunit/couchdb_access_tests.erl +++ b/src/couch/test/eunit/couchdb_access_tests.erl @@ -18,10 +18,12 @@ -define(ADMIN_REQ_HEADERS, [?CONTENT_JSON, {basic_auth, {"a", "a"}}]). -define(USERX_REQ_HEADERS, [?CONTENT_JSON, {basic_auth, {"x", "x"}}]). -define(USERY_REQ_HEADERS, [?CONTENT_JSON, {basic_auth, {"y", "y"}}]). --define(SECURITY_OBJECT, {[ - {<<"members">>,{[{<<"roles">>,[<<"_admin">>, <<"_users">>]}]}}, - {<<"admins">>, {[{<<"roles">>,[<<"_admin">>]}]}} -]}). +-define(SECURITY_OBJECT, + {[ + {<<"members">>, {[{<<"roles">>, [<<"_admin">>, <<"_users">>]}]}}, + {<<"admins">>, {[{<<"roles">>, [<<"_admin">>]}]}} + ]} +). url() -> Addr = config:get("httpd", "bind_address", "127.0.0.1"), @@ -31,7 +33,9 @@ before_each(_) -> R = test_request:put(url() ++ "/db?q=1&n=1&access=true", ?ADMIN_REQ_HEADERS, ""), %?debugFmt("~nRequest: ~p~n", [R]), {ok, 201, _, _} = R, - {ok, _, _, _} = test_request:put(url() ++ "/db/_security", ?ADMIN_REQ_HEADERS, jiffy:encode(?SECURITY_OBJECT)), + {ok, _, _, _} = test_request:put( + url() ++ "/db/_security", ?ADMIN_REQ_HEADERS, jiffy:encode(?SECURITY_OBJECT) + ), url(). after_each(_, Url) -> @@ -43,10 +47,10 @@ after_each(_, Url) -> before_all() -> Couch = test_util:start_couch([chttpd, couch_replicator]), Hashed = couch_passwords:hash_admin_password("a"), - ok = config:set("admins", "a", binary_to_list(Hashed), _Persist=false), - ok = config:set("couchdb", "uuid", "21ac467c1bc05e9d9e9d2d850bb1108f", _Persist=false), - ok = config:set("log", "level", "debug", _Persist=false), - ok = config:set("per_doc_access", "enabled", "true", _Persist=false), + ok = config:set("admins", "a", binary_to_list(Hashed), _Persist = false), + ok = config:set("couchdb", "uuid", "21ac467c1bc05e9d9e9d2d850bb1108f", _Persist = false), + ok = config:set("log", "level", "debug", _Persist = false), + ok = config:set("per_doc_access", "enabled", "true", _Persist = false), % cleanup and setup {ok, _, _, _} = test_request:delete(url() ++ "/db", ?ADMIN_REQ_HEADERS), @@ -73,7 +77,6 @@ after_all(_) -> access_test_() -> Tests = [ - % Server config fun should_not_let_create_access_db_if_disabled/2, @@ -109,7 +112,6 @@ access_test_() -> fun should_let_admin_fetch_all_docs/2, fun should_let_user_fetch_their_own_all_docs/2, - % _changes fun should_let_admin_fetch_changes/2, fun should_let_user_fetch_their_own_changes/2, @@ -134,7 +136,6 @@ access_test_() -> % _revs_diff for docs you don’t have access to fun should_not_allow_user_to_revs_diff_other_docs/2 - % TODO: create test db with role and not _users in _security.members % and make sure a user in that group can access while a user not % in that group cant @@ -145,7 +146,8 @@ access_test_() -> "Access tests", { setup, - fun before_all/0, fun after_all/1, + fun before_all/0, + fun after_all/1, [ make_test_cases(clustered, Tests) ] @@ -159,7 +161,7 @@ make_test_cases(Mod, Funs) -> }. % Doc creation - % http://127.0.0.1:64903/db/a?revs=true&open_revs=%5B%221-23202479633c2b380f79507a776743d5%22%5D&latest=true +% http://127.0.0.1:64903/db/a?revs=true&open_revs=%5B%221-23202479633c2b380f79507a776743d5%22%5D&latest=true % should_do_the_thing(_PortType, Url) -> % ?_test(begin @@ -172,9 +174,9 @@ make_test_cases(Mod, Funs) -> % should_not_let_create_access_db_if_disabled(_PortType, Url) -> - ok = config:set("per_doc_access", "enabled", "false", _Persist=false), + ok = config:set("per_doc_access", "enabled", "false", _Persist = false), {ok, Code, _, _} = test_request:put(url() ++ "/db?q=1&n=1&access=true", ?ADMIN_REQ_HEADERS, ""), - ok = config:set("per_doc_access", "enabled", "true", _Persist=false), + ok = config:set("per_doc_access", "enabled", "true", _Persist = false), ?_assertEqual(400, Code). should_not_let_anonymous_user_create_doc(_PortType, Url) -> @@ -193,292 +195,489 @@ should_not_let_anonymous_user_create_doc(_PortType, Url) -> ?_assertEqual(401, Code). should_let_admin_create_doc_with_access(_PortType, Url) -> - {ok, Code, _, _} = test_request:put(Url ++ "/db/a", - ?ADMIN_REQ_HEADERS, "{\"a\":1,\"_access\":[\"x\"]}"), + {ok, Code, _, _} = test_request:put( + Url ++ "/db/a", + ?ADMIN_REQ_HEADERS, + "{\"a\":1,\"_access\":[\"x\"]}" + ), ?_assertEqual(201, Code). should_let_admin_create_doc_without_access(_PortType, Url) -> - {ok, Code, _, _} = test_request:put(Url ++ "/db/a", - ?ADMIN_REQ_HEADERS, "{\"a\":1}"), + {ok, Code, _, _} = test_request:put( + Url ++ "/db/a", + ?ADMIN_REQ_HEADERS, + "{\"a\":1}" + ), ?_assertEqual(201, Code). should_let_user_create_doc_for_themselves(_PortType, Url) -> - {ok, Code, _, _} = test_request:put(Url ++ "/db/b", - ?USERX_REQ_HEADERS, "{\"a\":1,\"_access\":[\"x\"]}"), + {ok, Code, _, _} = test_request:put( + Url ++ "/db/b", + ?USERX_REQ_HEADERS, + "{\"a\":1,\"_access\":[\"x\"]}" + ), ?_assertEqual(201, Code). should_not_let_user_create_doc_for_someone_else(_PortType, Url) -> - {ok, Code, _, _} = test_request:put(Url ++ "/db/c", - ?USERY_REQ_HEADERS, "{\"a\":1,\"_access\":[\"x\"]}"), + {ok, Code, _, _} = test_request:put( + Url ++ "/db/c", + ?USERY_REQ_HEADERS, + "{\"a\":1,\"_access\":[\"x\"]}" + ), ?_assertEqual(403, Code). should_let_user_create_access_ddoc(_PortType, Url) -> - {ok, Code, _, _} = test_request:put(Url ++ "/db/_design/dx", - ?USERX_REQ_HEADERS, "{\"a\":1,\"_access\":[\"x\"]}"), + {ok, Code, _, _} = test_request:put( + Url ++ "/db/_design/dx", + ?USERX_REQ_HEADERS, + "{\"a\":1,\"_access\":[\"x\"]}" + ), ?_assertEqual(201, Code). access_ddoc_should_have_no_effects(_PortType, Url) -> ?_test(begin - Ddoc = "{ \"_access\":[\"x\"], \"validate_doc_update\": \"function(newDoc, oldDoc, userCtx) { throw({unauthorized: 'throw error'})}\", \"views\": { \"foo\": { \"map\": \"function(doc) { emit(doc._id) }\" } }, \"shows\": { \"boo\": \"function() {}\" }, \"lists\": { \"hoo\": \"function() {}\" }, \"update\": { \"goo\": \"function() {}\" }, \"filters\": { \"loo\": \"function() {}\" } }", - {ok, Code, _, _} = test_request:put(Url ++ "/db/_design/dx", - ?USERX_REQ_HEADERS, Ddoc), + Ddoc = + "{ \"_access\":[\"x\"], \"validate_doc_update\": \"function(newDoc, oldDoc, userCtx) { throw({unauthorized: 'throw error'})}\", \"views\": { \"foo\": { \"map\": \"function(doc) { emit(doc._id) }\" } }, \"shows\": { \"boo\": \"function() {}\" }, \"lists\": { \"hoo\": \"function() {}\" }, \"update\": { \"goo\": \"function() {}\" }, \"filters\": { \"loo\": \"function() {}\" } }", + {ok, Code, _, _} = test_request:put( + Url ++ "/db/_design/dx", + ?USERX_REQ_HEADERS, + Ddoc + ), ?assertEqual(201, Code), - {ok, Code1, _, _} = test_request:put(Url ++ "/db/b", - ?USERX_REQ_HEADERS, "{\"a\":1,\"_access\":[\"x\"]}"), + {ok, Code1, _, _} = test_request:put( + Url ++ "/db/b", + ?USERX_REQ_HEADERS, + "{\"a\":1,\"_access\":[\"x\"]}" + ), ?assertEqual(201, Code1), - {ok, Code2, _, _} = test_request:get(Url ++ "/db/_design/dx/_view/foo", - ?USERX_REQ_HEADERS), + {ok, Code2, _, _} = test_request:get( + Url ++ "/db/_design/dx/_view/foo", + ?USERX_REQ_HEADERS + ), ?assertEqual(404, Code2), - {ok, Code3, _, _} = test_request:get(Url ++ "/db/_design/dx/_show/boo/b", - ?USERX_REQ_HEADERS), + {ok, Code3, _, _} = test_request:get( + Url ++ "/db/_design/dx/_show/boo/b", + ?USERX_REQ_HEADERS + ), ?assertEqual(404, Code3), - {ok, Code4, _, _} = test_request:get(Url ++ "/db/_design/dx/_list/hoo/foo", - ?USERX_REQ_HEADERS), + {ok, Code4, _, _} = test_request:get( + Url ++ "/db/_design/dx/_list/hoo/foo", + ?USERX_REQ_HEADERS + ), ?assertEqual(404, Code4), - {ok, Code5, _, _} = test_request:post(Url ++ "/db/_design/dx/_update/goo", - ?USERX_REQ_HEADERS, ""), + {ok, Code5, _, _} = test_request:post( + Url ++ "/db/_design/dx/_update/goo", + ?USERX_REQ_HEADERS, + "" + ), ?assertEqual(404, Code5), - {ok, Code6, _, _} = test_request:get(Url ++ "/db/_changes?filter=dx/loo", - ?USERX_REQ_HEADERS), + {ok, Code6, _, _} = test_request:get( + Url ++ "/db/_changes?filter=dx/loo", + ?USERX_REQ_HEADERS + ), ?assertEqual(404, Code6), - {ok, Code7, _, _} = test_request:get(Url ++ "/db/_changes?filter=_view&view=dx/foo", - ?USERX_REQ_HEADERS), + {ok, Code7, _, _} = test_request:get( + Url ++ "/db/_changes?filter=_view&view=dx/foo", + ?USERX_REQ_HEADERS + ), ?assertEqual(404, Code7) end). % Doc updates users_with_access_can_update_doc(_PortType, Url) -> - {ok, _, _, Body} = test_request:put(Url ++ "/db/b", - ?USERX_REQ_HEADERS, "{\"a\":1,\"_access\":[\"x\"]}"), + {ok, _, _, Body} = test_request:put( + Url ++ "/db/b", + ?USERX_REQ_HEADERS, + "{\"a\":1,\"_access\":[\"x\"]}" + ), {Json} = jiffy:decode(Body), Rev = couch_util:get_value(<<"rev">>, Json), - {ok, Code, _, _} = test_request:put(Url ++ "/db/b", + {ok, Code, _, _} = test_request:put( + Url ++ "/db/b", ?USERX_REQ_HEADERS, - "{\"a\":2,\"_access\":[\"x\"],\"_rev\":\"" ++ binary_to_list(Rev) ++ "\"}"), + "{\"a\":2,\"_access\":[\"x\"],\"_rev\":\"" ++ binary_to_list(Rev) ++ "\"}" + ), ?_assertEqual(201, Code). users_without_access_can_not_update_doc(_PortType, Url) -> - {ok, _, _, Body} = test_request:put(Url ++ "/db/b", - ?USERX_REQ_HEADERS, "{\"a\":1,\"_access\":[\"x\"]}"), + {ok, _, _, Body} = test_request:put( + Url ++ "/db/b", + ?USERX_REQ_HEADERS, + "{\"a\":1,\"_access\":[\"x\"]}" + ), {Json} = jiffy:decode(Body), Rev = couch_util:get_value(<<"rev">>, Json), - {ok, Code, _, _} = test_request:put(Url ++ "/db/b", + {ok, Code, _, _} = test_request:put( + Url ++ "/db/b", ?USERY_REQ_HEADERS, - "{\"a\":2,\"_access\":[\"y\"],\"_rev\":\"" ++ binary_to_list(Rev) ++ "\"}"), + "{\"a\":2,\"_access\":[\"y\"],\"_rev\":\"" ++ binary_to_list(Rev) ++ "\"}" + ), ?_assertEqual(403, Code). users_with_access_can_not_change_access(_PortType, Url) -> - {ok, _, _, Body} = test_request:put(Url ++ "/db/b", - ?USERX_REQ_HEADERS, "{\"a\":1,\"_access\":[\"x\"]}"), + {ok, _, _, Body} = test_request:put( + Url ++ "/db/b", + ?USERX_REQ_HEADERS, + "{\"a\":1,\"_access\":[\"x\"]}" + ), {Json} = jiffy:decode(Body), Rev = couch_util:get_value(<<"rev">>, Json), - {ok, Code, _, _} = test_request:put(Url ++ "/db/b", + {ok, Code, _, _} = test_request:put( + Url ++ "/db/b", ?USERX_REQ_HEADERS, - "{\"a\":2,\"_access\":[\"y\"],\"_rev\":\"" ++ binary_to_list(Rev) ++ "\"}"), + "{\"a\":2,\"_access\":[\"y\"],\"_rev\":\"" ++ binary_to_list(Rev) ++ "\"}" + ), ?_assertEqual(403, Code). users_with_access_can_not_remove_access(_PortType, Url) -> - {ok, _, _, Body} = test_request:put(Url ++ "/db/b", - ?USERX_REQ_HEADERS, "{\"a\":1,\"_access\":[\"x\"]}"), + {ok, _, _, Body} = test_request:put( + Url ++ "/db/b", + ?USERX_REQ_HEADERS, + "{\"a\":1,\"_access\":[\"x\"]}" + ), {Json} = jiffy:decode(Body), Rev = couch_util:get_value(<<"rev">>, Json), - {ok, Code, _, _} = test_request:put(Url ++ "/db/b", + {ok, Code, _, _} = test_request:put( + Url ++ "/db/b", ?USERX_REQ_HEADERS, - "{\"a\":2,\"_rev\":\"" ++ binary_to_list(Rev) ++ "\"}"), + "{\"a\":2,\"_rev\":\"" ++ binary_to_list(Rev) ++ "\"}" + ), ?_assertEqual(403, Code). % Doc reads should_let_admin_read_doc_with_access(_PortType, Url) -> - {ok, 201, _, _} = test_request:put(Url ++ "/db/a", - ?USERX_REQ_HEADERS, "{\"a\":1,\"_access\":[\"x\"]}"), - {ok, Code, _, _} = test_request:get(Url ++ "/db/a", - ?ADMIN_REQ_HEADERS), + {ok, 201, _, _} = test_request:put( + Url ++ "/db/a", + ?USERX_REQ_HEADERS, + "{\"a\":1,\"_access\":[\"x\"]}" + ), + {ok, Code, _, _} = test_request:get( + Url ++ "/db/a", + ?ADMIN_REQ_HEADERS + ), ?_assertEqual(200, Code). user_with_access_can_read_doc(_PortType, Url) -> - {ok, 201, _, _} = test_request:put(Url ++ "/db/a", - ?ADMIN_REQ_HEADERS, "{\"a\":1,\"_access\":[\"x\"]}"), - {ok, Code, _, _} = test_request:get(Url ++ "/db/a", - ?USERX_REQ_HEADERS), + {ok, 201, _, _} = test_request:put( + Url ++ "/db/a", + ?ADMIN_REQ_HEADERS, + "{\"a\":1,\"_access\":[\"x\"]}" + ), + {ok, Code, _, _} = test_request:get( + Url ++ "/db/a", + ?USERX_REQ_HEADERS + ), ?_assertEqual(200, Code). user_with_access_can_not_read_conflicted_doc(_PortType, Url) -> - {ok, 201, _, _} = test_request:put(Url ++ "/db/a", - ?ADMIN_REQ_HEADERS, "{\"_id\":\"f1\",\"a\":1,\"_access\":[\"x\"]}"), - {ok, 201, _, _} = test_request:put(Url ++ "/db/a?new_edits=false", - ?ADMIN_REQ_HEADERS, "{\"_id\":\"f1\",\"_rev\":\"7-XYZ\",\"a\":1,\"_access\":[\"x\"]}"), - {ok, Code, _, _} = test_request:get(Url ++ "/db/a", - ?USERX_REQ_HEADERS), + {ok, 201, _, _} = test_request:put( + Url ++ "/db/a", + ?ADMIN_REQ_HEADERS, + "{\"_id\":\"f1\",\"a\":1,\"_access\":[\"x\"]}" + ), + {ok, 201, _, _} = test_request:put( + Url ++ "/db/a?new_edits=false", + ?ADMIN_REQ_HEADERS, + "{\"_id\":\"f1\",\"_rev\":\"7-XYZ\",\"a\":1,\"_access\":[\"x\"]}" + ), + {ok, Code, _, _} = test_request:get( + Url ++ "/db/a", + ?USERX_REQ_HEADERS + ), ?_assertEqual(403, Code). admin_with_access_can_read_conflicted_doc(_PortType, Url) -> - {ok, 201, _, _} = test_request:put(Url ++ "/db/a", - ?ADMIN_REQ_HEADERS, "{\"_id\":\"a\",\"a\":1,\"_access\":[\"x\"]}"), - {ok, 201, _, _} = test_request:put(Url ++ "/db/a?new_edits=false", - ?ADMIN_REQ_HEADERS, "{\"_id\":\"a\",\"_rev\":\"7-XYZ\",\"a\":1,\"_access\":[\"x\"]}"), - {ok, Code, _, _} = test_request:get(Url ++ "/db/a", - ?ADMIN_REQ_HEADERS), + {ok, 201, _, _} = test_request:put( + Url ++ "/db/a", + ?ADMIN_REQ_HEADERS, + "{\"_id\":\"a\",\"a\":1,\"_access\":[\"x\"]}" + ), + {ok, 201, _, _} = test_request:put( + Url ++ "/db/a?new_edits=false", + ?ADMIN_REQ_HEADERS, + "{\"_id\":\"a\",\"_rev\":\"7-XYZ\",\"a\":1,\"_access\":[\"x\"]}" + ), + {ok, Code, _, _} = test_request:get( + Url ++ "/db/a", + ?ADMIN_REQ_HEADERS + ), ?_assertEqual(200, Code). user_without_access_can_not_read_doc(_PortType, Url) -> - {ok, 201, _, _} = test_request:put(Url ++ "/db/a", - ?ADMIN_REQ_HEADERS, "{\"a\":1,\"_access\":[\"x\"]}"), - {ok, Code, _, _} = test_request:get(Url ++ "/db/a", - ?USERY_REQ_HEADERS), + {ok, 201, _, _} = test_request:put( + Url ++ "/db/a", + ?ADMIN_REQ_HEADERS, + "{\"a\":1,\"_access\":[\"x\"]}" + ), + {ok, Code, _, _} = test_request:get( + Url ++ "/db/a", + ?USERY_REQ_HEADERS + ), ?_assertEqual(403, Code). user_can_not_read_doc_without_access(_PortType, Url) -> - {ok, 201, _, _} = test_request:put(Url ++ "/db/a", - ?ADMIN_REQ_HEADERS, "{\"a\":1}"), - {ok, Code, _, _} = test_request:get(Url ++ "/db/a", - ?USERX_REQ_HEADERS), + {ok, 201, _, _} = test_request:put( + Url ++ "/db/a", + ?ADMIN_REQ_HEADERS, + "{\"a\":1}" + ), + {ok, Code, _, _} = test_request:get( + Url ++ "/db/a", + ?USERX_REQ_HEADERS + ), ?_assertEqual(403, Code). % Doc deletes should_let_admin_delete_doc_with_access(_PortType, Url) -> - {ok, 201, _, _} = test_request:put(Url ++ "/db/a", - ?USERX_REQ_HEADERS, "{\"a\":1,\"_access\":[\"x\"]}"), - {ok, Code, _, _} = test_request:delete(Url ++ "/db/a?rev=1-23202479633c2b380f79507a776743d5", - ?ADMIN_REQ_HEADERS), + {ok, 201, _, _} = test_request:put( + Url ++ "/db/a", + ?USERX_REQ_HEADERS, + "{\"a\":1,\"_access\":[\"x\"]}" + ), + {ok, Code, _, _} = test_request:delete( + Url ++ "/db/a?rev=1-23202479633c2b380f79507a776743d5", + ?ADMIN_REQ_HEADERS + ), ?_assertEqual(200, Code). should_let_user_delete_doc_for_themselves(_PortType, Url) -> - {ok, 201, _, _} = test_request:put(Url ++ "/db/a", - ?USERX_REQ_HEADERS, "{\"a\":1,\"_access\":[\"x\"]}"), - {ok, _, _, _} = test_request:get(Url ++ "/db/a", - ?USERX_REQ_HEADERS), - {ok, Code, _, _} = test_request:delete(Url ++ "/db/a?rev=1-23202479633c2b380f79507a776743d5", - ?USERX_REQ_HEADERS), + {ok, 201, _, _} = test_request:put( + Url ++ "/db/a", + ?USERX_REQ_HEADERS, + "{\"a\":1,\"_access\":[\"x\"]}" + ), + {ok, _, _, _} = test_request:get( + Url ++ "/db/a", + ?USERX_REQ_HEADERS + ), + {ok, Code, _, _} = test_request:delete( + Url ++ "/db/a?rev=1-23202479633c2b380f79507a776743d5", + ?USERX_REQ_HEADERS + ), ?_assertEqual(200, Code). should_not_let_user_delete_doc_for_someone_else(_PortType, Url) -> - {ok, 201, _, _} = test_request:put(Url ++ "/db/a", - ?USERX_REQ_HEADERS, "{\"a\":1,\"_access\":[\"x\"]}"), - {ok, Code, _, _} = test_request:delete(Url ++ "/db/a?rev=1-23202479633c2b380f79507a776743d5", - ?USERY_REQ_HEADERS), + {ok, 201, _, _} = test_request:put( + Url ++ "/db/a", + ?USERX_REQ_HEADERS, + "{\"a\":1,\"_access\":[\"x\"]}" + ), + {ok, Code, _, _} = test_request:delete( + Url ++ "/db/a?rev=1-23202479633c2b380f79507a776743d5", + ?USERY_REQ_HEADERS + ), ?_assertEqual(403, Code). % _all_docs with include_docs should_let_admin_fetch_all_docs(_PortType, Url) -> - {ok, 201, _, _} = test_request:put(Url ++ "/db/a", - ?ADMIN_REQ_HEADERS, "{\"a\":1,\"_access\":[\"x\"]}"), - {ok, 201, _, _} = test_request:put(Url ++ "/db/b", - ?ADMIN_REQ_HEADERS, "{\"b\":2,\"_access\":[\"x\"]}"), - {ok, 201, _, _} = test_request:put(Url ++ "/db/c", - ?ADMIN_REQ_HEADERS, "{\"c\":3,\"_access\":[\"y\"]}"), - {ok, 201, _, _} = test_request:put(Url ++ "/db/d", - ?ADMIN_REQ_HEADERS, "{\"d\":4,\"_access\":[\"y\"]}"), - {ok, 200, _, Body} = test_request:get(Url ++ "/db/_all_docs?include_docs=true", - ?ADMIN_REQ_HEADERS), + {ok, 201, _, _} = test_request:put( + Url ++ "/db/a", + ?ADMIN_REQ_HEADERS, + "{\"a\":1,\"_access\":[\"x\"]}" + ), + {ok, 201, _, _} = test_request:put( + Url ++ "/db/b", + ?ADMIN_REQ_HEADERS, + "{\"b\":2,\"_access\":[\"x\"]}" + ), + {ok, 201, _, _} = test_request:put( + Url ++ "/db/c", + ?ADMIN_REQ_HEADERS, + "{\"c\":3,\"_access\":[\"y\"]}" + ), + {ok, 201, _, _} = test_request:put( + Url ++ "/db/d", + ?ADMIN_REQ_HEADERS, + "{\"d\":4,\"_access\":[\"y\"]}" + ), + {ok, 200, _, Body} = test_request:get( + Url ++ "/db/_all_docs?include_docs=true", + ?ADMIN_REQ_HEADERS + ), {Json} = jiffy:decode(Body), ?_assertEqual(4, proplists:get_value(<<"total_rows">>, Json)). should_let_user_fetch_their_own_all_docs(_PortType, Url) -> ?_test(begin - {ok, 201, _, _} = test_request:put(Url ++ "/db/a", - ?ADMIN_REQ_HEADERS, "{\"a\":1,\"_access\":[\"x\"]}"), - {ok, 201, _, _} = test_request:put(Url ++ "/db/b", - ?USERX_REQ_HEADERS, "{\"b\":2,\"_access\":[\"x\"]}"), - {ok, 201, _, _} = test_request:put(Url ++ "/db/c", - ?ADMIN_REQ_HEADERS, "{\"c\":3,\"_access\":[\"y\"]}"), - {ok, 201, _, _} = test_request:put(Url ++ "/db/d", - ?USERY_REQ_HEADERS, "{\"d\":4,\"_access\":[\"y\"]}"), - {ok, 200, _, Body} = test_request:get(Url ++ "/db/_all_docs?include_docs=true", - ?USERX_REQ_HEADERS), + {ok, 201, _, _} = test_request:put( + Url ++ "/db/a", + ?ADMIN_REQ_HEADERS, + "{\"a\":1,\"_access\":[\"x\"]}" + ), + {ok, 201, _, _} = test_request:put( + Url ++ "/db/b", + ?USERX_REQ_HEADERS, + "{\"b\":2,\"_access\":[\"x\"]}" + ), + {ok, 201, _, _} = test_request:put( + Url ++ "/db/c", + ?ADMIN_REQ_HEADERS, + "{\"c\":3,\"_access\":[\"y\"]}" + ), + {ok, 201, _, _} = test_request:put( + Url ++ "/db/d", + ?USERY_REQ_HEADERS, + "{\"d\":4,\"_access\":[\"y\"]}" + ), + {ok, 200, _, Body} = test_request:get( + Url ++ "/db/_all_docs?include_docs=true", + ?USERX_REQ_HEADERS + ), {Json} = jiffy:decode(Body), Rows = proplists:get_value(<<"rows">>, Json), - ?assertEqual([{[{<<"id">>,<<"a">>}, - {<<"key">>,<<"a">>}, - {<<"value">>,<<"1-23202479633c2b380f79507a776743d5">>}, - {<<"doc">>, - {[{<<"_id">>,<<"a">>}, - {<<"_rev">>,<<"1-23202479633c2b380f79507a776743d5">>}, - {<<"a">>,1}, - {<<"_access">>,[<<"x">>]}]}}]}, - {[{<<"id">>,<<"b">>}, - {<<"key">>,<<"b">>}, - {<<"value">>,<<"1-d33fb05384fa65a8081da2046595de0f">>}, - {<<"doc">>, - {[{<<"_id">>,<<"b">>}, - {<<"_rev">>,<<"1-d33fb05384fa65a8081da2046595de0f">>}, - {<<"b">>,2}, - {<<"_access">>,[<<"x">>]}]}}]}], Rows), + ?assertEqual( + [ + {[ + {<<"id">>, <<"a">>}, + {<<"key">>, <<"a">>}, + {<<"value">>, <<"1-23202479633c2b380f79507a776743d5">>}, + {<<"doc">>, + {[ + {<<"_id">>, <<"a">>}, + {<<"_rev">>, <<"1-23202479633c2b380f79507a776743d5">>}, + {<<"a">>, 1}, + {<<"_access">>, [<<"x">>]} + ]}} + ]}, + {[ + {<<"id">>, <<"b">>}, + {<<"key">>, <<"b">>}, + {<<"value">>, <<"1-d33fb05384fa65a8081da2046595de0f">>}, + {<<"doc">>, + {[ + {<<"_id">>, <<"b">>}, + {<<"_rev">>, <<"1-d33fb05384fa65a8081da2046595de0f">>}, + {<<"b">>, 2}, + {<<"_access">>, [<<"x">>]} + ]}} + ]} + ], + Rows + ), ?assertEqual(2, length(Rows)), ?assertEqual(4, proplists:get_value(<<"total_rows">>, Json)), - {ok, 200, _, Body1} = test_request:get(Url ++ "/db/_all_docs?include_docs=true", - ?USERY_REQ_HEADERS), + {ok, 200, _, Body1} = test_request:get( + Url ++ "/db/_all_docs?include_docs=true", + ?USERY_REQ_HEADERS + ), {Json1} = jiffy:decode(Body1), - ?assertEqual( [{<<"total_rows">>,4}, - {<<"offset">>,2}, - {<<"rows">>, - [{[{<<"id">>,<<"c">>}, - {<<"key">>,<<"c">>}, - {<<"value">>,<<"1-92aef5b0e4a3f4db0aba1320869bc95d">>}, - {<<"doc">>, - {[{<<"_id">>,<<"c">>}, - {<<"_rev">>,<<"1-92aef5b0e4a3f4db0aba1320869bc95d">>}, - {<<"c">>,3}, - {<<"_access">>,[<<"y">>]}]}}]}, - {[{<<"id">>,<<"d">>}, - {<<"key">>,<<"d">>}, - {<<"value">>,<<"1-ae984f6550038b1ed1565ac4b6cd8c5d">>}, - {<<"doc">>, - {[{<<"_id">>,<<"d">>}, - {<<"_rev">>,<<"1-ae984f6550038b1ed1565ac4b6cd8c5d">>}, - {<<"d">>,4}, - {<<"_access">>,[<<"y">>]}]}}]}]}], Json1) + ?assertEqual( + [ + {<<"total_rows">>, 4}, + {<<"offset">>, 2}, + {<<"rows">>, [ + {[ + {<<"id">>, <<"c">>}, + {<<"key">>, <<"c">>}, + {<<"value">>, <<"1-92aef5b0e4a3f4db0aba1320869bc95d">>}, + {<<"doc">>, + {[ + {<<"_id">>, <<"c">>}, + {<<"_rev">>, <<"1-92aef5b0e4a3f4db0aba1320869bc95d">>}, + {<<"c">>, 3}, + {<<"_access">>, [<<"y">>]} + ]}} + ]}, + {[ + {<<"id">>, <<"d">>}, + {<<"key">>, <<"d">>}, + {<<"value">>, <<"1-ae984f6550038b1ed1565ac4b6cd8c5d">>}, + {<<"doc">>, + {[ + {<<"_id">>, <<"d">>}, + {<<"_rev">>, <<"1-ae984f6550038b1ed1565ac4b6cd8c5d">>}, + {<<"d">>, 4}, + {<<"_access">>, [<<"y">>]} + ]}} + ]} + ]} + ], + Json1 + ) end). - % _changes should_let_admin_fetch_changes(_PortType, Url) -> - {ok, 201, _, _} = test_request:put(Url ++ "/db/a", - ?ADMIN_REQ_HEADERS, "{\"a\":1,\"_access\":[\"x\"]}"), - {ok, 201, _, _} = test_request:put(Url ++ "/db/b", - ?ADMIN_REQ_HEADERS, "{\"b\":2,\"_access\":[\"x\"]}"), - {ok, 201, _, _} = test_request:put(Url ++ "/db/c", - ?ADMIN_REQ_HEADERS, "{\"c\":3,\"_access\":[\"y\"]}"), - {ok, 201, _, _} = test_request:put(Url ++ "/db/d", - ?ADMIN_REQ_HEADERS, "{\"d\":4,\"_access\":[\"y\"]}"), - {ok, 200, _, Body} = test_request:get(Url ++ "/db/_changes", - ?ADMIN_REQ_HEADERS), + {ok, 201, _, _} = test_request:put( + Url ++ "/db/a", + ?ADMIN_REQ_HEADERS, + "{\"a\":1,\"_access\":[\"x\"]}" + ), + {ok, 201, _, _} = test_request:put( + Url ++ "/db/b", + ?ADMIN_REQ_HEADERS, + "{\"b\":2,\"_access\":[\"x\"]}" + ), + {ok, 201, _, _} = test_request:put( + Url ++ "/db/c", + ?ADMIN_REQ_HEADERS, + "{\"c\":3,\"_access\":[\"y\"]}" + ), + {ok, 201, _, _} = test_request:put( + Url ++ "/db/d", + ?ADMIN_REQ_HEADERS, + "{\"d\":4,\"_access\":[\"y\"]}" + ), + {ok, 200, _, Body} = test_request:get( + Url ++ "/db/_changes", + ?ADMIN_REQ_HEADERS + ), {Json} = jiffy:decode(Body), AmountOfDocs = length(proplists:get_value(<<"results">>, Json)), ?_assertEqual(4, AmountOfDocs). should_let_user_fetch_their_own_changes(_PortType, Url) -> ?_test(begin - {ok, 201, _, _} = test_request:put(Url ++ "/db/a", - ?ADMIN_REQ_HEADERS, "{\"a\":1,\"_access\":[\"x\"]}"), - {ok, 201, _, _} = test_request:put(Url ++ "/db/b", - ?ADMIN_REQ_HEADERS, "{\"b\":2,\"_access\":[\"x\"]}"), - {ok, 201, _, _} = test_request:put(Url ++ "/db/c", - ?ADMIN_REQ_HEADERS, "{\"c\":3,\"_access\":[\"y\"]}"), - {ok, 201, _, _} = test_request:put(Url ++ "/db/d", - ?ADMIN_REQ_HEADERS, "{\"d\":4,\"_access\":[\"y\"]}"), - {ok, 200, _, Body} = test_request:get(Url ++ "/db/_changes", - ?USERX_REQ_HEADERS), + {ok, 201, _, _} = test_request:put( + Url ++ "/db/a", + ?ADMIN_REQ_HEADERS, + "{\"a\":1,\"_access\":[\"x\"]}" + ), + {ok, 201, _, _} = test_request:put( + Url ++ "/db/b", + ?ADMIN_REQ_HEADERS, + "{\"b\":2,\"_access\":[\"x\"]}" + ), + {ok, 201, _, _} = test_request:put( + Url ++ "/db/c", + ?ADMIN_REQ_HEADERS, + "{\"c\":3,\"_access\":[\"y\"]}" + ), + {ok, 201, _, _} = test_request:put( + Url ++ "/db/d", + ?ADMIN_REQ_HEADERS, + "{\"d\":4,\"_access\":[\"y\"]}" + ), + {ok, 200, _, Body} = test_request:get( + Url ++ "/db/_changes", + ?USERX_REQ_HEADERS + ), {Json} = jiffy:decode(Body), - ?assertMatch([{<<"results">>, - [{[{<<"seq">>, - <<"2-", _/binary>>}, - {<<"id">>,<<"a">>}, - {<<"changes">>, - [{[{<<"rev">>,<<"1-23202479633c2b380f79507a776743d5">>}]}]}]}, - {[{<<"seq">>, - <<"3-", _/binary>>}, - {<<"id">>,<<"b">>}, - {<<"changes">>, - [{[{<<"rev">>,<<"1-d33fb05384fa65a8081da2046595de0f">>}]}]}]}]}, - {<<"last_seq">>, - <<"3-", _/binary>>}, - {<<"pending">>,2}], Json), + ?assertMatch( + [ + {<<"results">>, [ + {[ + {<<"seq">>, <<"2-", _/binary>>}, + {<<"id">>, <<"a">>}, + {<<"changes">>, [{[{<<"rev">>, <<"1-23202479633c2b380f79507a776743d5">>}]}]} + ]}, + {[ + {<<"seq">>, <<"3-", _/binary>>}, + {<<"id">>, <<"b">>}, + {<<"changes">>, [{[{<<"rev">>, <<"1-d33fb05384fa65a8081da2046595de0f">>}]}]} + ]} + ]}, + {<<"last_seq">>, <<"3-", _/binary>>}, + {<<"pending">>, 2} + ], + Json + ), AmountOfDocs = length(proplists:get_value(<<"results">>, Json)), ?assertEqual(2, AmountOfDocs) end). @@ -487,38 +686,58 @@ should_let_user_fetch_their_own_changes(_PortType, Url) -> should_not_allow_admin_access_ddoc_view_request(_PortType, Url) -> DDoc = "{\"a\":1,\"_access\":[\"x\"],\"views\":{\"foo\":{\"map\":\"function() {}\"}}}", - {ok, Code, _, _} = test_request:put(Url ++ "/db/_design/a", - ?ADMIN_REQ_HEADERS, DDoc), + {ok, Code, _, _} = test_request:put( + Url ++ "/db/_design/a", + ?ADMIN_REQ_HEADERS, + DDoc + ), ?assertEqual(201, Code), - {ok, Code1, _, _} = test_request:get(Url ++ "/db/_design/a/_view/foo", - ?ADMIN_REQ_HEADERS), + {ok, Code1, _, _} = test_request:get( + Url ++ "/db/_design/a/_view/foo", + ?ADMIN_REQ_HEADERS + ), ?_assertEqual(404, Code1). should_not_allow_user_access_ddoc_view_request(_PortType, Url) -> DDoc = "{\"a\":1,\"_access\":[\"x\"],\"views\":{\"foo\":{\"map\":\"function() {}\"}}}", - {ok, Code, _, _} = test_request:put(Url ++ "/db/_design/a", - ?ADMIN_REQ_HEADERS, DDoc), + {ok, Code, _, _} = test_request:put( + Url ++ "/db/_design/a", + ?ADMIN_REQ_HEADERS, + DDoc + ), ?assertEqual(201, Code), - {ok, Code1, _, _} = test_request:get(Url ++ "/db/_design/a/_view/foo", - ?USERX_REQ_HEADERS), + {ok, Code1, _, _} = test_request:get( + Url ++ "/db/_design/a/_view/foo", + ?USERX_REQ_HEADERS + ), ?_assertEqual(404, Code1). should_allow_admin_users_access_ddoc_view_request(_PortType, Url) -> DDoc = "{\"a\":1,\"_access\":[\"_users\"],\"views\":{\"foo\":{\"map\":\"function() {}\"}}}", - {ok, Code, _, _} = test_request:put(Url ++ "/db/_design/a", - ?ADMIN_REQ_HEADERS, DDoc), + {ok, Code, _, _} = test_request:put( + Url ++ "/db/_design/a", + ?ADMIN_REQ_HEADERS, + DDoc + ), ?assertEqual(201, Code), - {ok, Code1, _, _} = test_request:get(Url ++ "/db/_design/a/_view/foo", - ?ADMIN_REQ_HEADERS), + {ok, Code1, _, _} = test_request:get( + Url ++ "/db/_design/a/_view/foo", + ?ADMIN_REQ_HEADERS + ), ?_assertEqual(200, Code1). should_allow_user_users_access_ddoc_view_request(_PortType, Url) -> DDoc = "{\"a\":1,\"_access\":[\"_users\"],\"views\":{\"foo\":{\"map\":\"function() {}\"}}}", - {ok, Code, _, _} = test_request:put(Url ++ "/db/_design/a", - ?ADMIN_REQ_HEADERS, DDoc), + {ok, Code, _, _} = test_request:put( + Url ++ "/db/_design/a", + ?ADMIN_REQ_HEADERS, + DDoc + ), ?assertEqual(201, Code), - {ok, Code1, _, _} = test_request:get(Url ++ "/db/_design/a/_view/foo", - ?USERX_REQ_HEADERS), + {ok, Code1, _, _} = test_request:get( + Url ++ "/db/_design/a/_view/foo", + ?USERX_REQ_HEADERS + ), ?_assertEqual(200, Code1). % replication @@ -526,28 +745,47 @@ should_allow_user_users_access_ddoc_view_request(_PortType, Url) -> should_allow_admin_to_replicate_from_access_to_access(_PortType, Url) -> ?_test(begin % create target db - {ok, 201, _, _} = test_request:put(url() ++ "/db2?q=1&n=1&access=true", - ?ADMIN_REQ_HEADERS, ""), + {ok, 201, _, _} = test_request:put( + url() ++ "/db2?q=1&n=1&access=true", + ?ADMIN_REQ_HEADERS, + "" + ), % set target db security - {ok, _, _, _} = test_request:put(url() ++ "/db2/_security", - ?ADMIN_REQ_HEADERS, jiffy:encode(?SECURITY_OBJECT)), + {ok, _, _, _} = test_request:put( + url() ++ "/db2/_security", + ?ADMIN_REQ_HEADERS, + jiffy:encode(?SECURITY_OBJECT) + ), % create source docs - {ok, _, _, _} = test_request:put(Url ++ "/db/a", - ?ADMIN_REQ_HEADERS, "{\"a\":1,\"_access\":[\"x\"]}"), - {ok, _, _, _} = test_request:put(Url ++ "/db/b", - ?ADMIN_REQ_HEADERS, "{\"b\":2,\"_access\":[\"x\"]}"), - {ok, _, _, _} = test_request:put(Url ++ "/db/c", - ?ADMIN_REQ_HEADERS, "{\"c\":3,\"_access\":[\"x\"]}"), + {ok, _, _, _} = test_request:put( + Url ++ "/db/a", + ?ADMIN_REQ_HEADERS, + "{\"a\":1,\"_access\":[\"x\"]}" + ), + {ok, _, _, _} = test_request:put( + Url ++ "/db/b", + ?ADMIN_REQ_HEADERS, + "{\"b\":2,\"_access\":[\"x\"]}" + ), + {ok, _, _, _} = test_request:put( + Url ++ "/db/c", + ?ADMIN_REQ_HEADERS, + "{\"c\":3,\"_access\":[\"x\"]}" + ), % replicate AdminUrl = string:replace(Url, "http://", "http://a:a@"), - EJRequestBody = {[ - {<<"source">>, list_to_binary(AdminUrl ++ "/db")}, - {<<"target">>, list_to_binary(AdminUrl ++ "/db2")} - ]}, - {ok, ResponseCode, _, ResponseBody} = test_request:post(Url ++ "/_replicate", - ?ADMIN_REQ_HEADERS, jiffy:encode(EJRequestBody)), + EJRequestBody = + {[ + {<<"source">>, list_to_binary(AdminUrl ++ "/db")}, + {<<"target">>, list_to_binary(AdminUrl ++ "/db2")} + ]}, + {ok, ResponseCode, _, ResponseBody} = test_request:post( + Url ++ "/_replicate", + ?ADMIN_REQ_HEADERS, + jiffy:encode(EJRequestBody) + ), % assert replication status {EJResponseBody} = jiffy:decode(ResponseBody), @@ -560,16 +798,18 @@ should_allow_admin_to_replicate_from_access_to_access(_PortType, Url) -> DocsReard = couch_util:get_value(<<"docs_read">>, History), DocsWritten = couch_util:get_value(<<"docs_written">>, History), DocWriteFailures = couch_util:get_value(<<"doc_write_failures">>, History), - + ?assertEqual(3, MissingChecked), ?assertEqual(3, MissingFound), ?assertEqual(3, DocsReard), ?assertEqual(3, DocsWritten), ?assertEqual(0, DocWriteFailures), - + % assert docs in target db - {ok, 200, _, ADBody} = test_request:get(Url ++ "/db2/_all_docs?include_docs=true", - ?ADMIN_REQ_HEADERS), + {ok, 200, _, ADBody} = test_request:get( + Url ++ "/db2/_all_docs?include_docs=true", + ?ADMIN_REQ_HEADERS + ), {Json} = jiffy:decode(ADBody), ?assertEqual(3, proplists:get_value(<<"total_rows">>, Json)) end). @@ -577,28 +817,47 @@ should_allow_admin_to_replicate_from_access_to_access(_PortType, Url) -> should_allow_admin_to_replicate_from_no_access_to_access(_PortType, Url) -> ?_test(begin % create target db - {ok, 201, _, _} = test_request:put(url() ++ "/db2?q=1&n=1", - ?ADMIN_REQ_HEADERS, ""), + {ok, 201, _, _} = test_request:put( + url() ++ "/db2?q=1&n=1", + ?ADMIN_REQ_HEADERS, + "" + ), % set target db security - {ok, _, _, _} = test_request:put(url() ++ "/db2/_security", - ?ADMIN_REQ_HEADERS, jiffy:encode(?SECURITY_OBJECT)), + {ok, _, _, _} = test_request:put( + url() ++ "/db2/_security", + ?ADMIN_REQ_HEADERS, + jiffy:encode(?SECURITY_OBJECT) + ), % create source docs - {ok, _, _, _} = test_request:put(Url ++ "/db2/a", - ?ADMIN_REQ_HEADERS, "{\"a\":1,\"_access\":[\"x\"]}"), - {ok, _, _, _} = test_request:put(Url ++ "/db2/b", - ?ADMIN_REQ_HEADERS, "{\"b\":2,\"_access\":[\"x\"]}"), - {ok, _, _, _} = test_request:put(Url ++ "/db2/c", - ?ADMIN_REQ_HEADERS, "{\"c\":3,\"_access\":[\"x\"]}"), + {ok, _, _, _} = test_request:put( + Url ++ "/db2/a", + ?ADMIN_REQ_HEADERS, + "{\"a\":1,\"_access\":[\"x\"]}" + ), + {ok, _, _, _} = test_request:put( + Url ++ "/db2/b", + ?ADMIN_REQ_HEADERS, + "{\"b\":2,\"_access\":[\"x\"]}" + ), + {ok, _, _, _} = test_request:put( + Url ++ "/db2/c", + ?ADMIN_REQ_HEADERS, + "{\"c\":3,\"_access\":[\"x\"]}" + ), % replicate AdminUrl = string:replace(Url, "http://", "http://a:a@"), - EJRequestBody = {[ - {<<"source">>, list_to_binary(AdminUrl ++ "/db2")}, - {<<"target">>, list_to_binary(AdminUrl ++ "/db")} - ]}, - {ok, ResponseCode, _, ResponseBody} = test_request:post(Url ++ "/_replicate", - ?ADMIN_REQ_HEADERS, jiffy:encode(EJRequestBody)), + EJRequestBody = + {[ + {<<"source">>, list_to_binary(AdminUrl ++ "/db2")}, + {<<"target">>, list_to_binary(AdminUrl ++ "/db")} + ]}, + {ok, ResponseCode, _, ResponseBody} = test_request:post( + Url ++ "/_replicate", + ?ADMIN_REQ_HEADERS, + jiffy:encode(EJRequestBody) + ), % assert replication status {EJResponseBody} = jiffy:decode(ResponseBody), @@ -611,16 +870,18 @@ should_allow_admin_to_replicate_from_no_access_to_access(_PortType, Url) -> DocsReard = couch_util:get_value(<<"docs_read">>, History), DocsWritten = couch_util:get_value(<<"docs_written">>, History), DocWriteFailures = couch_util:get_value(<<"doc_write_failures">>, History), - + ?assertEqual(3, MissingChecked), ?assertEqual(3, MissingFound), ?assertEqual(3, DocsReard), ?assertEqual(3, DocsWritten), ?assertEqual(0, DocWriteFailures), - + % assert docs in target db - {ok, 200, _, ADBody} = test_request:get(Url ++ "/db/_all_docs?include_docs=true", - ?ADMIN_REQ_HEADERS), + {ok, 200, _, ADBody} = test_request:get( + Url ++ "/db/_all_docs?include_docs=true", + ?ADMIN_REQ_HEADERS + ), {Json} = jiffy:decode(ADBody), ?assertEqual(3, proplists:get_value(<<"total_rows">>, Json)) end). @@ -628,28 +889,47 @@ should_allow_admin_to_replicate_from_no_access_to_access(_PortType, Url) -> should_allow_admin_to_replicate_from_access_to_no_access(_PortType, Url) -> ?_test(begin % create target db - {ok, 201, _, _} = test_request:put(url() ++ "/db2?q=1&n=1", - ?ADMIN_REQ_HEADERS, ""), + {ok, 201, _, _} = test_request:put( + url() ++ "/db2?q=1&n=1", + ?ADMIN_REQ_HEADERS, + "" + ), % set target db security - {ok, _, _, _} = test_request:put(url() ++ "/db2/_security", - ?ADMIN_REQ_HEADERS, jiffy:encode(?SECURITY_OBJECT)), + {ok, _, _, _} = test_request:put( + url() ++ "/db2/_security", + ?ADMIN_REQ_HEADERS, + jiffy:encode(?SECURITY_OBJECT) + ), % create source docs - {ok, _, _, _} = test_request:put(Url ++ "/db/a", - ?ADMIN_REQ_HEADERS, "{\"a\":1,\"_access\":[\"x\"]}"), - {ok, _, _, _} = test_request:put(Url ++ "/db/b", - ?ADMIN_REQ_HEADERS, "{\"b\":2,\"_access\":[\"x\"]}"), - {ok, _, _, _} = test_request:put(Url ++ "/db/c", - ?ADMIN_REQ_HEADERS, "{\"c\":3,\"_access\":[\"x\"]}"), + {ok, _, _, _} = test_request:put( + Url ++ "/db/a", + ?ADMIN_REQ_HEADERS, + "{\"a\":1,\"_access\":[\"x\"]}" + ), + {ok, _, _, _} = test_request:put( + Url ++ "/db/b", + ?ADMIN_REQ_HEADERS, + "{\"b\":2,\"_access\":[\"x\"]}" + ), + {ok, _, _, _} = test_request:put( + Url ++ "/db/c", + ?ADMIN_REQ_HEADERS, + "{\"c\":3,\"_access\":[\"x\"]}" + ), % replicate AdminUrl = string:replace(Url, "http://", "http://a:a@"), - EJRequestBody = {[ - {<<"source">>, list_to_binary(AdminUrl ++ "/db")}, - {<<"target">>, list_to_binary(AdminUrl ++ "/db2")} - ]}, - {ok, ResponseCode, _, ResponseBody} = test_request:post(Url ++ "/_replicate", - ?ADMIN_REQ_HEADERS, jiffy:encode(EJRequestBody)), + EJRequestBody = + {[ + {<<"source">>, list_to_binary(AdminUrl ++ "/db")}, + {<<"target">>, list_to_binary(AdminUrl ++ "/db2")} + ]}, + {ok, ResponseCode, _, ResponseBody} = test_request:post( + Url ++ "/_replicate", + ?ADMIN_REQ_HEADERS, + jiffy:encode(EJRequestBody) + ), % assert replication status {EJResponseBody} = jiffy:decode(ResponseBody), @@ -662,16 +942,18 @@ should_allow_admin_to_replicate_from_access_to_no_access(_PortType, Url) -> DocsReard = couch_util:get_value(<<"docs_read">>, History), DocsWritten = couch_util:get_value(<<"docs_written">>, History), DocWriteFailures = couch_util:get_value(<<"doc_write_failures">>, History), - + ?assertEqual(3, MissingChecked), ?assertEqual(3, MissingFound), ?assertEqual(3, DocsReard), ?assertEqual(3, DocsWritten), ?assertEqual(0, DocWriteFailures), - + % assert docs in target db - {ok, 200, _, ADBody} = test_request:get(Url ++ "/db2/_all_docs?include_docs=true", - ?ADMIN_REQ_HEADERS), + {ok, 200, _, ADBody} = test_request:get( + Url ++ "/db2/_all_docs?include_docs=true", + ?ADMIN_REQ_HEADERS + ), {Json} = jiffy:decode(ADBody), ?assertEqual(3, proplists:get_value(<<"total_rows">>, Json)) end). @@ -679,34 +961,59 @@ should_allow_admin_to_replicate_from_access_to_no_access(_PortType, Url) -> should_allow_admin_to_replicate_from_no_access_to_no_access(_PortType, Url) -> ?_test(begin % create source and target dbs - {ok, 201, _, _} = test_request:put(url() ++ "/db2?q=1&n=1", - ?ADMIN_REQ_HEADERS, ""), + {ok, 201, _, _} = test_request:put( + url() ++ "/db2?q=1&n=1", + ?ADMIN_REQ_HEADERS, + "" + ), % set target db security - {ok, _, _, _} = test_request:put(url() ++ "/db2/_security", - ?ADMIN_REQ_HEADERS, jiffy:encode(?SECURITY_OBJECT)), - - {ok, 201, _, _} = test_request:put(url() ++ "/db3?q=1&n=1", - ?ADMIN_REQ_HEADERS, ""), + {ok, _, _, _} = test_request:put( + url() ++ "/db2/_security", + ?ADMIN_REQ_HEADERS, + jiffy:encode(?SECURITY_OBJECT) + ), + + {ok, 201, _, _} = test_request:put( + url() ++ "/db3?q=1&n=1", + ?ADMIN_REQ_HEADERS, + "" + ), % set target db security - {ok, _, _, _} = test_request:put(url() ++ "/db3/_security", - ?ADMIN_REQ_HEADERS, jiffy:encode(?SECURITY_OBJECT)), + {ok, _, _, _} = test_request:put( + url() ++ "/db3/_security", + ?ADMIN_REQ_HEADERS, + jiffy:encode(?SECURITY_OBJECT) + ), % create source docs - {ok, _, _, _} = test_request:put(Url ++ "/db2/a", - ?ADMIN_REQ_HEADERS, "{\"a\":1,\"_access\":[\"x\"]}"), - {ok, _, _, _} = test_request:put(Url ++ "/db2/b", - ?ADMIN_REQ_HEADERS, "{\"b\":2,\"_access\":[\"x\"]}"), - {ok, _, _, _} = test_request:put(Url ++ "/db2/c", - ?ADMIN_REQ_HEADERS, "{\"c\":3,\"_access\":[\"x\"]}"), + {ok, _, _, _} = test_request:put( + Url ++ "/db2/a", + ?ADMIN_REQ_HEADERS, + "{\"a\":1,\"_access\":[\"x\"]}" + ), + {ok, _, _, _} = test_request:put( + Url ++ "/db2/b", + ?ADMIN_REQ_HEADERS, + "{\"b\":2,\"_access\":[\"x\"]}" + ), + {ok, _, _, _} = test_request:put( + Url ++ "/db2/c", + ?ADMIN_REQ_HEADERS, + "{\"c\":3,\"_access\":[\"x\"]}" + ), % replicate AdminUrl = string:replace(Url, "http://", "http://a:a@"), - EJRequestBody = {[ - {<<"source">>, list_to_binary(AdminUrl ++ "/db2")}, - {<<"target">>, list_to_binary(AdminUrl ++ "/db3")} - ]}, - {ok, ResponseCode, _, ResponseBody} = test_request:post(Url ++ "/_replicate", - ?ADMIN_REQ_HEADERS, jiffy:encode(EJRequestBody)), + EJRequestBody = + {[ + {<<"source">>, list_to_binary(AdminUrl ++ "/db2")}, + {<<"target">>, list_to_binary(AdminUrl ++ "/db3")} + ]}, + {ok, ResponseCode, _, ResponseBody} = test_request:post( + Url ++ "/_replicate", + ?ADMIN_REQ_HEADERS, + jiffy:encode(EJRequestBody) + ), % assert replication status {EJResponseBody} = jiffy:decode(ResponseBody), @@ -719,16 +1026,18 @@ should_allow_admin_to_replicate_from_no_access_to_no_access(_PortType, Url) -> DocsReard = couch_util:get_value(<<"docs_read">>, History), DocsWritten = couch_util:get_value(<<"docs_written">>, History), DocWriteFailures = couch_util:get_value(<<"doc_write_failures">>, History), - + ?assertEqual(3, MissingChecked), ?assertEqual(3, MissingFound), ?assertEqual(3, DocsReard), ?assertEqual(3, DocsWritten), ?assertEqual(0, DocWriteFailures), - + % assert docs in target db - {ok, 200, _, ADBody} = test_request:get(Url ++ "/db3/_all_docs?include_docs=true", - ?ADMIN_REQ_HEADERS), + {ok, 200, _, ADBody} = test_request:get( + Url ++ "/db3/_all_docs?include_docs=true", + ?ADMIN_REQ_HEADERS + ), {Json} = jiffy:decode(ADBody), ?assertEqual(3, proplists:get_value(<<"total_rows">>, Json)) end). @@ -736,28 +1045,47 @@ should_allow_admin_to_replicate_from_no_access_to_no_access(_PortType, Url) -> should_allow_user_to_replicate_from_access_to_access(_PortType, Url) -> ?_test(begin % create source and target dbs - {ok, 201, _, _} = test_request:put(url() ++ "/db2?q=1&n=1&access=true", - ?ADMIN_REQ_HEADERS, ""), + {ok, 201, _, _} = test_request:put( + url() ++ "/db2?q=1&n=1&access=true", + ?ADMIN_REQ_HEADERS, + "" + ), % set target db security - {ok, _, _, _} = test_request:put(url() ++ "/db2/_security", - ?ADMIN_REQ_HEADERS, jiffy:encode(?SECURITY_OBJECT)), + {ok, _, _, _} = test_request:put( + url() ++ "/db2/_security", + ?ADMIN_REQ_HEADERS, + jiffy:encode(?SECURITY_OBJECT) + ), % create source docs - {ok, _, _, _} = test_request:put(Url ++ "/db/a", - ?ADMIN_REQ_HEADERS, "{\"a\":1,\"_access\":[\"x\"]}"), - {ok, _, _, _} = test_request:put(Url ++ "/db/b", - ?ADMIN_REQ_HEADERS, "{\"b\":2,\"_access\":[\"x\"]}"), - {ok, _, _, _} = test_request:put(Url ++ "/db/c", - ?ADMIN_REQ_HEADERS, "{\"c\":3,\"_access\":[\"y\"]}"), + {ok, _, _, _} = test_request:put( + Url ++ "/db/a", + ?ADMIN_REQ_HEADERS, + "{\"a\":1,\"_access\":[\"x\"]}" + ), + {ok, _, _, _} = test_request:put( + Url ++ "/db/b", + ?ADMIN_REQ_HEADERS, + "{\"b\":2,\"_access\":[\"x\"]}" + ), + {ok, _, _, _} = test_request:put( + Url ++ "/db/c", + ?ADMIN_REQ_HEADERS, + "{\"c\":3,\"_access\":[\"y\"]}" + ), % replicate UserXUrl = string:replace(Url, "http://", "http://x:x@"), - EJRequestBody = {[ - {<<"source">>, list_to_binary(UserXUrl ++ "/db")}, - {<<"target">>, list_to_binary(UserXUrl ++ "/db2")} - ]}, - {ok, ResponseCode, _, ResponseBody} = test_request:post(Url ++ "/_replicate", - ?USERX_REQ_HEADERS, jiffy:encode(EJRequestBody)), + EJRequestBody = + {[ + {<<"source">>, list_to_binary(UserXUrl ++ "/db")}, + {<<"target">>, list_to_binary(UserXUrl ++ "/db2")} + ]}, + {ok, ResponseCode, _, ResponseBody} = test_request:post( + Url ++ "/_replicate", + ?USERX_REQ_HEADERS, + jiffy:encode(EJRequestBody) + ), % ?debugFmt("~nResponseBody: ~p~n", [ResponseBody]), % assert replication status @@ -772,28 +1100,34 @@ should_allow_user_to_replicate_from_access_to_access(_PortType, Url) -> DocsReard = couch_util:get_value(<<"docs_read">>, History), DocsWritten = couch_util:get_value(<<"docs_written">>, History), DocWriteFailures = couch_util:get_value(<<"doc_write_failures">>, History), - + ?assertEqual(2, MissingChecked), ?assertEqual(2, MissingFound), ?assertEqual(2, DocsReard), ?assertEqual(2, DocsWritten), ?assertEqual(0, DocWriteFailures), - + % assert access in local doc ReplicationId = couch_util:get_value(<<"replication_id">>, EJResponseBody), - {ok, 200, _, CheckPoint} = test_request:get(Url ++ "/db/_local/" ++ ReplicationId, - ?USERX_REQ_HEADERS), + {ok, 200, _, CheckPoint} = test_request:get( + Url ++ "/db/_local/" ++ ReplicationId, + ?USERX_REQ_HEADERS + ), {EJCheckPoint} = jiffy:decode(CheckPoint), Access = couch_util:get_value(<<"_access">>, EJCheckPoint), ?assertEqual([<<"x">>], Access), % make sure others can’t read our local docs - {ok, 403, _, _} = test_request:get(Url ++ "/db/_local/" ++ ReplicationId, - ?USERY_REQ_HEADERS), + {ok, 403, _, _} = test_request:get( + Url ++ "/db/_local/" ++ ReplicationId, + ?USERY_REQ_HEADERS + ), % assert docs in target db - {ok, 200, _, ADBody} = test_request:get(Url ++ "/db2/_all_docs?include_docs=true", - ?ADMIN_REQ_HEADERS), + {ok, 200, _, ADBody} = test_request:get( + Url ++ "/db2/_all_docs?include_docs=true", + ?ADMIN_REQ_HEADERS + ), {Json} = jiffy:decode(ADBody), ?assertEqual(2, proplists:get_value(<<"total_rows">>, Json)) end). @@ -801,28 +1135,47 @@ should_allow_user_to_replicate_from_access_to_access(_PortType, Url) -> should_allow_user_to_replicate_from_access_to_no_access(_PortType, Url) -> ?_test(begin % create source and target dbs - {ok, 201, _, _} = test_request:put(url() ++ "/db2?q=1&n=1", - ?ADMIN_REQ_HEADERS, ""), + {ok, 201, _, _} = test_request:put( + url() ++ "/db2?q=1&n=1", + ?ADMIN_REQ_HEADERS, + "" + ), % set target db security - {ok, _, _, _} = test_request:put(url() ++ "/db2/_security", - ?ADMIN_REQ_HEADERS, jiffy:encode(?SECURITY_OBJECT)), + {ok, _, _, _} = test_request:put( + url() ++ "/db2/_security", + ?ADMIN_REQ_HEADERS, + jiffy:encode(?SECURITY_OBJECT) + ), % create source docs - {ok, _, _, _} = test_request:put(Url ++ "/db/a", - ?ADMIN_REQ_HEADERS, "{\"a\":1,\"_access\":[\"x\"]}"), - {ok, _, _, _} = test_request:put(Url ++ "/db/b", - ?ADMIN_REQ_HEADERS, "{\"b\":2,\"_access\":[\"x\"]}"), - {ok, _, _, _} = test_request:put(Url ++ "/db/c", - ?ADMIN_REQ_HEADERS, "{\"c\":3,\"_access\":[\"y\"]}"), + {ok, _, _, _} = test_request:put( + Url ++ "/db/a", + ?ADMIN_REQ_HEADERS, + "{\"a\":1,\"_access\":[\"x\"]}" + ), + {ok, _, _, _} = test_request:put( + Url ++ "/db/b", + ?ADMIN_REQ_HEADERS, + "{\"b\":2,\"_access\":[\"x\"]}" + ), + {ok, _, _, _} = test_request:put( + Url ++ "/db/c", + ?ADMIN_REQ_HEADERS, + "{\"c\":3,\"_access\":[\"y\"]}" + ), % replicate UserXUrl = string:replace(Url, "http://", "http://x:x@"), - EJRequestBody = {[ - {<<"source">>, list_to_binary(UserXUrl ++ "/db")}, - {<<"target">>, list_to_binary(UserXUrl ++ "/db2")} - ]}, - {ok, ResponseCode, _, ResponseBody} = test_request:post(Url ++ "/_replicate", - ?USERX_REQ_HEADERS, jiffy:encode(EJRequestBody)), + EJRequestBody = + {[ + {<<"source">>, list_to_binary(UserXUrl ++ "/db")}, + {<<"target">>, list_to_binary(UserXUrl ++ "/db2")} + ]}, + {ok, ResponseCode, _, ResponseBody} = test_request:post( + Url ++ "/_replicate", + ?USERX_REQ_HEADERS, + jiffy:encode(EJRequestBody) + ), % assert replication status {EJResponseBody} = jiffy:decode(ResponseBody), @@ -835,16 +1188,18 @@ should_allow_user_to_replicate_from_access_to_no_access(_PortType, Url) -> DocsReard = couch_util:get_value(<<"docs_read">>, History), DocsWritten = couch_util:get_value(<<"docs_written">>, History), DocWriteFailures = couch_util:get_value(<<"doc_write_failures">>, History), - + ?assertEqual(2, MissingChecked), ?assertEqual(2, MissingFound), ?assertEqual(2, DocsReard), ?assertEqual(2, DocsWritten), ?assertEqual(0, DocWriteFailures), - + % assert docs in target db - {ok, 200, _, ADBody} = test_request:get(Url ++ "/db2/_all_docs?include_docs=true", - ?ADMIN_REQ_HEADERS), + {ok, 200, _, ADBody} = test_request:get( + Url ++ "/db2/_all_docs?include_docs=true", + ?ADMIN_REQ_HEADERS + ), {Json} = jiffy:decode(ADBody), ?assertEqual(2, proplists:get_value(<<"total_rows">>, Json)) end). @@ -852,11 +1207,17 @@ should_allow_user_to_replicate_from_access_to_no_access(_PortType, Url) -> should_allow_user_to_replicate_from_no_access_to_access(_PortType, Url) -> ?_test(begin % create source and target dbs - {ok, 201, _, _} = test_request:put(url() ++ "/db2?q=1&n=1", - ?ADMIN_REQ_HEADERS, ""), + {ok, 201, _, _} = test_request:put( + url() ++ "/db2?q=1&n=1", + ?ADMIN_REQ_HEADERS, + "" + ), % set target db security - {ok, _, _, _} = test_request:put(url() ++ "/db2/_security", - ?ADMIN_REQ_HEADERS, jiffy:encode(?SECURITY_OBJECT)), + {ok, _, _, _} = test_request:put( + url() ++ "/db2/_security", + ?ADMIN_REQ_HEADERS, + jiffy:encode(?SECURITY_OBJECT) + ), % leave for easier debugging % VduFun = <<"function(newdoc, olddoc, userctx) {if(newdoc._id == \"b\") throw({'forbidden':'fail'})}">>, @@ -867,22 +1228,34 @@ should_allow_user_to_replicate_from_no_access_to_access(_PortType, Url) -> % {ok, _, _, _} = test_request:put(Url ++ "/db/_design/vdu", % ?ADMIN_REQ_HEADERS, jiffy:encode(DDoc)), % create source docs - {ok, _, _, _} = test_request:put(Url ++ "/db2/a", - ?ADMIN_REQ_HEADERS, "{\"a\":1,\"_access\":[\"x\"]}"), - {ok, _, _, _} = test_request:put(Url ++ "/db2/b", - ?ADMIN_REQ_HEADERS, "{\"b\":2,\"_access\":[\"x\"]}"), - {ok, _, _, _} = test_request:put(Url ++ "/db2/c", - ?ADMIN_REQ_HEADERS, "{\"c\":3,\"_access\":[\"y\"]}"), - + {ok, _, _, _} = test_request:put( + Url ++ "/db2/a", + ?ADMIN_REQ_HEADERS, + "{\"a\":1,\"_access\":[\"x\"]}" + ), + {ok, _, _, _} = test_request:put( + Url ++ "/db2/b", + ?ADMIN_REQ_HEADERS, + "{\"b\":2,\"_access\":[\"x\"]}" + ), + {ok, _, _, _} = test_request:put( + Url ++ "/db2/c", + ?ADMIN_REQ_HEADERS, + "{\"c\":3,\"_access\":[\"y\"]}" + ), % replicate UserXUrl = string:replace(Url, "http://", "http://x:x@"), - EJRequestBody = {[ - {<<"source">>, list_to_binary(UserXUrl ++ "/db2")}, - {<<"target">>, list_to_binary(UserXUrl ++ "/db")} - ]}, - {ok, ResponseCode, _, ResponseBody} = test_request:post(Url ++ "/_replicate", - ?USERX_REQ_HEADERS, jiffy:encode(EJRequestBody)), + EJRequestBody = + {[ + {<<"source">>, list_to_binary(UserXUrl ++ "/db2")}, + {<<"target">>, list_to_binary(UserXUrl ++ "/db")} + ]}, + {ok, ResponseCode, _, ResponseBody} = test_request:post( + Url ++ "/_replicate", + ?USERX_REQ_HEADERS, + jiffy:encode(EJRequestBody) + ), % assert replication status {EJResponseBody} = jiffy:decode(ResponseBody), @@ -895,16 +1268,18 @@ should_allow_user_to_replicate_from_no_access_to_access(_PortType, Url) -> DocsReard = couch_util:get_value(<<"docs_read">>, History), DocsWritten = couch_util:get_value(<<"docs_written">>, History), DocWriteFailures = couch_util:get_value(<<"doc_write_failures">>, History), - + ?assertEqual(3, MissingChecked), ?assertEqual(3, MissingFound), ?assertEqual(3, DocsReard), ?assertEqual(2, DocsWritten), ?assertEqual(1, DocWriteFailures), - + % assert docs in target db - {ok, 200, _, ADBody} = test_request:get(Url ++ "/db/_all_docs?include_docs=true", - ?ADMIN_REQ_HEADERS), + {ok, 200, _, ADBody} = test_request:get( + Url ++ "/db/_all_docs?include_docs=true", + ?ADMIN_REQ_HEADERS + ), {Json} = jiffy:decode(ADBody), ?assertEqual(2, proplists:get_value(<<"total_rows">>, Json)) end). @@ -912,33 +1287,58 @@ should_allow_user_to_replicate_from_no_access_to_access(_PortType, Url) -> should_allow_user_to_replicate_from_no_access_to_no_access(_PortType, Url) -> ?_test(begin % create source and target dbs - {ok, 201, _, _} = test_request:put(url() ++ "/db2?q=1&n=1", - ?ADMIN_REQ_HEADERS, ""), + {ok, 201, _, _} = test_request:put( + url() ++ "/db2?q=1&n=1", + ?ADMIN_REQ_HEADERS, + "" + ), % set target db security - {ok, _, _, _} = test_request:put(url() ++ "/db2/_security", - ?ADMIN_REQ_HEADERS, jiffy:encode(?SECURITY_OBJECT)), - - {ok, 201, _, _} = test_request:put(url() ++ "/db3?q=1&n=1", - ?ADMIN_REQ_HEADERS, ""), + {ok, _, _, _} = test_request:put( + url() ++ "/db2/_security", + ?ADMIN_REQ_HEADERS, + jiffy:encode(?SECURITY_OBJECT) + ), + + {ok, 201, _, _} = test_request:put( + url() ++ "/db3?q=1&n=1", + ?ADMIN_REQ_HEADERS, + "" + ), % set target db security - {ok, _, _, _} = test_request:put(url() ++ "/db3/_security", - ?ADMIN_REQ_HEADERS, jiffy:encode(?SECURITY_OBJECT)), + {ok, _, _, _} = test_request:put( + url() ++ "/db3/_security", + ?ADMIN_REQ_HEADERS, + jiffy:encode(?SECURITY_OBJECT) + ), % create source docs - {ok, _, _, _} = test_request:put(Url ++ "/db2/a", - ?ADMIN_REQ_HEADERS, "{\"a\":1,\"_access\":[\"x\"]}"), - {ok, _, _, _} = test_request:put(Url ++ "/db2/b", - ?ADMIN_REQ_HEADERS, "{\"b\":2,\"_access\":[\"x\"]}"), - {ok, _, _, _} = test_request:put(Url ++ "/db2/c", - ?ADMIN_REQ_HEADERS, "{\"c\":3,\"_access\":[\"y\"]}"), + {ok, _, _, _} = test_request:put( + Url ++ "/db2/a", + ?ADMIN_REQ_HEADERS, + "{\"a\":1,\"_access\":[\"x\"]}" + ), + {ok, _, _, _} = test_request:put( + Url ++ "/db2/b", + ?ADMIN_REQ_HEADERS, + "{\"b\":2,\"_access\":[\"x\"]}" + ), + {ok, _, _, _} = test_request:put( + Url ++ "/db2/c", + ?ADMIN_REQ_HEADERS, + "{\"c\":3,\"_access\":[\"y\"]}" + ), % replicate UserXUrl = string:replace(Url, "http://", "http://x:x@"), - EJRequestBody = {[ - {<<"source">>, list_to_binary(UserXUrl ++ "/db2")}, - {<<"target">>, list_to_binary(UserXUrl ++ "/db3")} - ]}, - {ok, ResponseCode, _, ResponseBody} = test_request:post(Url ++ "/_replicate", - ?USERX_REQ_HEADERS, jiffy:encode(EJRequestBody)), + EJRequestBody = + {[ + {<<"source">>, list_to_binary(UserXUrl ++ "/db2")}, + {<<"target">>, list_to_binary(UserXUrl ++ "/db3")} + ]}, + {ok, ResponseCode, _, ResponseBody} = test_request:post( + Url ++ "/_replicate", + ?USERX_REQ_HEADERS, + jiffy:encode(EJRequestBody) + ), % assert replication status {EJResponseBody} = jiffy:decode(ResponseBody), @@ -951,72 +1351,97 @@ should_allow_user_to_replicate_from_no_access_to_no_access(_PortType, Url) -> DocsReard = couch_util:get_value(<<"docs_read">>, History), DocsWritten = couch_util:get_value(<<"docs_written">>, History), DocWriteFailures = couch_util:get_value(<<"doc_write_failures">>, History), - + ?assertEqual(3, MissingChecked), ?assertEqual(3, MissingFound), ?assertEqual(3, DocsReard), ?assertEqual(3, DocsWritten), ?assertEqual(0, DocWriteFailures), - + % assert docs in target db - {ok, 200, _, ADBody} = test_request:get(Url ++ "/db3/_all_docs?include_docs=true", - ?ADMIN_REQ_HEADERS), + {ok, 200, _, ADBody} = test_request:get( + Url ++ "/db3/_all_docs?include_docs=true", + ?ADMIN_REQ_HEADERS + ), {Json} = jiffy:decode(ADBody), ?assertEqual(3, proplists:get_value(<<"total_rows">>, Json)) end). % revs_diff should_not_allow_user_to_revs_diff_other_docs(_PortType, Url) -> - ?_test(begin - % create test docs - {ok, _, _, _} = test_request:put(Url ++ "/db/a", - ?ADMIN_REQ_HEADERS, "{\"a\":1,\"_access\":[\"x\"]}"), - {ok, _, _, _} = test_request:put(Url ++ "/db/b", - ?ADMIN_REQ_HEADERS, "{\"b\":2,\"_access\":[\"x\"]}"), - {ok, _, _, V} = test_request:put(Url ++ "/db/c", - ?ADMIN_REQ_HEADERS, "{\"c\":3,\"_access\":[\"y\"]}"), - - % nothing missing - RevsDiff = {[ - {<<"a">>, [ - <<"1-23202479633c2b380f79507a776743d5">> - ]} - ]}, - {ok, GoodCode, _, GoodBody} = test_request:post(Url ++ "/db/_revs_diff", - ?USERX_REQ_HEADERS, jiffy:encode(RevsDiff)), - EJGoodBody = jiffy:decode(GoodBody), - ?assertEqual(200, GoodCode), - ?assertEqual({[]}, EJGoodBody), - - % something missing - MissingRevsDiff = {[ - {<<"a">>, [ - <<"1-missing">> - ]} - ]}, - {ok, MissingCode, _, MissingBody} = test_request:post(Url ++ "/db/_revs_diff", - ?USERX_REQ_HEADERS, jiffy:encode(MissingRevsDiff)), - EJMissingBody = jiffy:decode(MissingBody), - ?assertEqual(200, MissingCode), - MissingExpect = {[ - {<<"a">>, {[ - {<<"missing">>, [<<"1-missing">>]} - ]}} - ]}, - ?assertEqual(MissingExpect, EJMissingBody), - - % other doc - OtherRevsDiff = {[ - {<<"c">>, [ - <<"1-92aef5b0e4a3f4db0aba1320869bc95d">> - ]} - ]}, - {ok, OtherCode, _, OtherBody} = test_request:post(Url ++ "/db/_revs_diff", - ?USERX_REQ_HEADERS, jiffy:encode(OtherRevsDiff)), - EJOtherBody = jiffy:decode(OtherBody), - ?assertEqual(200, OtherCode), - ?assertEqual({[]}, EJOtherBody) - end). + ?_test(begin + % create test docs + {ok, _, _, _} = test_request:put( + Url ++ "/db/a", + ?ADMIN_REQ_HEADERS, + "{\"a\":1,\"_access\":[\"x\"]}" + ), + {ok, _, _, _} = test_request:put( + Url ++ "/db/b", + ?ADMIN_REQ_HEADERS, + "{\"b\":2,\"_access\":[\"x\"]}" + ), + {ok, _, _, V} = test_request:put( + Url ++ "/db/c", + ?ADMIN_REQ_HEADERS, + "{\"c\":3,\"_access\":[\"y\"]}" + ), + + % nothing missing + RevsDiff = + {[ + {<<"a">>, [ + <<"1-23202479633c2b380f79507a776743d5">> + ]} + ]}, + {ok, GoodCode, _, GoodBody} = test_request:post( + Url ++ "/db/_revs_diff", + ?USERX_REQ_HEADERS, + jiffy:encode(RevsDiff) + ), + EJGoodBody = jiffy:decode(GoodBody), + ?assertEqual(200, GoodCode), + ?assertEqual({[]}, EJGoodBody), + + % something missing + MissingRevsDiff = + {[ + {<<"a">>, [ + <<"1-missing">> + ]} + ]}, + {ok, MissingCode, _, MissingBody} = test_request:post( + Url ++ "/db/_revs_diff", + ?USERX_REQ_HEADERS, + jiffy:encode(MissingRevsDiff) + ), + EJMissingBody = jiffy:decode(MissingBody), + ?assertEqual(200, MissingCode), + MissingExpect = + {[ + {<<"a">>, + {[ + {<<"missing">>, [<<"1-missing">>]} + ]}} + ]}, + ?assertEqual(MissingExpect, EJMissingBody), + + % other doc + OtherRevsDiff = + {[ + {<<"c">>, [ + <<"1-92aef5b0e4a3f4db0aba1320869bc95d">> + ]} + ]}, + {ok, OtherCode, _, OtherBody} = test_request:post( + Url ++ "/db/_revs_diff", + ?USERX_REQ_HEADERS, + jiffy:encode(OtherRevsDiff) + ), + EJOtherBody = jiffy:decode(OtherBody), + ?assertEqual(200, OtherCode), + ?assertEqual({[]}, EJOtherBody) + end). %% ------------------------------------------------------------------ %% Internal Function Definitions %% ------------------------------------------------------------------ diff --git a/src/couch/test/eunit/couchdb_update_conflicts_tests.erl b/src/couch/test/eunit/couchdb_update_conflicts_tests.erl index 953ddd7033b..d75b335b149 100644 --- a/src/couch/test/eunit/couchdb_update_conflicts_tests.erl +++ b/src/couch/test/eunit/couchdb_update_conflicts_tests.erl @@ -18,7 +18,8 @@ -define(i2l(I), integer_to_list(I)). -define(DOC_ID, <<"foobar">>). -define(LOCAL_DOC_ID, <<"_local/foobar">>). --define(NUM_CLIENTS, [100, 500 ]). % TODO: enable 1000, 2000, 5000, 10000]). +% TODO: enable 1000, 2000, 5000, 10000]). +-define(NUM_CLIENTS, [100, 500]). -define(TIMEOUT, 200000). start() -> diff --git a/src/couch_index/src/couch_index_updater.erl b/src/couch_index/src/couch_index_updater.erl index 66d76062200..ab84b095555 100644 --- a/src/couch_index/src/couch_index_updater.erl +++ b/src/couch_index/src/couch_index_updater.erl @@ -124,7 +124,7 @@ update(Idx, Mod, IdxState) -> DocOpts = case lists:member(local_seq, UpdateOpts) of true -> [conflicts, deleted_conflicts, local_seq, deleted]; - _ -> [conflicts, deleted_conflicts,local_seq, deleted] + _ -> [conflicts, deleted_conflicts, local_seq, deleted] end, couch_util:with_db(DbName, fun(Db) -> @@ -142,9 +142,9 @@ update(Idx, Mod, IdxState) -> end, GetInfo = fun - (#full_doc_info{id=Id, update_seq=Seq, deleted=Del,access=Access}=FDI) -> + (#full_doc_info{id = Id, update_seq = Seq, deleted = Del, access = Access} = FDI) -> {Id, Seq, Del, couch_doc:to_doc_info(FDI), Access}; - (#doc_info{id=Id, high_seq=Seq, revs=[RI|_],access=Access}=DI) -> + (#doc_info{id = Id, high_seq = Seq, revs = [RI | _], access = Access} = DI) -> {Id, Seq, RI#rev_info.deleted, DI, Access} end, @@ -155,19 +155,20 @@ update(Idx, Mod, IdxState) -> {false, <<"_design/", _/binary>>} -> {nil, Seq}; _ -> - case IndexName of % TODO: move into outer case statement + % TODO: move into outer case statement + case IndexName of <<"_design/_access">> -> {ok, Doc} = couch_db:open_doc_int(Db, DocInfo, DocOpts), % TODO: hande conflicted docs in _access index % probably remove - [RevInfo|_] = DocInfo#doc_info.revs, + [RevInfo | _] = DocInfo#doc_info.revs, Doc1 = Doc#doc{ meta = [{body_sp, RevInfo#rev_info.body_sp}], access = Access }, {Doc1, Seq}; _ when Deleted -> - {#doc{id=DocId, deleted=true}, Seq}; + {#doc{id = DocId, deleted = true}, Seq}; _ -> {ok, Doc} = couch_db:open_doc_int(Db, DocInfo, DocOpts), {Doc, Seq} diff --git a/src/couch_index/src/couch_index_util.erl b/src/couch_index/src/couch_index_util.erl index 47133db0f14..beb0f556910 100644 --- a/src/couch_index/src/couch_index_util.erl +++ b/src/couch_index/src/couch_index_util.erl @@ -31,7 +31,7 @@ index_file(Module, DbName, FileName) -> load_doc(Db, #doc_info{} = DI, Opts) -> Deleted = lists:member(deleted, Opts), - % MyDoc = , + % MyDoc = , %{ok, MyDoc2} = MyDoc, %couch_log:error("~ncouch_index_util:load_doc(): Doc: ~p, Deleted ~p~n", [MyDoc2, MyDoc2#doc.deleted]), case catch (couch_db:open_doc(Db, DI, Opts)) of diff --git a/src/couch_mrview/src/couch_mrview.erl b/src/couch_mrview/src/couch_mrview.erl index 79b2b8bec90..2cb74fac4ca 100644 --- a/src/couch_mrview/src/couch_mrview.erl +++ b/src/couch_mrview/src/couch_mrview.erl @@ -266,22 +266,27 @@ query_all_docs(Db, Args0, Callback, Acc) -> access_ddoc() -> #doc{ id = <<"_design/_access">>, - body = {[ - {<<"language">>,<<"_access">>}, - {<<"options">>, {[ - {<<"include_design">>, true} - ]}}, - {<<"views">>, {[ - {<<"_access_by_id">>, {[ - {<<"map">>, <<"_access/by-id-map">>}, - {<<"reduce">>, <<"_count">>} - ]}}, - {<<"_access_by_seq">>, {[ - {<<"map">>, <<"_access/by-seq-map">>}, - {<<"reduce">>, <<"_count">>} - ]}} - ]}} - ]} + body = + {[ + {<<"language">>, <<"_access">>}, + {<<"options">>, + {[ + {<<"include_design">>, true} + ]}}, + {<<"views">>, + {[ + {<<"_access_by_id">>, + {[ + {<<"map">>, <<"_access/by-id-map">>}, + {<<"reduce">>, <<"_count">>} + ]}}, + {<<"_access_by_seq">>, + {[ + {<<"map">>, <<"_access/by-seq-map">>}, + {<<"reduce">>, <<"_count">>} + ]}} + ]}} + ]} }. query_changes_access(Db, StartSeq, Fun, Options, Acc) -> DDoc = access_ddoc(), @@ -289,15 +294,16 @@ query_changes_access(Db, StartSeq, Fun, Options, Acc) -> UserName = UserCtx#user_ctx.name, %% % TODO: add roles Args1 = prefix_startkey_endkey(UserName, #mrargs{}, fwd), - Args2 = Args1#mrargs{deleted=true}, - Args = Args2#mrargs{reduce=false}, + Args2 = Args1#mrargs{deleted = true}, + Args = Args2#mrargs{reduce = false}, %% % filter out the user-prefix from the key, so _all_docs looks normal %% % this isn’t a separate function because I’m binding Callback0 and I don’t %% % know the Erlang equivalent of JS’s fun.bind(this, newarg) Callback = fun - ({meta, _}, Acc0) -> - {ok, Acc0}; % ignore for now - ({row, Props}, Acc0) -> + ({meta, _}, Acc0) -> + % ignore for now + {ok, Acc0}; + ({row, Props}, Acc0) -> % turn row into FDI Value = couch_util:get_value(value, Props), [Owner, Seq] = couch_util:get_value(key, Props), @@ -307,7 +313,16 @@ query_changes_access(Db, StartSeq, Fun, Options, Acc) -> [Pos, RevId] = string:split(?b2l(Rev), "-"), FDI = #full_doc_info{ id = proplists:get_value(id, Props), - rev_tree = [{list_to_integer(Pos), {?l2b(RevId), #leaf{deleted=Deleted, ptr=BodySp, seq=Seq, sizes=#size_info{}}, []}}], + rev_tree = [ + { + list_to_integer(Pos), + { + ?l2b(RevId), + #leaf{deleted = Deleted, ptr = BodySp, seq = Seq, sizes = #size_info{}}, + [] + } + } + ], deleted = Deleted, update_seq = 0, sizes = #size_info{}, @@ -315,8 +330,9 @@ query_changes_access(Db, StartSeq, Fun, Options, Acc) -> }, Fun(FDI, Acc0); (_Else, Acc0) -> - {ok, Acc0} % ignore for now - end, + % ignore for now + {ok, Acc0} + end, VName = <<"_access_by_seq">>, query_view(Db, DDoc, VName, Args, Callback, Acc). @@ -327,7 +343,7 @@ query_all_docs_access(Db, Args0, Callback0, Acc) -> UserCtx = couch_db:get_user_ctx(Db), UserName = UserCtx#user_ctx.name, Args1 = prefix_startkey_endkey(UserName, Args0, Args0#mrargs.direction), - Args = Args1#mrargs{reduce=false, extra=Args1#mrargs.extra ++ [{all_docs_access, true}]}, + Args = Args1#mrargs{reduce = false, extra = Args1#mrargs.extra ++ [{all_docs_access, true}]}, Callback = fun ({row, Props}, Acc0) -> % filter out the user-prefix from the key, so _all_docs looks normal @@ -339,34 +355,37 @@ query_all_docs_access(Db, Args0, Callback0, Acc) -> Callback0({row, Row}, Acc0); (Row, Acc0) -> Callback0(Row, Acc0) - end, + end, VName = <<"_access_by_id">>, query_view(Db, DDoc, VName, Args, Callback, Acc). prefix_startkey_endkey(UserName, Args, fwd) -> - #mrargs{start_key=StartKey, end_key=EndKey} = Args, - Args#mrargs { - start_key = case StartKey of - undefined -> [UserName]; - StartKey -> [UserName, StartKey] - end, - end_key = case EndKey of - undefined -> [UserName, {}]; - EndKey -> [UserName, EndKey, {}] - end + #mrargs{start_key = StartKey, end_key = EndKey} = Args, + Args#mrargs{ + start_key = + case StartKey of + undefined -> [UserName]; + StartKey -> [UserName, StartKey] + end, + end_key = + case EndKey of + undefined -> [UserName, {}]; + EndKey -> [UserName, EndKey, {}] + end }; - prefix_startkey_endkey(UserName, Args, rev) -> - #mrargs{start_key=StartKey, end_key=EndKey} = Args, - Args#mrargs { - end_key = case StartKey of - undefined -> [UserName]; - StartKey -> [UserName, StartKey] - end, - start_key = case EndKey of - undefined -> [UserName, {}]; - EndKey -> [UserName, EndKey, {}] - end + #mrargs{start_key = StartKey, end_key = EndKey} = Args, + Args#mrargs{ + end_key = + case StartKey of + undefined -> [UserName]; + StartKey -> [UserName, StartKey] + end, + start_key = + case EndKey of + undefined -> [UserName, {}]; + EndKey -> [UserName, EndKey, {}] + end }. query_all_docs_admin(Db, Args0, Callback, Acc) -> Sig = couch_util:with_db(Db, fun(WDb) -> diff --git a/src/couch_mrview/src/couch_mrview_updater.erl b/src/couch_mrview/src/couch_mrview_updater.erl index 5d58ab05d74..83d21c0364e 100644 --- a/src/couch_mrview/src/couch_mrview_updater.erl +++ b/src/couch_mrview/src/couch_mrview_updater.erl @@ -176,36 +176,38 @@ map_docs(Parent, #mrst{db_name = DbName, idx_name = IdxName} = State0) -> DocFun = fun ({nil, Seq, _}, {SeqAcc, Results}) -> {erlang:max(Seq, SeqAcc), Results}; - ({Id, Seq, Rev, #doc{deleted=true, body=Body, meta=Meta}}, {SeqAcc, Results}) -> - % _access needs deleted docs - case IdxName of - <<"_design/_access">> -> - % splice in seq - {Start, Rev1} = Rev, - Doc = #doc{ - id = Id, - revs = {Start, [Rev1]}, - body = {make_deleted_body(Body, Meta, Seq)}, %% todo: only keep _access and add _seq - deleted = true - }, - {ok, Res} = couch_query_servers:map_doc_raw(QServer, Doc), - {erlang:max(Seq, SeqAcc), [{Id, Seq, Rev, Res} | Results]}; - _Else -> - {erlang:max(Seq, SeqAcc), [{Id, Seq, Rev, []} | Results]} - end; - ({Id, Seq, Doc}, {SeqAcc, Results}) -> - couch_stats:increment_counter([couchdb, mrview, map_doc]), - % IdxName: ~p, Doc: ~p~n~n", [IdxName, Doc]), - Doc0 = case IdxName of + ({Id, Seq, Rev, #doc{deleted = true, body = Body, meta = Meta}}, {SeqAcc, Results}) -> + % _access needs deleted docs + case IdxName of <<"_design/_access">> -> % splice in seq - {Props} = Doc#doc.body, - BodySp = couch_util:get_value(body_sp, Doc#doc.meta), - Doc#doc{ - body = {Props++[{<<"_seq">>, Seq}, {<<"_body_sp">>, BodySp}]} - }; + {Start, Rev1} = Rev, + Doc = #doc{ + id = Id, + revs = {Start, [Rev1]}, + %% todo: only keep _access and add _seq + body = {make_deleted_body(Body, Meta, Seq)}, + deleted = true + }, + {ok, Res} = couch_query_servers:map_doc_raw(QServer, Doc), + {erlang:max(Seq, SeqAcc), [{Id, Seq, Rev, Res} | Results]}; _Else -> - Doc + {erlang:max(Seq, SeqAcc), [{Id, Seq, Rev, []} | Results]} + end; + ({Id, Seq, Doc}, {SeqAcc, Results}) -> + couch_stats:increment_counter([couchdb, mrview, map_doc]), + % IdxName: ~p, Doc: ~p~n~n", [IdxName, Doc]), + Doc0 = + case IdxName of + <<"_design/_access">> -> + % splice in seq + {Props} = Doc#doc.body, + BodySp = couch_util:get_value(body_sp, Doc#doc.meta), + Doc#doc{ + body = {Props ++ [{<<"_seq">>, Seq}, {<<"_body_sp">>, BodySp}]} + }; + _Else -> + Doc end, {ok, Res} = couch_query_servers:map_doc_raw(QServer, Doc0), {erlang:max(Seq, SeqAcc), [{Id, Res} | Results]} diff --git a/src/couch_mrview/src/couch_mrview_util.erl b/src/couch_mrview/src/couch_mrview_util.erl index 7991487ff6a..4bc11997c5d 100644 --- a/src/couch_mrview/src/couch_mrview_util.erl +++ b/src/couch_mrview/src/couch_mrview_util.erl @@ -438,7 +438,7 @@ reduce_to_count(Reductions) -> FinalReduction = couch_btree:final_reduce(CountReduceFun, Reductions), get_count(FinalReduction). -get_access_row_count(#mrview{btree=Bt}, UserName) -> +get_access_row_count(#mrview{btree = Bt}, UserName) -> couch_btree:full_reduce_with_options(Bt, [ {start_key, UserName} ]). diff --git a/src/couch_replicator/src/couch_replicator.erl b/src/couch_replicator/src/couch_replicator.erl index ac3807d1160..e1dd627c754 100644 --- a/src/couch_replicator/src/couch_replicator.erl +++ b/src/couch_replicator/src/couch_replicator.erl @@ -77,13 +77,16 @@ replicate(PostBody, Ctx) -> false -> check_authorization(RepId, UserCtx), {ok, Listener} = rep_result_listener(RepId), - Result = case do_replication_loop(Rep) of % TODO: review why we need this - {ok, {ResultJson}} -> - {PublicRepId, _} = couch_replicator_ids:replication_id(Rep), % TODO: check with options - {ok, {[{<<"replication_id">>, ?l2b(PublicRepId)} | ResultJson]}}; - Else -> - Else - end, + % TODO: review why we need this + Result = + case do_replication_loop(Rep) of + {ok, {ResultJson}} -> + % TODO: check with options + {PublicRepId, _} = couch_replicator_ids:replication_id(Rep), + {ok, {[{<<"replication_id">>, ?l2b(PublicRepId)} | ResultJson]}}; + Else -> + Else + end, couch_replicator_notifier:stop(Listener), Result end. diff --git a/src/couch_replicator/src/couch_replicator_scheduler_job.erl b/src/couch_replicator/src/couch_replicator_scheduler_job.erl index ac979d37cef..bc4708b2209 100644 --- a/src/couch_replicator/src/couch_replicator_scheduler_job.erl +++ b/src/couch_replicator/src/couch_replicator_scheduler_job.erl @@ -859,9 +859,11 @@ do_checkpoint(State) -> try {SrcRevPos, SrcRevId} = update_checkpoint( - Source, SourceLog#doc{body = NewRepHistory}, SrcAccess, UserCtx, source), + Source, SourceLog#doc{body = NewRepHistory}, SrcAccess, UserCtx, source + ), {TgtRevPos, TgtRevId} = update_checkpoint( - Target, TargetLog#doc{body = NewRepHistory}, TgtAccess, UserCtx, target), + Target, TargetLog#doc{body = NewRepHistory}, TgtAccess, UserCtx, target + ), NewState = State#rep_state{ checkpoint_history = NewRepHistory, committed_seq = NewTsSeq, @@ -907,10 +909,11 @@ update_checkpoint(Db, Doc, Access, UserCtx, DbType) -> update_checkpoint(Db, #doc{id = LogId} = Doc0, Access, UserCtx) -> % if db has _access, then: % get userCtx from replication and splice into doc _access - Doc = case Access of - true -> Doc0#doc{access = [UserCtx#user_ctx.name]}; - _False -> Doc0 - end, + Doc = + case Access of + true -> Doc0#doc{access = [UserCtx#user_ctx.name]}; + _False -> Doc0 + end, try case couch_replicator_api_wrap:update_doc(Db, Doc, [delay_commit]) of diff --git a/src/fabric/src/fabric_doc_update.erl b/src/fabric/src/fabric_doc_update.erl index e38887659b9..f782fe3dd3d 100644 --- a/src/fabric/src/fabric_doc_update.erl +++ b/src/fabric/src/fabric_doc_update.erl @@ -423,7 +423,7 @@ doc_update1() -> ?assertEqual( % TODO: find out why we had to swap this - {error, [{Doc2,{error,internal_server_error}},{Doc1,{accepted,"A"}}]}, + {error, [{Doc2, {error, internal_server_error}}, {Doc1, {accepted, "A"}}]}, ReplyW5 ). @@ -454,7 +454,7 @@ doc_update2() -> handle_message({rexi_EXIT, 1}, lists:nth(3, Shards), Acc2), ?assertEqual( - {accepted, [{Doc2,{accepted,Doc2}}, {Doc1,{accepted,Doc1}}]}, + {accepted, [{Doc2, {accepted, Doc2}}, {Doc1, {accepted, Doc1}}]}, Reply ). @@ -483,7 +483,7 @@ doc_update3() -> {stop, Reply} = handle_message({ok, [{ok, Doc1}, {ok, Doc2}]}, lists:nth(3, Shards), Acc2), - ?assertEqual({ok, [{Doc2, {ok,Doc2}},{Doc1, {ok, Doc1}}]},Reply). + ?assertEqual({ok, [{Doc2, {ok, Doc2}}, {Doc1, {ok, Doc1}}]}, Reply). handle_all_dbs_active() -> Doc1 = #doc{revs = {1, [<<"foo">>]}}, From 6d437e339a402321f7e2b85da536badacda8c109 Mon Sep 17 00:00:00 2001 From: Jan Lehnardt Date: Sat, 20 Aug 2022 11:55:47 +0200 Subject: [PATCH 27/32] chore: remove comments and stale todo entries --- src/chttpd/src/chttpd_view.erl | 14 -------------- src/couch/src/couch_db_updater.erl | 16 +--------------- src/couch/src/couch_doc.erl | 4 ---- 3 files changed, 1 insertion(+), 33 deletions(-) diff --git a/src/chttpd/src/chttpd_view.erl b/src/chttpd/src/chttpd_view.erl index 44459b3cfc9..25c6b6d03a5 100644 --- a/src/chttpd/src/chttpd_view.erl +++ b/src/chttpd/src/chttpd_view.erl @@ -69,20 +69,6 @@ fabric_query_view(Db, Req, DDoc, ViewName, Args) -> Max = chttpd:chunked_response_buffer_size(), VAcc = #vacc{db = Db, req = Req, threshold = Max}, Options = [{user_ctx, Req#httpd.user_ctx}], - % {ok, Resp} = fabric:query_view(Db, Options, DDoc, ViewName, - % fun view_cb/2, VAcc, Args), - % {ok, Resp#vacc.resp}. - % % TODO: This might just be a debugging leftover, we might be able - % % to undo this by just returning {ok, Resp#vacc.resp} - % % However, this *might* be here because we need to handle - % % errors here now, because access might tell us to. - % case fabric:query_view(Db, Options, DDoc, ViewName, - % fun view_cb/2, VAcc, Args) of - % {ok, Resp} -> - % {ok, Resp#vacc.resp}; - % {error, Error} -> - % throw(Error) - % end. {ok, Resp} = fabric:query_view( Db, diff --git a/src/couch/src/couch_db_updater.erl b/src/couch/src/couch_db_updater.erl index 02136d83b3c..d62485d3166 100644 --- a/src/couch/src/couch_db_updater.erl +++ b/src/couch/src/couch_db_updater.erl @@ -267,7 +267,7 @@ sort_and_tag_grouped_docs(Client, GroupedDocs) -> % check we sort them again here. See COUCHDB-2735. Cmp = fun % TODO: re-evaluate this addition, might be - ([], []) -> false; + %([], []) -> false; % superflous now ([#doc{id = A} | _], [#doc{id = B} | _]) -> A < B end, @@ -736,14 +736,10 @@ update_docs_int(Db, DocsList, LocalDocs, MergeConflicts, UserCtx) -> %. if invalid, then send_result tagged `access`(c.f. `conflict) %. and don’t add to DLV, nor ODI - %couch_log:notice("~nDb: ~p, UserCtx: ~p~n", [Db, UserCtx]), - {DocsListValidated, OldDocInfosValidated} = validate_docs_access( Db, UserCtx, DocsList, OldDocInfos ), - %couch_log:notice("~nDocsListValidated: ~p, OldDocInfosValidated: ~p~n", [DocsListValidated, OldDocInfosValidated]), - {ok, AccOut} = merge_rev_trees(DocsListValidated, OldDocInfosValidated, AccIn), #merge_acc{ add_infos = NewFullDocInfos, @@ -783,11 +779,6 @@ update_docs_int(Db, DocsList, LocalDocs, MergeConflicts, UserCtx) -> {ok, commit_data(Db1), UpdatedDDocIds}. -% check_access(Db, UserCtx, Access) -> -% check_access(Db, UserCtx, couch_db:has_access_enabled(Db), Access). -% -% check_access(_Db, UserCtx, false, _Access) -> -% true; % at this point, we already validated this Db is access enabled, so do the checks right away. check_access(Db, UserCtx, Access) -> couch_db:check_access(Db#db{user_ctx = UserCtx}, Access). @@ -810,11 +801,8 @@ validate_docs_access( % validate Doc % if valid, then put back in Docs % if not, then send_result and skip - %couch_log:notice("~nvalidate_docs_access() UserCtx: ~p, Docs: ~p, OldInfo: ~p~n", [UserCtx, Docs, OldInfo]), NewDocs = lists:foldl( fun({Client, Doc}, Acc) -> - %couch_log:notice("~nvalidate_docs_access lists:foldl() Doc: ~p Doc#doc.access: ~p~n", [Doc, Doc#doc.access]), - % check if we are allowed to update the doc, skip when new doc OldDocMatchesAccess = case OldInfo#full_doc_info.rev_tree of @@ -823,8 +811,6 @@ validate_docs_access( end, NewDocMatchesAccess = check_access(Db, UserCtx, Doc#doc.access), - %couch_log:notice("~nvalidate_docs_access lists:foldl() OldDocMatchesAccess: ~p, NewDocMatchesAccess: ~p, andalso: ~p~n", [OldDocMatchesAccess, NewDocMatchesAccess, OldDocMatchesAccess andalso NewDocMatchesAccess]), - case OldDocMatchesAccess andalso NewDocMatchesAccess of % if valid, then send to DocsListValidated, OldDocsInfo true -> diff --git a/src/couch/src/couch_doc.erl b/src/couch/src/couch_doc.erl index dec3301d485..acf4994a33b 100644 --- a/src/couch/src/couch_doc.erl +++ b/src/couch/src/couch_doc.erl @@ -50,10 +50,6 @@ to_json_rev(0, []) -> to_json_rev(Start, [FirstRevId | _]) -> [{<<"_rev">>, ?l2b([integer_to_list(Start), "-", revid_to_str(FirstRevId)])}]. -% TODO: remove if we can -% to_json_body(Del, Body) -> -% to_json_body(Del, Body, []). - to_json_body(true, {Body}, []) -> Body ++ [{<<"_deleted">>, true}]; to_json_body(false, {Body}, []) -> From 691343f0692ec6b10cec45d7b71e3a9273ad9218 Mon Sep 17 00:00:00 2001 From: Jan Lehnardt Date: Sat, 20 Aug 2022 12:38:24 +0200 Subject: [PATCH 28/32] fix(access) elixir tests again --- test/elixir/test/proxyauth_test.exs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/test/elixir/test/proxyauth_test.exs b/test/elixir/test/proxyauth_test.exs index 2c6e3d530ea..9c9638d5294 100644 --- a/test/elixir/test/proxyauth_test.exs +++ b/test/elixir/test/proxyauth_test.exs @@ -70,7 +70,7 @@ defmodule ProxyAuthTest do ) assert resp2.body["userCtx"]["name"] == "couch@apache.org" - assert resp2.body["userCtx"]["roles"] == ["test_role"] + assert resp2.body["userCtx"]["roles"] == ["_users", "test_role"] assert resp2.body["info"]["authenticated"] == "proxy" assert resp2.body["ok"] == true @@ -124,7 +124,7 @@ defmodule ProxyAuthTest do ) assert resp2.body["userCtx"]["name"] == "couch@apache.org" - assert resp2.body["userCtx"]["roles"] == ["test_role_1", "test_role_2"] + assert resp2.body["userCtx"]["roles"] == ["_users", "test_role_1", "test_role_2"] assert resp2.body["info"]["authenticated"] == "proxy" assert resp2.body["ok"] == true From 5b9e274fc2ae089570dd6be2ff1170b1a70f2c84 Mon Sep 17 00:00:00 2001 From: Jan Lehnardt Date: Sat, 20 Aug 2022 13:09:29 +0200 Subject: [PATCH 29/32] fix: simplify --- src/couch/src/couch_db_updater.erl | 8 +------- 1 file changed, 1 insertion(+), 7 deletions(-) diff --git a/src/couch/src/couch_db_updater.erl b/src/couch/src/couch_db_updater.erl index d62485d3166..1e5566b210b 100644 --- a/src/couch/src/couch_db_updater.erl +++ b/src/couch/src/couch_db_updater.erl @@ -265,12 +265,7 @@ sort_and_tag_grouped_docs(Client, GroupedDocs) -> % The merge_updates function will fail and the database can end up with % duplicate documents if the incoming groups are not sorted, so as a sanity % check we sort them again here. See COUCHDB-2735. - Cmp = fun - % TODO: re-evaluate this addition, might be - %([], []) -> false; - % superflous now - ([#doc{id = A} | _], [#doc{id = B} | _]) -> A < B - end, + Cmp = fun([#doc{id = A} | _], [#doc{id = B} | _]) -> A < B end, lists:map( fun(DocGroup) -> [{Client, maybe_tag_doc(D)} || D <- DocGroup] @@ -779,7 +774,6 @@ update_docs_int(Db, DocsList, LocalDocs, MergeConflicts, UserCtx) -> {ok, commit_data(Db1), UpdatedDDocIds}. - % at this point, we already validated this Db is access enabled, so do the checks right away. check_access(Db, UserCtx, Access) -> couch_db:check_access(Db#db{user_ctx = UserCtx}, Access). From 207cdb38b2c1402ddb7dfbd8767c9924d2864bdc Mon Sep 17 00:00:00 2001 From: Jan Lehnardt Date: Fri, 11 Nov 2022 13:44:26 +0100 Subject: [PATCH 30/32] chore: append _users role instead of prepending it --- src/couch/src/couch_httpd_auth.erl | 4 ++-- test/elixir/test/proxyauth_test.exs | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/src/couch/src/couch_httpd_auth.erl b/src/couch/src/couch_httpd_auth.erl index a6cffce39f1..eff387a525b 100644 --- a/src/couch/src/couch_httpd_auth.erl +++ b/src/couch/src/couch_httpd_auth.erl @@ -103,7 +103,7 @@ extract_roles(UserProps) -> Roles = couch_util:get_value(<<"roles">>, UserProps, []), case lists:member(<<"_admin">>, Roles) of true -> Roles; - _ -> [<<"_users">> | Roles] + _ -> Roles ++ [<<"_users">>] end. default_authentication_handler(Req) -> @@ -196,7 +196,7 @@ proxy_auth_user(Req) -> Roles = case header_value(Req, XHeaderRoles) of undefined -> []; - Else -> [<<"_users">> | re:split(Else, "\\s*,\\s*", [trim, {return, binary}])] + Else -> re:split(Else, "\\s*,\\s*", [trim, {return, binary}]) ++ [<<"_users">>] end, case chttpd_util:get_chttpd_auth_config_boolean( diff --git a/test/elixir/test/proxyauth_test.exs b/test/elixir/test/proxyauth_test.exs index 9c9638d5294..0c77abff5eb 100644 --- a/test/elixir/test/proxyauth_test.exs +++ b/test/elixir/test/proxyauth_test.exs @@ -70,7 +70,7 @@ defmodule ProxyAuthTest do ) assert resp2.body["userCtx"]["name"] == "couch@apache.org" - assert resp2.body["userCtx"]["roles"] == ["_users", "test_role"] + assert resp2.body["userCtx"]["roles"] == ["test_role", "_users"] assert resp2.body["info"]["authenticated"] == "proxy" assert resp2.body["ok"] == true @@ -124,7 +124,7 @@ defmodule ProxyAuthTest do ) assert resp2.body["userCtx"]["name"] == "couch@apache.org" - assert resp2.body["userCtx"]["roles"] == ["_users", "test_role_1", "test_role_2"] + assert resp2.body["userCtx"]["roles"] == ["test_role_1", "test_role_2", "_users"] assert resp2.body["info"]["authenticated"] == "proxy" assert resp2.body["ok"] == true From 9db67a96693076bbd2354c68c00da0055ac12f08 Mon Sep 17 00:00:00 2001 From: Jan Lehnardt Date: Fri, 11 Nov 2022 14:26:17 +0100 Subject: [PATCH 31/32] fix: restore previous function signature --- src/couch/src/couch_db.erl | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/couch/src/couch_db.erl b/src/couch/src/couch_db.erl index cc2810ae59a..919b4190347 100644 --- a/src/couch/src/couch_db.erl +++ b/src/couch/src/couch_db.erl @@ -326,6 +326,9 @@ open_doc(Db, Id, Options0) -> Else end. +apply_open_options(Db, Options) -> + apply_open_options2(Db, Options). + apply_open_options(Db, {ok, Doc}, Options) -> ok = validate_access(Db, Doc, Options), apply_open_options1({ok, Doc}, Options); From 841bc3830f5ef49c29284a8ef1a8112ca3cbd534 Mon Sep 17 00:00:00 2001 From: Jan Lehnardt Date: Sat, 12 Nov 2022 08:25:53 +0100 Subject: [PATCH 32/32] fix: add function signature change to new open_docs_rev/3 --- src/couch/src/couch_db.erl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/couch/src/couch_db.erl b/src/couch/src/couch_db.erl index 919b4190347..fc3e6a02879 100644 --- a/src/couch/src/couch_db.erl +++ b/src/couch/src/couch_db.erl @@ -383,7 +383,7 @@ open_doc_revs(Db, IdRevsOpts, Options) when is_list(IdRevsOpts) -> AllResults = open_doc_revs_int(Db, IdRevs, Options), % Apply document open options like {atts_since, ...} etc ResultsZipFun = fun(DocOpts, {ok, Results}) -> - [apply_open_options(R, DocOpts) || R <- Results] + [apply_open_options(Db, R, DocOpts) || R <- Results] end, lists:zipwith(ResultsZipFun, DocOptsOnly, AllResults).