diff --git a/rel/overlay/etc/default.ini b/rel/overlay/etc/default.ini index ae691bb8dbf..0612506e335 100644 --- a/rel/overlay/etc/default.ini +++ b/rel/overlay/etc/default.ini @@ -388,6 +388,10 @@ authentication_db = _users ; max_iterations, password_scheme, password_regexp, proxy_use_secret, ; public_fields, secret, users_db_public, cookie_domain, same_site +; Per document access settings +[per_doc_access] +;enabled = false + ; CSP (Content Security Policy) Support [csp] ;utils_enable = true diff --git a/src/chttpd/src/chttpd.erl b/src/chttpd/src/chttpd.erl index c25c1883868..3c9ddd13872 100644 --- a/src/chttpd/src/chttpd.erl +++ b/src/chttpd/src/chttpd.erl @@ -1031,6 +1031,8 @@ error_info({bad_request, Error, Reason}) -> {400, couch_util:to_binary(Error), couch_util:to_binary(Reason)}; error_info({query_parse_error, Reason}) -> {400, <<"query_parse_error">>, Reason}; +error_info(access) -> + {403, <<"forbidden">>, <<"access">>}; error_info(database_does_not_exist) -> {404, <<"not_found">>, <<"Database does not exist.">>}; error_info(not_found) -> diff --git a/src/chttpd/src/chttpd_db.erl b/src/chttpd/src/chttpd_db.erl index 748b356fd6e..50069a8679b 100644 --- a/src/chttpd/src/chttpd_db.erl +++ b/src/chttpd/src/chttpd_db.erl @@ -955,16 +955,18 @@ view_cb(Msg, Acc) -> couch_mrview_http:view_cb(Msg, Acc). db_doc_req(#httpd{method = 'DELETE'} = Req, Db, DocId) -> - % check for the existence of the doc to handle the 404 case. - couch_doc_open(Db, DocId, nil, []), - case chttpd:qs_value(Req, "rev") of + % fetch the old doc revision, so we can compare access control + % in send_update_doc() later. + Doc0 = couch_doc_open(Db, DocId, nil, [{user_ctx, Req#httpd.user_ctx}]), + Revs = chttpd:qs_value(Req, "rev"), + case Revs of undefined -> Body = {[{<<"_deleted">>, true}]}; Rev -> Body = {[{<<"_rev">>, ?l2b(Rev)}, {<<"_deleted">>, true}]} end, - Doc = couch_doc_from_req(Req, Db, DocId, Body), - send_updated_doc(Req, Db, DocId, Doc); + Doc = #doc{revs = Revs, body = Body, deleted = true, access = Doc0#doc.access}, + send_updated_doc(Req, Db, DocId, couch_doc_from_req(Req, Db, DocId, Doc)); db_doc_req(#httpd{method = 'GET', mochi_req = MochiReq} = Req, Db, DocId) -> #doc_query_args{ rev = Rev0, @@ -1414,6 +1416,8 @@ receive_request_data(Req, LenLeft) when LenLeft > 0 -> receive_request_data(_Req, _) -> throw(<<"expected more data">>). 
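The new `[per_doc_access]` switch gates the `access=true` database-creation flag parsed in chttpd_db.erl below. As a hedged sketch of the intended behaviour, written in the style of the eunit suite at the end of this diff (the dev-cluster URL and the a/a admin credentials are assumptions taken from that suite, not part of this change):

    % feature flag off: creating an access-enabled db is rejected as a 400 bad_request
    Url = "http://127.0.0.1:15984",   % assumed dev-cluster address
    AdminHeaders = [{"Content-Type", "application/json"}, {basic_auth, {"a", "a"}}],
    ok = config:set("per_doc_access", "enabled", "false", false),
    {ok, 400, _, _} = test_request:put(Url ++ "/db?access=true", AdminHeaders, ""),
    % feature flag on: the same request succeeds
    ok = config:set("per_doc_access", "enabled", "true", false),
    {ok, 201, _, _} = test_request:put(Url ++ "/db?access=true", AdminHeaders, "").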
+update_doc_result_to_json({#doc{id = Id, revs = Rev}, access}) -> + update_doc_result_to_json({{Id, Rev}, access}); update_doc_result_to_json({error, _} = Error) -> {_Code, Err, Msg} = chttpd:error_info(Error), {[ @@ -1968,6 +1972,7 @@ parse_shards_opt(Req) -> [ {n, parse_shards_opt("n", Req, config:get_integer("cluster", "n", 3))}, {q, parse_shards_opt("q", Req, config:get_integer("cluster", "q", 2))}, + {access, parse_shards_opt("access", Req, chttpd:qs_value(Req, "access", false))}, {placement, parse_shards_opt( "placement", Req, config:get("cluster", "placement") @@ -1996,7 +2001,23 @@ parse_shards_opt("placement", Req, Default) -> throw({bad_request, Err}) end end; +parse_shards_opt("access", Req, Value) when is_list(Value) -> + parse_shards_opt("access", Req, list_to_existing_atom(Value)); +parse_shards_opt("access", _Req, Value) when Value =:= true -> + case config:get_boolean("per_doc_access", "enabled", false) of + true -> + true; + false -> + Err = ?l2b(["The `access` option is not available on this CouchDB installation."]), + throw({bad_request, Err}) + end; +parse_shards_opt("access", _Req, Value) when Value =:= false -> + false; +parse_shards_opt("access", _Req, _Value) -> + Err = ?l2b(["The `access` value should be a boolean."]), + throw({bad_request, Err}); parse_shards_opt(Param, Req, Default) -> + couch_log:error("~n parse_shards_opt Param: ~p, Default: ~p~n", [Param, Default]), Val = chttpd:qs_value(Req, Param, Default), Err = ?l2b(["The `", Param, "` value should be a positive integer."]), case couch_util:validate_positive_int(Val) of diff --git a/src/chttpd/src/chttpd_view.erl b/src/chttpd/src/chttpd_view.erl index 1d721d18988..25c6b6d03a5 100644 --- a/src/chttpd/src/chttpd_view.erl +++ b/src/chttpd/src/chttpd_view.erl @@ -69,6 +69,7 @@ fabric_query_view(Db, Req, DDoc, ViewName, Args) -> Max = chttpd:chunked_response_buffer_size(), VAcc = #vacc{db = Db, req = Req, threshold = Max}, Options = [{user_ctx, Req#httpd.user_ctx}], + {ok, Resp} = fabric:query_view( Db, Options, diff --git a/src/couch/include/couch_db.hrl b/src/couch/include/couch_db.hrl index e70706a7f98..4b5e9705bb0 100644 --- a/src/couch/include/couch_db.hrl +++ b/src/couch/include/couch_db.hrl @@ -65,7 +65,8 @@ -record(doc_info, { id = <<"">>, high_seq = 0, - revs = [] % rev_info + revs = [], % rev_info + access = [] }). -record(size_info, { @@ -78,7 +79,8 @@ update_seq = 0, deleted = false, rev_tree = [], - sizes = #size_info{} + sizes = #size_info{}, + access = [] }). -record(httpd, { @@ -122,7 +124,8 @@ % key/value tuple of meta information, provided when using special options: % couch_db:open_doc(Db, Id, Options). - meta = [] + meta = [], + access = [] }). @@ -205,7 +208,8 @@ ptr, seq, sizes = #size_info{}, - atts = [] + atts = [], + access = [] }). -record (fabric_changes_acc, { diff --git a/src/couch/src/couch_access_native_proc.erl b/src/couch/src/couch_access_native_proc.erl new file mode 100644 index 00000000000..38c8e573814 --- /dev/null +++ b/src/couch/src/couch_access_native_proc.erl @@ -0,0 +1,139 @@ +% Licensed under the Apache License, Version 2.0 (the "License"); you may not +% use this file except in compliance with the License. You may obtain a copy of +% the License at +% +% http://www.apache.org/licenses/LICENSE-2.0 +% +% Unless required by applicable law or agreed to in writing, software +% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the +% License for the specific language governing permissions and limitations under +% the License. + +-module(couch_access_native_proc). +-behavior(gen_server). + +-export([ + start_link/0, + set_timeout/2, + prompt/2 +]). + +-export([ + init/1, + terminate/2, + handle_call/3, + handle_cast/2, + handle_info/2, + code_change/3 +]). + +-record(st, { + indexes = [], + % TODO: make configurable + timeout = 5000 +}). + +start_link() -> + gen_server:start_link(?MODULE, [], []). + +set_timeout(Pid, TimeOut) when is_integer(TimeOut), TimeOut > 0 -> + gen_server:call(Pid, {set_timeout, TimeOut}). + +prompt(Pid, Data) -> + gen_server:call(Pid, {prompt, Data}). + +init(_) -> + {ok, #st{}}. + +terminate(_Reason, _St) -> + ok. + +handle_call({set_timeout, TimeOut}, _From, St) -> + {reply, ok, St#st{timeout = TimeOut}}; +handle_call({prompt, [<<"reset">>]}, _From, St) -> + {reply, true, St#st{indexes = []}}; +handle_call({prompt, [<<"reset">>, _QueryConfig]}, _From, St) -> + {reply, true, St#st{indexes = []}}; +handle_call({prompt, [<<"add_fun">>, IndexInfo]}, _From, St) -> + {reply, true, St}; +handle_call({prompt, [<<"map_doc">>, Doc]}, _From, St) -> + {reply, map_doc(St, mango_json:to_binary(Doc)), St}; +handle_call({prompt, [<<"reduce">>, _, _]}, _From, St) -> + {reply, null, St}; +handle_call({prompt, [<<"rereduce">>, _, _]}, _From, St) -> + {reply, null, St}; +handle_call({prompt, [<<"index_doc">>, Doc]}, _From, St) -> + {reply, [[]], St}; +handle_call(Msg, _From, St) -> + {stop, {invalid_call, Msg}, {invalid_call, Msg}, St}. + +handle_cast(garbage_collect, St) -> + erlang:garbage_collect(), + {noreply, St}; +handle_cast(Msg, St) -> + {stop, {invalid_cast, Msg}, St}. + +handle_info(Msg, St) -> + {stop, {invalid_info, Msg}, St}. + +code_change(_OldVsn, St, _Extra) -> + {ok, St}. + +% return value is an array of arrays, first dimension is the different indexes +% [0] will be by-access-id // for this test, later we should make this by-access +% -seq, since that one we will always need, and by-access-id can be opt-in. +% the second dimension is the number of emit kv pairs: +% [ // the return value +% [ // the first view +% ['k1', 'v1'], // the first k/v pair for the first view +% ['k2', 'v2'] // second, etc. +% ], +% [ // second view +% ['l1', 'w1'] // first k/v par in second view +% ] +% ] +% {"id":"account/bongel","key":"account/bongel","value":{"rev":"1-967a00dff5e02add41819138abb3284d"}}, + +map_doc(_St, {Doc}) -> + case couch_util:get_value(<<"_access">>, Doc) of + undefined -> + % do not index this doc + [[], []]; + Access when is_list(Access) -> + Id = couch_util:get_value(<<"_id">>, Doc), + Rev = couch_util:get_value(<<"_rev">>, Doc), + Seq = couch_util:get_value(<<"_seq">>, Doc), + Deleted = couch_util:get_value(<<"_deleted">>, Doc, false), + BodySp = couch_util:get_value(<<"_body_sp">>, Doc), + % by-access-id + ById = + case Deleted of + false -> + lists:map( + fun(UserOrRole) -> + [ + [[UserOrRole, Id], Rev] + ] + end, + Access + ); + _True -> + [[]] + end, + + % by-access-seq + BySeq = lists:map( + fun(UserOrRole) -> + [ + [[UserOrRole, Seq], [{rev, Rev}, {deleted, Deleted}, {body_sp, BodySp}]] + ] + end, + Access + ), + ById ++ BySeq; + Else -> + % TODO: no comprende: should not be needed once we implement + % _access field validation + [[], []] + end. 
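To make the emit shape described in the comment block above concrete, here is a hedged, worked example of what the native proc is expected to return for a document carrying a single _access entry (the id, rev and seq values are illustrative only, and mango_json must be on the code path since the prompt callback uses mango_json:to_binary/1):

    {ok, Pid} = couch_access_native_proc:start_link(),
    Doc = {[
        {<<"_id">>, <<"d1">>},
        {<<"_rev">>, <<"1-abc">>},
        {<<"_seq">>, 5},
        {<<"_access">>, [<<"x">>]}
    ]},
    Rows = couch_access_native_proc:prompt(Pid, [<<"map_doc">>, Doc]),
    % Rows is expected to hold the by-access-id rows first, then by-access-seq:
    % [
    %   [[[<<"x">>, <<"d1">>], <<"1-abc">>]],
    %   [[[<<"x">>, 5], [{rev, <<"1-abc">>}, {deleted, false}, {body_sp, undefined}]]]
    % ]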
diff --git a/src/couch/src/couch_bt_engine.erl b/src/couch/src/couch_bt_engine.erl index 0549de566d3..d653a8eea20 100644 --- a/src/couch/src/couch_bt_engine.erl +++ b/src/couch/src/couch_bt_engine.erl @@ -664,20 +664,24 @@ id_tree_split(#full_doc_info{} = Info) -> update_seq = Seq, deleted = Deleted, sizes = SizeInfo, - rev_tree = Tree + rev_tree = Tree, + access = Access } = Info, - {Id, {Seq, ?b2i(Deleted), split_sizes(SizeInfo), disk_tree(Tree)}}. + {Id, {Seq, ?b2i(Deleted), split_sizes(SizeInfo), disk_tree(Tree), split_access(Access)}}. id_tree_join(Id, {HighSeq, Deleted, DiskTree}) -> % Handle old formats before data_size was added id_tree_join(Id, {HighSeq, Deleted, #size_info{}, DiskTree}); id_tree_join(Id, {HighSeq, Deleted, Sizes, DiskTree}) -> + id_tree_join(Id, {HighSeq, Deleted, Sizes, DiskTree, []}); +id_tree_join(Id, {HighSeq, Deleted, Sizes, DiskTree, Access}) -> #full_doc_info{ id = Id, update_seq = HighSeq, deleted = ?i2b(Deleted), sizes = couch_db_updater:upgrade_sizes(Sizes), - rev_tree = rev_tree(DiskTree) + rev_tree = rev_tree(DiskTree), + access = join_access(Access) }. id_tree_reduce(reduce, FullDocInfos) -> @@ -714,21 +718,27 @@ seq_tree_split(#full_doc_info{} = Info) -> update_seq = Seq, deleted = Del, sizes = SizeInfo, - rev_tree = Tree + rev_tree = Tree, + access = Access } = Info, - {Seq, {Id, ?b2i(Del), split_sizes(SizeInfo), disk_tree(Tree)}}. + {Seq, {Id, ?b2i(Del), split_sizes(SizeInfo), disk_tree(Tree), split_access(Access)}}. seq_tree_join(Seq, {Id, Del, DiskTree}) when is_integer(Del) -> seq_tree_join(Seq, {Id, Del, {0, 0}, DiskTree}); seq_tree_join(Seq, {Id, Del, Sizes, DiskTree}) when is_integer(Del) -> + seq_tree_join(Seq, {Id, Del, Sizes, DiskTree, []}); +seq_tree_join(Seq, {Id, Del, Sizes, DiskTree, Access}) when is_integer(Del) -> #full_doc_info{ id = Id, update_seq = Seq, deleted = ?i2b(Del), sizes = join_sizes(Sizes), - rev_tree = rev_tree(DiskTree) + rev_tree = rev_tree(DiskTree), + access = join_access(Access) }; seq_tree_join(KeySeq, {Id, RevInfos, DeletedRevInfos}) -> + seq_tree_join(KeySeq, {Id, RevInfos, DeletedRevInfos, []}); +seq_tree_join(KeySeq, {Id, RevInfos, DeletedRevInfos, Access}) -> % Older versions stored #doc_info records in the seq_tree. % Compact to upgrade. Revs = lists:map( @@ -746,7 +756,8 @@ seq_tree_join(KeySeq, {Id, RevInfos, DeletedRevInfos}) -> #doc_info{ id = Id, high_seq = KeySeq, - revs = Revs ++ DeletedRevs + revs = Revs ++ DeletedRevs, + access = Access }. seq_tree_reduce(reduce, DocInfos) -> @@ -755,6 +766,9 @@ seq_tree_reduce(reduce, DocInfos) -> seq_tree_reduce(rereduce, Reds) -> lists:sum(Reds). +join_access(Access) -> Access. +split_access(Access) -> Access. + local_tree_split(#doc{revs = {0, [Rev]}} = Doc) when is_binary(Rev) -> #doc{ id = Id, diff --git a/src/couch/src/couch_btree.erl b/src/couch/src/couch_btree.erl index b974a22eeca..b908421f202 100644 --- a/src/couch/src/couch_btree.erl +++ b/src/couch/src/couch_btree.erl @@ -16,6 +16,7 @@ -export([fold/4, full_reduce/1, final_reduce/2, size/1, foldl/3, foldl/4]). -export([fold_reduce/4, lookup/2, get_state/1, set_options/2]). -export([extract/2, assemble/3, less/3]). +-export([full_reduce_with_options/2]). -include_lib("couch/include/couch_db.hrl"). @@ -109,6 +110,19 @@ full_reduce(#btree{root = nil, reduce = Reduce}) -> full_reduce(#btree{root = Root}) -> {ok, element(2, Root)}. 
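Returning to the couch_bt_engine.erl hunk above: the id_tree value grows a fifth element for the access list, while id_tree_join/2 keeps accepting the old 4-tuple and defaults access to []. A hedged sketch, assuming the split/join helpers are exported and couch/include/couch_db.hrl is included for the record definitions:

    FDI = #full_doc_info{
        id = <<"d1">>,
        update_seq = 3,
        deleted = false,
        sizes = #size_info{},
        rev_tree = [],
        access = [<<"x">>]
    },
    % new format: access travels as the fifth element of the on-disk term
    {<<"d1">>, {3, 0, Sizes, DiskTree, [<<"x">>]}} = couch_bt_engine:id_tree_split(FDI),
    % terms written before this change still join, with access defaulting to []
    #full_doc_info{access = []} = couch_bt_engine:id_tree_join(<<"d1">>, {3, 0, Sizes, DiskTree}).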
+full_reduce_with_options(Bt, Options0) -> + CountFun = fun(_SeqStart, PartialReds, 0) -> + {ok, couch_btree:final_reduce(Bt, PartialReds)} + end, + [UserName] = proplists:get_value(start_key, Options0, <<"">>), + EndKey = {[UserName, {[]}]}, + Options = + Options0 ++ + [ + {end_key, EndKey} + ], + fold_reduce(Bt, CountFun, 0, Options). + size(#btree{root = nil}) -> 0; size(#btree{root = {_P, _Red}}) -> diff --git a/src/couch/src/couch_changes.erl b/src/couch/src/couch_changes.erl index 089cda975ed..22685ba4a7a 100644 --- a/src/couch/src/couch_changes.erl +++ b/src/couch/src/couch_changes.erl @@ -688,10 +688,13 @@ maybe_get_changes_doc(_Value, _Acc) -> []. load_doc(Db, Value, Opts, DocOpts, Filter) -> + %couch_log:error("~ncouch_changes:load_doc(): Value: ~p~n", [Value]), case couch_index_util:load_doc(Db, Value, Opts) of null -> + %couch_log:error("~ncouch_changes:load_doc(): null~n", []), [{doc, null}]; Doc -> + %couch_log:error("~ncouch_changes:load_doc(): Doc: ~p~n", [Doc]), [{doc, doc_to_json(Doc, DocOpts, Filter)}] end. diff --git a/src/couch/src/couch_db.erl b/src/couch/src/couch_db.erl index b02aae446f1..fc3e6a02879 100644 --- a/src/couch/src/couch_db.erl +++ b/src/couch/src/couch_db.erl @@ -31,6 +31,9 @@ is_admin/1, check_is_admin/1, check_is_member/1, + validate_access/2, + check_access/2, + has_access_enabled/1, name/1, get_after_doc_read_fun/1, @@ -137,6 +140,8 @@ ]). -include_lib("couch/include/couch_db.hrl"). +% TODO: can we do without this? +-include_lib("couch_mrview/include/couch_mrview.hrl"). -include("couch_db_int.hrl"). -define(DBNAME_REGEX, @@ -288,6 +293,12 @@ wait_for_compaction(#db{main_pid = Pid} = Db, Timeout) -> is_compacting(DbName) -> couch_server:is_compacting(DbName). +has_access_enabled(#db{access = true}) -> true; +has_access_enabled(_) -> false. + +is_read_from_ddoc_cache(Options) -> + lists:member(ddoc_cache, Options). + delete_doc(Db, Id, Revisions) -> DeletedDocs = [#doc{id = Id, revs = [Rev], deleted = true} || Rev <- Revisions], {ok, [Result]} = update_docs(Db, DeletedDocs, []), @@ -296,23 +307,37 @@ delete_doc(Db, Id, Revisions) -> open_doc(Db, IdOrDocInfo) -> open_doc(Db, IdOrDocInfo, []). -open_doc(Db, Id, Options) -> +open_doc(Db, Id, Options0) -> increment_stat(Db, [couchdb, database_reads]), + Options = + case has_access_enabled(Db) of + true -> Options0 ++ [conflicts]; + _Else -> Options0 + end, case open_doc_int(Db, Id, Options) of {ok, #doc{deleted = true} = Doc} -> case lists:member(deleted, Options) of true -> - apply_open_options({ok, Doc}, Options); + {ok, Doc}; false -> {not_found, deleted} end; Else -> - apply_open_options(Else, Options) + Else end. -apply_open_options({ok, Doc}, Options) -> +apply_open_options(Db, Options) -> + apply_open_options2(Db, Options). + +apply_open_options(Db, {ok, Doc}, Options) -> + ok = validate_access(Db, Doc, Options), + apply_open_options1({ok, Doc}, Options); +apply_open_options(_Db, Else, _Options) -> + Else. + +apply_open_options1({ok, Doc}, Options) -> apply_open_options2(Doc, Options); -apply_open_options(Else, _Options) -> +apply_open_options1(Else, _Options) -> Else. 
apply_open_options2(Doc, []) -> @@ -358,14 +383,14 @@ open_doc_revs(Db, IdRevsOpts, Options) when is_list(IdRevsOpts) -> AllResults = open_doc_revs_int(Db, IdRevs, Options), % Apply document open options like {atts_since, ...} etc ResultsZipFun = fun(DocOpts, {ok, Results}) -> - [apply_open_options(R, DocOpts) || R <- Results] + [apply_open_options(Db, R, DocOpts) || R <- Results] end, lists:zipwith(ResultsZipFun, DocOptsOnly, AllResults). open_doc_revs(Db, Id, Revs, Options) -> increment_stat(Db, [couchdb, database_reads]), [{ok, Results}] = open_doc_revs_int(Db, [{Id, Revs}], Options), - {ok, [apply_open_options(Result, Options) || Result <- Results]}. + {ok, [apply_open_options(Db, Result, Options) || Result <- Results]}. % Each returned result is a list of tuples: % {Id, MissingRevs, PossibleAncestors} @@ -609,7 +634,8 @@ get_db_info(Db) -> name = Name, compactor_pid = Compactor, instance_start_time = StartTime, - committed_update_seq = CommittedUpdateSeq + committed_update_seq = CommittedUpdateSeq, + access = Access } = Db, {ok, DocCount} = get_doc_count(Db), {ok, DelDocCount} = get_del_doc_count(Db), @@ -644,7 +670,8 @@ get_db_info(Db) -> {committed_update_seq, CommittedUpdateSeq}, {compacted_seq, CompactedSeq}, {props, Props}, - {uuid, Uuid} + {uuid, Uuid}, + {access, Access} ], {ok, InfoList}. @@ -769,6 +796,79 @@ security_error_type(#user_ctx{name = null}) -> security_error_type(#user_ctx{name = _}) -> forbidden. +is_per_user_ddoc(#doc{access = []}) -> false; +is_per_user_ddoc(#doc{access = [<<"_users">>]}) -> false; +is_per_user_ddoc(_) -> true. + +validate_access(Db, Doc) -> + validate_access(Db, Doc, []). + +validate_access(Db, Doc, Options) -> + validate_access1(has_access_enabled(Db), Db, Doc, Options). + +validate_access1(false, _Db, _Doc, _Options) -> + ok; +validate_access1(true, Db, #doc{meta = Meta} = Doc, Options) -> + case proplists:get_value(conflicts, Meta) of + % no conflicts + undefined -> + case is_read_from_ddoc_cache(Options) andalso is_per_user_ddoc(Doc) of + true -> throw({not_found, missing}); + _False -> validate_access2(Db, Doc) + end; + % only admins can read conflicted docs in _access dbs + _Else -> + % TODO: expand: if leaves agree on _access, then a user should be able + % to proceed normally, only if they disagree should this become admin-only + case is_admin(Db) of + true -> ok; + _Else2 -> throw({forbidden, <<"document is in conflict">>}) + end + end. +validate_access2(Db, Doc) -> + validate_access3(check_access(Db, Doc)). + +validate_access3(true) -> ok; +% TODO: fix language +validate_access3(_) -> throw({forbidden, <<"can't touch this">>}). + +check_access(Db, #doc{access = Access}) -> + check_access(Db, Access); +check_access(Db, Access) -> + %couch_log:notice("~n Db.user_ctx: ~p, Access: ~p ~n", [Db#db.user_ctx, Access]), + #user_ctx{ + name = UserName, + roles = UserRoles + } = Db#db.user_ctx, + case Access of + [] -> + % if doc has no _access, userCtX must be admin + is_admin(Db); + Access -> + % if doc has _access, userCtx must be admin OR matching user or role + % _access = ["a", "b", ] + case is_admin(Db) of + true -> + true; + _ -> + case {check_name(UserName, Access), check_roles(UserRoles, Access)} of + {true, _} -> true; + {_, true} -> true; + _ -> false + end + end + end. + +check_name(null, _Access) -> true; +check_name(UserName, Access) -> lists:member(UserName, Access). 
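A hedged restatement of the check_access/2 rules above at the HTTP level, mirroring the read tests near the end of this diff (the URL, port and the a/x/y users are assumptions carried over from couchdb_access_tests.erl):

    Url = "http://127.0.0.1:15984",   % assumed dev-cluster address
    AdminHeaders = [{"Content-Type", "application/json"}, {basic_auth, {"a", "a"}}],
    UserXHeaders = [{"Content-Type", "application/json"}, {basic_auth, {"x", "x"}}],
    UserYHeaders = [{"Content-Type", "application/json"}, {basic_auth, {"y", "y"}}],
    % a doc whose _access names "x": admins and "x" can read it, "y" cannot
    {ok, 201, _, _} = test_request:put(Url ++ "/db/a", AdminHeaders,
        "{\"a\":1,\"_access\":[\"x\"]}"),
    {ok, 200, _, _} = test_request:get(Url ++ "/db/a", UserXHeaders),
    {ok, 403, _, _} = test_request:get(Url ++ "/db/a", UserYHeaders),
    % a doc without _access stays admin-only, even for reads
    {ok, 201, _, _} = test_request:put(Url ++ "/db/b", AdminHeaders, "{\"b\":2}"),
    {ok, 403, _, _} = test_request:get(Url ++ "/db/b", UserXHeaders).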
+% nicked from couch_db:check_security +% TODO: might need DRY + +check_roles(Roles, Access) -> + UserRolesSet = ordsets:from_list(Roles), + RolesSet = ordsets:from_list(Access ++ ["_users"]), + not ordsets:is_disjoint(UserRolesSet, RolesSet). + get_admins(#db{security = SecProps}) -> couch_util:get_value(<<"admins">>, SecProps, {[]}). @@ -910,9 +1010,14 @@ group_alike_docs([Doc | Rest], [Bucket | RestBuckets]) -> end. validate_doc_update(#db{} = Db, #doc{id = <<"_design/", _/binary>>} = Doc, _GetDiskDocFun) -> - case catch check_is_admin(Db) of - ok -> validate_ddoc(Db, Doc); - Error -> Error + case couch_doc:has_access(Doc) of + true -> + validate_ddoc(Db, Doc); + _Else -> + case catch check_is_admin(Db) of + ok -> validate_ddoc(Db, Doc); + Error -> Error + end end; validate_doc_update(#db{validate_doc_funs = undefined} = Db, Doc, Fun) -> ValidationFuns = load_validation_funs(Db), @@ -1307,6 +1412,36 @@ doc_tag(#doc{meta = Meta}) -> Else -> throw({invalid_doc_tag, Else}) end. +validate_update(Db, Doc) -> + case catch validate_access(Db, Doc) of + ok -> Doc; + Error -> Error + end. + +validate_docs_access(Db, DocBuckets, DocErrors) -> + validate_docs_access1(Db, DocBuckets, {[], DocErrors}). + +validate_docs_access1(_Db, [], {DocBuckets0, DocErrors}) -> + DocBuckets1 = lists:reverse(lists:map(fun lists:reverse/1, DocBuckets0)), + DocBuckets = + case DocBuckets1 of + [[]] -> []; + Else -> Else + end, + {ok, DocBuckets, lists:reverse(DocErrors)}; +validate_docs_access1(Db, [DocBucket | RestBuckets], {DocAcc, ErrorAcc}) -> + {NewBuckets, NewErrors} = lists:foldl( + fun(Doc, {Acc, ErrAcc}) -> + case catch validate_access(Db, Doc) of + ok -> {[Doc | Acc], ErrAcc}; + Error -> {Acc, [{doc_tag(Doc), Error} | ErrAcc]} + end + end, + {[], ErrorAcc}, + DocBucket + ), + validate_docs_access1(Db, RestBuckets, {[NewBuckets | DocAcc], NewErrors}). + update_docs(Db, Docs0, Options, ?REPLICATED_CHANGES) -> Docs = tag_docs(Docs0), @@ -1330,13 +1465,41 @@ update_docs(Db, Docs0, Options, ?REPLICATED_CHANGES) -> ] || Bucket <- DocBuckets ], - {ok, _} = write_and_commit( + {ok, Results} = write_and_commit( Db, DocBuckets2, NonRepDocs, [merge_conflicts | Options] ), - {ok, DocErrors}; + case couch_db:has_access_enabled(Db) of + false -> + % we’re done here + {ok, DocErrors}; + _ -> + AccessViolations = lists:filter(fun({_Ref, Tag}) -> Tag =:= access end, Results), + case length(AccessViolations) of + 0 -> + % we’re done here + {ok, DocErrors}; + _ -> + % dig out FDIs from Docs matching our tags/refs + DocsDict = lists:foldl( + fun(Doc, Dict) -> + Tag = doc_tag(Doc), + dict:store(Tag, Doc, Dict) + end, + dict:new(), + Docs + ), + AccessResults = lists:map( + fun({Ref, Access}) -> + {dict:fetch(Ref, DocsDict), Access} + end, + AccessViolations + ), + {ok, AccessResults} + end + end; update_docs(Db, Docs0, Options, ?INTERACTIVE_EDIT) -> Docs = tag_docs(Docs0), @@ -1458,7 +1621,7 @@ write_and_commit( MergeConflicts = lists:member(merge_conflicts, Options), MRef = erlang:monitor(process, Pid), try - Pid ! {update_docs, self(), DocBuckets, NonRepDocs, MergeConflicts}, + Pid ! {update_docs, self(), DocBuckets, NonRepDocs, MergeConflicts, Ctx}, case collect_results_with_metrics(Pid, MRef, []) of {ok, Results} -> {ok, Results}; @@ -1473,7 +1636,7 @@ write_and_commit( % We only retry once DocBuckets3 = prepare_doc_summaries(Db2, DocBuckets2), close(Db2), - Pid ! {update_docs, self(), DocBuckets3, NonRepDocs, MergeConflicts}, + Pid ! 
{update_docs, self(), DocBuckets3, NonRepDocs, MergeConflicts, Ctx}, case collect_results_with_metrics(Pid, MRef, []) of {ok, Results} -> {ok, Results}; retry -> throw({update_error, compaction_retry}) @@ -1685,6 +1848,12 @@ open_read_stream(Db, AttState) -> is_active_stream(Db, StreamEngine) -> couch_db_engine:is_active_stream(Db, StreamEngine). +changes_since(Db, StartSeq, Fun, Options, Acc) when is_record(Db, db) -> + case couch_db:has_access_enabled(Db) and not couch_db:is_admin(Db) of + true -> couch_mrview:query_changes_access(Db, StartSeq, Fun, Options, Acc); + false -> couch_db_engine:fold_changes(Db, StartSeq, Fun, Options, Acc) + end. + calculate_start_seq(_Db, _Node, Seq) when is_integer(Seq) -> Seq; calculate_start_seq(Db, Node, {Seq, Uuid}) -> @@ -1813,7 +1982,10 @@ fold_changes(Db, StartSeq, UserFun, UserAcc) -> fold_changes(Db, StartSeq, UserFun, UserAcc, []). fold_changes(Db, StartSeq, UserFun, UserAcc, Opts) -> - couch_db_engine:fold_changes(Db, StartSeq, UserFun, UserAcc, Opts). + case couch_db:has_access_enabled(Db) and not couch_db:is_admin(Db) of + true -> couch_mrview:query_changes_access(Db, StartSeq, UserFun, Opts, UserAcc); + false -> couch_db_engine:fold_changes(Db, StartSeq, UserFun, UserAcc, Opts) + end. fold_purge_infos(Db, StartPurgeSeq, Fun, Acc) -> fold_purge_infos(Db, StartPurgeSeq, Fun, Acc, []). @@ -1831,7 +2003,7 @@ open_doc_revs_int(Db, IdRevs, Options) -> lists:zipwith( fun({Id, Revs}, Lookup) -> case Lookup of - #full_doc_info{rev_tree = RevTree} -> + #full_doc_info{rev_tree = RevTree, access = Access} -> {FoundRevs, MissingRevs} = case Revs of all -> @@ -1852,7 +2024,10 @@ open_doc_revs_int(Db, IdRevs, Options) -> % we have the rev in our list but know nothing about it {{not_found, missing}, {Pos, Rev}}; #leaf{deleted = IsDeleted, ptr = SummaryPtr} -> - {ok, make_doc(Db, Id, IsDeleted, SummaryPtr, FoundRevPath)} + {ok, + make_doc( + Db, Id, IsDeleted, SummaryPtr, FoundRevPath, Access + )} end end, FoundRevs @@ -1874,23 +2049,35 @@ open_doc_revs_int(Db, IdRevs, Options) -> open_doc_int(Db, <> = Id, Options) -> case couch_db_engine:open_local_docs(Db, [Id]) of [#doc{} = Doc] -> - apply_open_options({ok, Doc}, Options); + case Doc#doc.body of + {Body} -> + Access = couch_util:get_value(<<"_access">>, Body), + apply_open_options(Db, {ok, Doc#doc{access = Access}}, Options); + _Else -> + apply_open_options(Db, {ok, Doc}, Options) + end; [not_found] -> {not_found, missing} end; -open_doc_int(Db, #doc_info{id = Id, revs = [RevInfo | _]} = DocInfo, Options) -> +open_doc_int(Db, #doc_info{id = Id, revs = [RevInfo | _], access = Access} = DocInfo, Options) -> #rev_info{deleted = IsDeleted, rev = {Pos, RevId}, body_sp = Bp} = RevInfo, - Doc = make_doc(Db, Id, IsDeleted, Bp, {Pos, [RevId]}), + Doc = make_doc(Db, Id, IsDeleted, Bp, {Pos, [RevId]}, Access), apply_open_options( - {ok, Doc#doc{meta = doc_meta_info(DocInfo, [], Options)}}, Options + Db, + {ok, Doc#doc{meta = doc_meta_info(DocInfo, [], Options)}}, + Options ); -open_doc_int(Db, #full_doc_info{id = Id, rev_tree = RevTree} = FullDocInfo, Options) -> +open_doc_int( + Db, #full_doc_info{id = Id, rev_tree = RevTree, access = Access} = FullDocInfo, Options +) -> #doc_info{revs = [#rev_info{deleted = IsDeleted, rev = Rev, body_sp = Bp} | _]} = DocInfo = couch_doc:to_doc_info(FullDocInfo), {[{_, RevPath}], []} = couch_key_tree:get(RevTree, [Rev]), - Doc = make_doc(Db, Id, IsDeleted, Bp, RevPath), + Doc = make_doc(Db, Id, IsDeleted, Bp, RevPath, Access), apply_open_options( - {ok, Doc#doc{meta = 
doc_meta_info(DocInfo, RevTree, Options)}}, Options + Db, + {ok, Doc#doc{meta = doc_meta_info(DocInfo, RevTree, Options)}}, + Options ); open_doc_int(Db, Id, Options) -> case get_full_doc_info(Db, Id) of @@ -1951,21 +2138,26 @@ doc_meta_info( true -> [{local_seq, Seq}] end. -make_doc(_Db, Id, Deleted, nil = _Bp, RevisionPath) -> +make_doc(Db, Id, Deleted, Bp, {Pos, Revs}) -> + make_doc(Db, Id, Deleted, Bp, {Pos, Revs}, []). + +make_doc(_Db, Id, Deleted, nil = _Bp, RevisionPath, Access) -> #doc{ id = Id, revs = RevisionPath, body = [], atts = [], - deleted = Deleted + deleted = Deleted, + access = Access }; -make_doc(#db{} = Db, Id, Deleted, Bp, {Pos, Revs}) -> +make_doc(#db{} = Db, Id, Deleted, Bp, {Pos, Revs}, Access) -> RevsLimit = get_revs_limit(Db), Doc0 = couch_db_engine:read_doc_body(Db, #doc{ id = Id, revs = {Pos, lists:sublist(Revs, 1, RevsLimit)}, body = Bp, - deleted = Deleted + deleted = Deleted, + access = Access }), Doc1 = case Doc0#doc.atts of diff --git a/src/couch/src/couch_db_int.hrl b/src/couch/src/couch_db_int.hrl index 7da0ce5dfe2..b67686fab88 100644 --- a/src/couch/src/couch_db_int.hrl +++ b/src/couch/src/couch_db_int.hrl @@ -37,7 +37,8 @@ waiting_delayed_commit_deprecated, options = [], - compression + compression, + access = false }). diff --git a/src/couch/src/couch_db_updater.erl b/src/couch/src/couch_db_updater.erl index fac5aee1068..1e5566b210b 100644 --- a/src/couch/src/couch_db_updater.erl +++ b/src/couch/src/couch_db_updater.erl @@ -24,6 +24,11 @@ % 10 GiB -define(DEFAULT_MAX_PARTITION_SIZE, 16#280000000). +-define(DEFAULT_SECURITY_OBJECT, [ + {<<"members">>, {[{<<"roles">>, [<<"_admin">>]}]}}, + {<<"admins">>, {[{<<"roles">>, [<<"_admin">>]}]}} +]). + -record(merge_acc, { revs_limit, merge_conflicts, @@ -36,7 +41,7 @@ init({Engine, DbName, FilePath, Options0}) -> erlang:put(io_priority, {db_update, DbName}), update_idle_limit_from_config(), - DefaultSecObj = default_security_object(DbName), + DefaultSecObj = default_security_object(DbName, Options0), Options = [{default_security_object, DefaultSecObj} | Options0], try {ok, EngineState} = couch_db_engine:init(Engine, FilePath, Options), @@ -165,7 +170,7 @@ handle_cast(Msg, #db{name = Name} = Db) -> {stop, Msg, Db}. 
handle_info( - {update_docs, Client, GroupedDocs, NonRepDocs, MergeConflicts}, + {update_docs, Client, GroupedDocs, NonRepDocs, MergeConflicts, UserCtx}, Db ) -> GroupedDocs2 = sort_and_tag_grouped_docs(Client, GroupedDocs), @@ -181,7 +186,7 @@ handle_info( Clients = [Client] end, NonRepDocs2 = [{Client, NRDoc} || NRDoc <- NonRepDocs], - try update_docs_int(Db, GroupedDocs3, NonRepDocs2, MergeConflicts) of + try update_docs_int(Db, GroupedDocs3, NonRepDocs2, MergeConflicts, UserCtx) of {ok, Db2, UpdatedDDocIds} -> ok = couch_server:db_updated(Db2), case {couch_db:get_update_seq(Db), couch_db:get_update_seq(Db2)} of @@ -320,6 +325,7 @@ init_db(DbName, FilePath, EngineState, Options) -> BDU = couch_util:get_value(before_doc_update, Options, nil), ADR = couch_util:get_value(after_doc_read, Options, nil), + Access = couch_util:get_value(access, Options, false), NonCreateOpts = [Opt || Opt <- Options, Opt /= create], InitDb = #db{ @@ -329,7 +335,8 @@ init_db(DbName, FilePath, EngineState, Options) -> instance_start_time = StartTime, options = NonCreateOpts, before_doc_update = BDU, - after_doc_read = ADR + after_doc_read = ADR, + access = Access }, DbProps = couch_db_engine:get_props(InitDb), @@ -390,7 +397,8 @@ flush_trees( active = WrittenSize, external = ExternalSize }, - atts = AttSizeInfo + atts = AttSizeInfo, + access = NewDoc#doc.access }, {Leaf, add_sizes(Type, Leaf, SizesAcc)}; #leaf{} -> @@ -474,6 +482,9 @@ doc_tag(#doc{meta = Meta}) -> Else -> throw({invalid_doc_tag, Else}) end. +merge_rev_trees([[]], [], Acc) -> + % validate_docs_access left us with no docs to merge + {ok, Acc}; merge_rev_trees([], [], Acc) -> {ok, Acc#merge_acc{ add_infos = lists:reverse(Acc#merge_acc.add_infos) @@ -655,22 +666,29 @@ maybe_stem_full_doc_info(#full_doc_info{rev_tree = Tree} = Info, Limit) -> Info end. -update_docs_int(Db, DocsList, LocalDocs, MergeConflicts) -> +update_docs_int(Db, DocsList, LocalDocs, MergeConflicts, UserCtx) -> UpdateSeq = couch_db_engine:get_update_seq(Db), RevsLimit = couch_db_engine:get_revs_limit(Db), Ids = [Id || [{_Client, #doc{id = Id}} | _] <- DocsList], + % TODO: maybe a perf hit, instead of zip3-ing existing Accesses into + % our doc lists, maybe find 404 docs differently down in + % validate_docs_access (revs is [], which we can then use + % to skip validation as we know it is the first doc rev) + Accesses = [Access || [{_Client, #doc{access = Access}} | _] <- DocsList], + % lookup up the old documents, if they exist. OldDocLookups = couch_db_engine:open_docs(Db, Ids), - OldDocInfos = lists:zipwith( + OldDocInfos = lists:zipwith3( fun - (_Id, #full_doc_info{} = FDI) -> + (_Id, #full_doc_info{} = FDI, _Access) -> FDI; - (Id, not_found) -> - #full_doc_info{id = Id} + (Id, not_found, Access) -> + #full_doc_info{id = Id, access = Access} end, Ids, - OldDocLookups + OldDocLookups, + Accesses ), %% Get the list of full partitions @@ -707,7 +725,17 @@ update_docs_int(Db, DocsList, LocalDocs, MergeConflicts) -> cur_seq = UpdateSeq, full_partitions = FullPartitions }, - {ok, AccOut} = merge_rev_trees(DocsList, OldDocInfos, AccIn), + % Loop over DocsList, validate_access for each OldDocInfo on Db, + %. if no OldDocInfo, then send to DocsListValidated, keep OldDocsInfo + % if valid, then send to DocsListValidated, OldDocsInfo + %. if invalid, then send_result tagged `access`(c.f. `conflict) + %. 
and don’t add to DLV, nor ODI + + {DocsListValidated, OldDocInfosValidated} = validate_docs_access( + Db, UserCtx, DocsList, OldDocInfos + ), + + {ok, AccOut} = merge_rev_trees(DocsListValidated, OldDocInfosValidated, AccIn), #merge_acc{ add_infos = NewFullDocInfos, rem_seqs = RemSeqs @@ -717,7 +745,8 @@ update_docs_int(Db, DocsList, LocalDocs, MergeConflicts) -> % the trees, the attachments are already written to disk) {ok, IndexFDIs} = flush_trees(Db, NewFullDocInfos, []), Pairs = pair_write_info(OldDocLookups, IndexFDIs), - LocalDocs2 = update_local_doc_revs(LocalDocs), + LocalDocs1 = apply_local_docs_access(Db, LocalDocs), + LocalDocs2 = update_local_doc_revs(LocalDocs1), {ok, Db1} = couch_db_engine:write_doc_infos(Db, Pairs, LocalDocs2), @@ -732,18 +761,92 @@ update_docs_int(Db, DocsList, LocalDocs, MergeConflicts) -> length(LocalDocs2) ), - % Check if we just updated any design documents, and update the validation - % funs if we did. + % Check if we just updated any non-access design documents, + % and update the validation funs if we did. + NonAccessIds = [Id || [{_Client, #doc{id = Id, access = []}} | _] <- DocsList], UpdatedDDocIds = lists:flatmap( fun (<<"_design/", _/binary>> = Id) -> [Id]; (_) -> [] end, - Ids + NonAccessIds ), {ok, commit_data(Db1), UpdatedDDocIds}. +% at this point, we already validated this Db is access enabled, so do the checks right away. +check_access(Db, UserCtx, Access) -> couch_db:check_access(Db#db{user_ctx = UserCtx}, Access). + +validate_docs_access(Db, UserCtx, DocsList, OldDocInfos) -> + case couch_db:has_access_enabled(Db) of + true -> validate_docs_access_int(Db, UserCtx, DocsList, OldDocInfos); + _Else -> {DocsList, OldDocInfos} + end. + +validate_docs_access_int(Db, UserCtx, DocsList, OldDocInfos) -> + validate_docs_access(Db, UserCtx, DocsList, OldDocInfos, [], []). + +validate_docs_access(_Db, _UserCtx, [], [], DocsListValidated, OldDocInfosValidated) -> + {lists:reverse(DocsListValidated), lists:reverse(OldDocInfosValidated)}; +validate_docs_access( + Db, UserCtx, [Docs | DocRest], [OldInfo | OldInfoRest], DocsListValidated, OldDocInfosValidated +) -> + % loop over Docs as {Client, NewDoc} + % validate Doc + % if valid, then put back in Docs + % if not, then send_result and skip + NewDocs = lists:foldl( + fun({Client, Doc}, Acc) -> + % check if we are allowed to update the doc, skip when new doc + OldDocMatchesAccess = + case OldInfo#full_doc_info.rev_tree of + [] -> true; + _ -> check_access(Db, UserCtx, OldInfo#full_doc_info.access) + end, + + NewDocMatchesAccess = check_access(Db, UserCtx, Doc#doc.access), + case OldDocMatchesAccess andalso NewDocMatchesAccess of + % if valid, then send to DocsListValidated, OldDocsInfo + true -> + % and store the access context on the new doc + [{Client, Doc} | Acc]; + % if invalid, then send_result tagged `access`(c.f. `conflict) + false -> + % and don’t add to DLV, nor ODI + send_result(Client, Doc, access), + Acc + end + end, + [], + Docs + ), + + {NewDocsListValidated, NewOldDocInfosValidated} = + case length(NewDocs) of + % we sent out all docs as invalid access, drop the old doc info associated with it + 0 -> + {[NewDocs | DocsListValidated], OldDocInfosValidated}; + _ -> + {[NewDocs | DocsListValidated], [OldInfo | OldDocInfosValidated]} + end, + validate_docs_access( + Db, UserCtx, DocRest, OldInfoRest, NewDocsListValidated, NewOldDocInfosValidated + ). + +apply_local_docs_access(Db, Docs) -> + apply_local_docs_access1(couch_db:has_access_enabled(Db), Docs). 
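The net effect of validate_docs_access above, sketched (hedged) at the HTTP level in the style of the eunit suite below: a writer must satisfy both the stored revision's _access and the _access of the revision being written, so re-pointing a document at another user is rejected and surfaces as a 403 (see the error_info(access) clause added in chttpd.erl):

    Url = "http://127.0.0.1:15984",   % assumed dev-cluster address
    UserXHeaders = [{"Content-Type", "application/json"}, {basic_auth, {"x", "x"}}],
    {ok, 201, _, Body} = test_request:put(Url ++ "/db/b", UserXHeaders,
        "{\"a\":1,\"_access\":[\"x\"]}"),
    {Props} = jiffy:decode(Body),
    Rev = binary_to_list(couch_util:get_value(<<"rev">>, Props)),
    % changing _access from ["x"] to ["y"] (or dropping it) fails for user "x"
    {ok, 403, _, _} = test_request:put(Url ++ "/db/b", UserXHeaders,
        "{\"a\":2,\"_access\":[\"y\"],\"_rev\":\"" ++ Rev ++ "\"}").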
+ +apply_local_docs_access1(false, Docs) -> + Docs; +apply_local_docs_access1(true, Docs) -> + lists:map( + fun({Client, #doc{access = Access, body = {Body}} = Doc}) -> + Doc1 = Doc#doc{body = {[{<<"_access">>, Access} | Body]}}, + {Client, Doc1} + end, + Docs + ). + update_local_doc_revs(Docs) -> lists:foldl( fun({Client, Doc}, Acc) -> @@ -760,6 +863,14 @@ update_local_doc_revs(Docs) -> Docs ). +default_security_object(DbName, []) -> + default_security_object(DbName); +default_security_object(DbName, Options) -> + case lists:member({access, true}, Options) of + false -> default_security_object(DbName); + true -> ?DEFAULT_SECURITY_OBJECT + end. + increment_local_doc_revs(#doc{deleted = true} = Doc) -> {ok, Doc#doc{revs = {0, [0]}}}; increment_local_doc_revs(#doc{revs = {0, []}} = Doc) -> @@ -925,20 +1036,14 @@ get_meta_body_size(Meta) -> default_security_object(<<"shards/", _/binary>>) -> case config:get("couchdb", "default_security", "admin_only") of "admin_only" -> - [ - {<<"members">>, {[{<<"roles">>, [<<"_admin">>]}]}}, - {<<"admins">>, {[{<<"roles">>, [<<"_admin">>]}]}} - ]; + ?DEFAULT_SECURITY_OBJECT; Everyone when Everyone == "everyone"; Everyone == "admin_local" -> [] end; default_security_object(_DbName) -> case config:get("couchdb", "default_security", "admin_only") of Admin when Admin == "admin_only"; Admin == "admin_local" -> - [ - {<<"members">>, {[{<<"roles">>, [<<"_admin">>]}]}}, - {<<"admins">>, {[{<<"roles">>, [<<"_admin">>]}]}} - ]; + ?DEFAULT_SECURITY_OBJECT; "everyone" -> [] end. diff --git a/src/couch/src/couch_doc.erl b/src/couch/src/couch_doc.erl index 95b1c8b4165..acf4994a33b 100644 --- a/src/couch/src/couch_doc.erl +++ b/src/couch/src/couch_doc.erl @@ -26,6 +26,8 @@ -export([with_ejson_body/1]). -export([is_deleted/1]). +-export([has_access/1, has_no_access/1]). + -include_lib("couch/include/couch_db.hrl"). -spec to_path(#doc{}) -> path(). @@ -40,15 +42,24 @@ to_branch(Doc, [RevId | Rest]) -> [{RevId, ?REV_MISSING, to_branch(Doc, Rest)}]. % helpers used by to_json_obj +reduce_access({Access}) -> Access; +reduce_access(Access) -> Access. + to_json_rev(0, []) -> []; to_json_rev(Start, [FirstRevId | _]) -> [{<<"_rev">>, ?l2b([integer_to_list(Start), "-", revid_to_str(FirstRevId)])}]. -to_json_body(true, {Body}) -> +to_json_body(true, {Body}, []) -> Body ++ [{<<"_deleted">>, true}]; -to_json_body(false, {Body}) -> - Body. +to_json_body(false, {Body}, []) -> + Body; +to_json_body(true, {Body}, Access0) -> + Access = reduce_access(Access0), + Body ++ [{<<"_deleted">>, true}] ++ [{<<"_access">>, {Access}}]; +to_json_body(false, {Body}, Access0) -> + Access = reduce_access(Access0), + Body ++ [{<<"_access">>, Access}]. 
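A hedged round-trip example for the couch_doc.erl changes in this hunk: _access is lifted out of the body into the new #doc.access field when parsing (see the transfer_fields/3 clause a little further down) and appended back onto the body by to_json_body/3 when serialising. Assumes couch/include/couch_db.hrl is included for the #doc record:

    Doc = couch_doc:from_json_obj({[
        {<<"_id">>, <<"d1">>},
        {<<"a">>, 1},
        {<<"_access">>, [<<"x">>]}
    ]}),
    [<<"x">>] = Doc#doc.access,
    {[{<<"_id">>, <<"d1">>}, {<<"a">>, 1}, {<<"_access">>, [<<"x">>]}]} =
        couch_doc:to_json_obj(Doc, []).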
to_json_revisions(Options, Start, RevIds0) -> RevIds = @@ -138,14 +149,15 @@ doc_to_json_obj( deleted = Del, body = Body, revs = {Start, RevIds}, - meta = Meta + meta = Meta, + access = Access } = Doc, Options ) -> { [{<<"_id">>, Id}] ++ to_json_rev(Start, RevIds) ++ - to_json_body(Del, Body) ++ + to_json_body(Del, Body, Access) ++ to_json_revisions(Options, Start, RevIds) ++ to_json_meta(Meta) ++ to_json_attachments(Doc#doc.atts, Options) @@ -335,13 +347,8 @@ transfer_fields([{<<"_conflicts">>, _} | Rest], Doc, DbName) -> transfer_fields(Rest, Doc, DbName); transfer_fields([{<<"_deleted_conflicts">>, _} | Rest], Doc, DbName) -> transfer_fields(Rest, Doc, DbName); -% special field for per doc access control, for future compatibility -transfer_fields( - [{<<"_access">>, _} = Field | Rest], - #doc{body = Fields} = Doc, - DbName -) -> - transfer_fields(Rest, Doc#doc{body = [Field | Fields]}, DbName); +transfer_fields([{<<"_access">>, Access} = Field | Rest], Doc, DbName) -> + transfer_fields(Rest, Doc#doc{access = Access}, DbName); % special fields for replication documents transfer_fields( [{<<"_replication_state">>, _} = Field | Rest], @@ -401,7 +408,7 @@ max_seq(Tree, UpdateSeq) -> end, couch_key_tree:fold(FoldFun, UpdateSeq, Tree). -to_doc_info_path(#full_doc_info{id = Id, rev_tree = Tree, update_seq = FDISeq}) -> +to_doc_info_path(#full_doc_info{id = Id, rev_tree = Tree, update_seq = FDISeq, access = Access}) -> RevInfosAndPath = [ {rev_info(Node), Path} || {_Leaf, Path} = Node <- @@ -419,7 +426,10 @@ to_doc_info_path(#full_doc_info{id = Id, rev_tree = Tree, update_seq = FDISeq}) ), [{_RevInfo, WinPath} | _] = SortedRevInfosAndPath, RevInfos = [RevInfo || {RevInfo, _Path} <- SortedRevInfosAndPath], - {#doc_info{id = Id, high_seq = max_seq(Tree, FDISeq), revs = RevInfos}, WinPath}. + { + #doc_info{id = Id, high_seq = max_seq(Tree, FDISeq), revs = RevInfos, access = Access}, + WinPath + }. rev_info({#leaf{} = Leaf, {Pos, [RevId | _]}}) -> #rev_info{ @@ -459,6 +469,20 @@ is_deleted(Tree) -> false end. +get_access({Props}) -> + get_access(couch_doc:from_json_obj({Props})); +get_access(#doc{access = Access}) -> + Access. + +has_access(Doc) -> + has_access1(get_access(Doc)). + +has_no_access(Doc) -> + not has_access1(get_access(Doc)). + +has_access1([]) -> false; +has_access1(_) -> true. + get_validate_doc_fun({Props}) -> get_validate_doc_fun(couch_doc:from_json_obj({Props})); get_validate_doc_fun(#doc{body = {Props}} = DDoc) -> diff --git a/src/couch/src/couch_httpd_auth.erl b/src/couch/src/couch_httpd_auth.erl index 4a7b217d1fe..eff387a525b 100644 --- a/src/couch/src/couch_httpd_auth.erl +++ b/src/couch/src/couch_httpd_auth.erl @@ -99,6 +99,13 @@ basic_name_pw(Req) -> nil end. +extract_roles(UserProps) -> + Roles = couch_util:get_value(<<"roles">>, UserProps, []), + case lists:member(<<"_admin">>, Roles) of + true -> Roles; + _ -> Roles ++ [<<"_users">>] + end. + default_authentication_handler(Req) -> default_authentication_handler(Req, couch_auth_cache). 
@@ -117,7 +124,7 @@ default_authentication_handler(Req, AuthModule) -> Req#httpd{ user_ctx = #user_ctx{ name = UserName, - roles = couch_util:get_value(<<"roles">>, UserProps, []) + roles = extract_roles(UserProps) } }; false -> @@ -189,7 +196,7 @@ proxy_auth_user(Req) -> Roles = case header_value(Req, XHeaderRoles) of undefined -> []; - Else -> re:split(Else, "\\s*,\\s*", [trim, {return, binary}]) + Else -> re:split(Else, "\\s*,\\s*", [trim, {return, binary}]) ++ [<<"_users">>] end, case chttpd_util:get_chttpd_auth_config_boolean( @@ -370,9 +377,7 @@ cookie_authentication_handler(#httpd{mochi_req = MochiReq} = Req, AuthModule) -> Req#httpd{ user_ctx = #user_ctx{ name = ?l2b(User), - roles = couch_util:get_value( - <<"roles">>, UserProps, [] - ) + roles = extract_roles(UserProps) }, auth = {FullSecret, TimeLeft < Timeout * 0.9} }; @@ -500,7 +505,7 @@ handle_session_req(#httpd{method = 'POST', mochi_req = MochiReq} = Req, AuthModu {[ {ok, true}, {name, UserName}, - {roles, couch_util:get_value(<<"roles">>, UserProps, [])} + {roles, extract_roles(UserProps)} ]} ); false -> diff --git a/src/couch/src/couch_proc_manager.erl b/src/couch/src/couch_proc_manager.erl index 46765b339f4..f7903ebd4e6 100644 --- a/src/couch/src/couch_proc_manager.erl +++ b/src/couch/src/couch_proc_manager.erl @@ -104,6 +104,7 @@ init([]) -> ets:insert(?SERVERS, get_servers_from_env("COUCHDB_QUERY_SERVER_")), ets:insert(?SERVERS, get_servers_from_env("COUCHDB_NATIVE_QUERY_SERVER_")), ets:insert(?SERVERS, [{"QUERY", {mango_native_proc, start_link, []}}]), + ets:insert(?SERVERS, [{"_ACCESS", {couch_access_native_proc, start_link, []}}]), maybe_configure_erlang_native_servers(), {ok, #state{ diff --git a/src/couch/src/couch_util.erl b/src/couch/src/couch_util.erl index dc58e2bf60e..4199ceb9c59 100644 --- a/src/couch/src/couch_util.erl +++ b/src/couch/src/couch_util.erl @@ -46,6 +46,7 @@ -export([verify_hash_names/2]). -export([get_config_hash_algorithms/0]). -export([remove_sensitive_data/1]). +-export([validate_design_access/1, validate_design_access/2]). -include_lib("couch/include/couch_db.hrl"). @@ -867,3 +868,15 @@ remove_sensitive_data(KVList) -> KVList1 = lists:keyreplace(<<"password">>, 1, KVList, {<<"password">>, <<"****">>}), % some KVList entries are atoms, so test fo this too lists:keyreplace(password, 1, KVList1, {password, <<"****">>}). + +validate_design_access(DDoc) -> + validate_design_access1(DDoc, true). + +validate_design_access(Db, DDoc) -> + validate_design_access1(DDoc, couch_db:has_access_enabled(Db)). + +validate_design_access1(_DDoc, false) -> ok; +validate_design_access1(DDoc, true) -> is_users_ddoc(DDoc). + +is_users_ddoc(#doc{access = [<<"_users">>]}) -> ok; +is_users_ddoc(_) -> throw({forbidden, <<"per-user ddoc access">>}). diff --git a/src/couch/test/eunit/couchdb_access_tests.erl b/src/couch/test/eunit/couchdb_access_tests.erl new file mode 100644 index 00000000000..126e43fb262 --- /dev/null +++ b/src/couch/test/eunit/couchdb_access_tests.erl @@ -0,0 +1,1475 @@ +% Licensed under the Apache License, Version 2.0 (the "License"); you may not +% use this file except in compliance with the License. You may obtain a copy of +% the License at +% +% http://www.apache.org/licenses/LICENSE-2.0 +% +% Unless required by applicable law or agreed to in writing, software +% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the +% License for the specific language governing permissions and limitations under +% the License. + +-module(couchdb_access_tests). + +-include_lib("couch/include/couch_eunit.hrl"). + +-define(CONTENT_JSON, {"Content-Type", "application/json"}). +-define(ADMIN_REQ_HEADERS, [?CONTENT_JSON, {basic_auth, {"a", "a"}}]). +-define(USERX_REQ_HEADERS, [?CONTENT_JSON, {basic_auth, {"x", "x"}}]). +-define(USERY_REQ_HEADERS, [?CONTENT_JSON, {basic_auth, {"y", "y"}}]). +-define(SECURITY_OBJECT, + {[ + {<<"members">>, {[{<<"roles">>, [<<"_admin">>, <<"_users">>]}]}}, + {<<"admins">>, {[{<<"roles">>, [<<"_admin">>]}]}} + ]} +). + +url() -> + Addr = config:get("httpd", "bind_address", "127.0.0.1"), + lists:concat(["http://", Addr, ":", port()]). + +before_each(_) -> + R = test_request:put(url() ++ "/db?q=1&n=1&access=true", ?ADMIN_REQ_HEADERS, ""), + %?debugFmt("~nRequest: ~p~n", [R]), + {ok, 201, _, _} = R, + {ok, _, _, _} = test_request:put( + url() ++ "/db/_security", ?ADMIN_REQ_HEADERS, jiffy:encode(?SECURITY_OBJECT) + ), + url(). + +after_each(_, Url) -> + {ok, 200, _, _} = test_request:delete(Url ++ "/db", ?ADMIN_REQ_HEADERS), + {_, _, _, _} = test_request:delete(Url ++ "/db2", ?ADMIN_REQ_HEADERS), + {_, _, _, _} = test_request:delete(Url ++ "/db3", ?ADMIN_REQ_HEADERS), + ok. + +before_all() -> + Couch = test_util:start_couch([chttpd, couch_replicator]), + Hashed = couch_passwords:hash_admin_password("a"), + ok = config:set("admins", "a", binary_to_list(Hashed), _Persist = false), + ok = config:set("couchdb", "uuid", "21ac467c1bc05e9d9e9d2d850bb1108f", _Persist = false), + ok = config:set("log", "level", "debug", _Persist = false), + ok = config:set("per_doc_access", "enabled", "true", _Persist = false), + + % cleanup and setup + {ok, _, _, _} = test_request:delete(url() ++ "/db", ?ADMIN_REQ_HEADERS), + % {ok, _, _, _} = test_request:put(url() ++ "/db?q=1&n=1&access=true", ?ADMIN_REQ_HEADERS, ""), + + % create users + UserDbUrl = url() ++ "/_users?q=1&n=1", + {ok, _, _, _} = test_request:delete(UserDbUrl, ?ADMIN_REQ_HEADERS, ""), + {ok, 201, _, _} = test_request:put(UserDbUrl, ?ADMIN_REQ_HEADERS, ""), + + UserXDocUrl = url() ++ "/_users/org.couchdb.user:x", + UserXDocBody = "{ \"name\":\"x\", \"roles\": [], \"password\":\"x\", \"type\": \"user\" }", + {ok, 201, _, _} = test_request:put(UserXDocUrl, ?ADMIN_REQ_HEADERS, UserXDocBody), + + UserYDocUrl = url() ++ "/_users/org.couchdb.user:y", + UserYDocBody = "{ \"name\":\"y\", \"roles\": [], \"password\":\"y\", \"type\": \"user\" }", + {ok, 201, _, _} = test_request:put(UserYDocUrl, ?ADMIN_REQ_HEADERS, UserYDocBody), + Couch. + +after_all(_) -> + UserDbUrl = url() ++ "/_users", + {ok, _, _, _} = test_request:delete(UserDbUrl, ?ADMIN_REQ_HEADERS, ""), + ok = test_util:stop_couch(done). 
+ +access_test_() -> + Tests = [ + % Server config + fun should_not_let_create_access_db_if_disabled/2, + + % Doc creation + fun should_not_let_anonymous_user_create_doc/2, + fun should_let_admin_create_doc_with_access/2, + fun should_let_admin_create_doc_without_access/2, + fun should_let_user_create_doc_for_themselves/2, + fun should_not_let_user_create_doc_for_someone_else/2, + fun should_let_user_create_access_ddoc/2, + fun access_ddoc_should_have_no_effects/2, + + % Doc updates + fun users_with_access_can_update_doc/2, + fun users_without_access_can_not_update_doc/2, + fun users_with_access_can_not_change_access/2, + fun users_with_access_can_not_remove_access/2, + + % Doc reads + fun should_let_admin_read_doc_with_access/2, + fun user_with_access_can_read_doc/2, + fun user_without_access_can_not_read_doc/2, + fun user_can_not_read_doc_without_access/2, + fun admin_with_access_can_read_conflicted_doc/2, + fun user_with_access_can_not_read_conflicted_doc/2, + + % Doc deletes + fun should_let_admin_delete_doc_with_access/2, + fun should_let_user_delete_doc_for_themselves/2, + fun should_not_let_user_delete_doc_for_someone_else/2, + + % _all_docs with include_docs + fun should_let_admin_fetch_all_docs/2, + fun should_let_user_fetch_their_own_all_docs/2, + + % _changes + fun should_let_admin_fetch_changes/2, + fun should_let_user_fetch_their_own_changes/2, + + % views + fun should_not_allow_admin_access_ddoc_view_request/2, + fun should_not_allow_user_access_ddoc_view_request/2, + fun should_allow_admin_users_access_ddoc_view_request/2, + fun should_allow_user_users_access_ddoc_view_request/2, + + % replication + fun should_allow_admin_to_replicate_from_access_to_access/2, + fun should_allow_admin_to_replicate_from_no_access_to_access/2, + fun should_allow_admin_to_replicate_from_access_to_no_access/2, + fun should_allow_admin_to_replicate_from_no_access_to_no_access/2, + % + fun should_allow_user_to_replicate_from_access_to_access/2, + fun should_allow_user_to_replicate_from_access_to_no_access/2, + fun should_allow_user_to_replicate_from_no_access_to_access/2, + fun should_allow_user_to_replicate_from_no_access_to_no_access/2, + + % _revs_diff for docs you don’t have access to + fun should_not_allow_user_to_revs_diff_other_docs/2 + + % TODO: create test db with role and not _users in _security.members + % and make sure a user in that group can access while a user not + % in that group cant + % % potential future feature + % % fun should_let_user_fetch_their_own_all_docs_plus_users_ddocs/2%, + ], + { + "Access tests", + { + setup, + fun before_all/0, + fun after_all/1, + [ + make_test_cases(clustered, Tests) + ] + } + }. + +make_test_cases(Mod, Funs) -> + { + lists:flatten(io_lib:format("~s", [Mod])), + {foreachx, fun before_each/1, fun after_each/2, [{Mod, Fun} || Fun <- Funs]} + }. + +% Doc creation +% http://127.0.0.1:64903/db/a?revs=true&open_revs=%5B%221-23202479633c2b380f79507a776743d5%22%5D&latest=true + +% should_do_the_thing(_PortType, Url) -> +% ?_test(begin +% {ok, _, _, _} = test_request:put(Url ++ "/db/a", +% ?ADMIN_REQ_HEADERS, "{\"a\":1,\"_access\":[\"x\"]}"), +% {ok, Code, _, _} = test_request:get(Url ++ "/db/a?revs=true&open_revs=%5B%221-23202479633c2b380f79507a776743d5%22%5D&latest=true", +% ?USERX_REQ_HEADERS), +% ?assertEqual(200, Code) +% end). 
+% + +should_not_let_create_access_db_if_disabled(_PortType, Url) -> + ok = config:set("per_doc_access", "enabled", "false", _Persist = false), + {ok, Code, _, _} = test_request:put(url() ++ "/db?q=1&n=1&access=true", ?ADMIN_REQ_HEADERS, ""), + ok = config:set("per_doc_access", "enabled", "true", _Persist = false), + ?_assertEqual(400, Code). + +should_not_let_anonymous_user_create_doc(_PortType, Url) -> + % TODO: debugging leftover + % BulkDocsBody = {[ + % {<<"docs">>, [ + % {[{<<"_id">>, <<"a">>}]}, + % {[{<<"_id">>, <<"a">>}]}, + % {[{<<"_id">>, <<"b">>}]}, + % {[{<<"_id">>, <<"c">>}]} + % ]} + % ]}, + % Resp = test_request:post(Url ++ "/db/_bulk_docs", ?ADMIN_REQ_HEADERS, jiffy:encode(BulkDocsBody)), + % ?debugFmt("~nResp: ~p~n", [Resp]), + {ok, Code, _, _} = test_request:put(Url ++ "/db/a", "{\"a\":1,\"_access\":[\"x\"]}"), + ?_assertEqual(401, Code). + +should_let_admin_create_doc_with_access(_PortType, Url) -> + {ok, Code, _, _} = test_request:put( + Url ++ "/db/a", + ?ADMIN_REQ_HEADERS, + "{\"a\":1,\"_access\":[\"x\"]}" + ), + ?_assertEqual(201, Code). + +should_let_admin_create_doc_without_access(_PortType, Url) -> + {ok, Code, _, _} = test_request:put( + Url ++ "/db/a", + ?ADMIN_REQ_HEADERS, + "{\"a\":1}" + ), + ?_assertEqual(201, Code). + +should_let_user_create_doc_for_themselves(_PortType, Url) -> + {ok, Code, _, _} = test_request:put( + Url ++ "/db/b", + ?USERX_REQ_HEADERS, + "{\"a\":1,\"_access\":[\"x\"]}" + ), + ?_assertEqual(201, Code). + +should_not_let_user_create_doc_for_someone_else(_PortType, Url) -> + {ok, Code, _, _} = test_request:put( + Url ++ "/db/c", + ?USERY_REQ_HEADERS, + "{\"a\":1,\"_access\":[\"x\"]}" + ), + ?_assertEqual(403, Code). + +should_let_user_create_access_ddoc(_PortType, Url) -> + {ok, Code, _, _} = test_request:put( + Url ++ "/db/_design/dx", + ?USERX_REQ_HEADERS, + "{\"a\":1,\"_access\":[\"x\"]}" + ), + ?_assertEqual(201, Code). + +access_ddoc_should_have_no_effects(_PortType, Url) -> + ?_test(begin + Ddoc = + "{ \"_access\":[\"x\"], \"validate_doc_update\": \"function(newDoc, oldDoc, userCtx) { throw({unauthorized: 'throw error'})}\", \"views\": { \"foo\": { \"map\": \"function(doc) { emit(doc._id) }\" } }, \"shows\": { \"boo\": \"function() {}\" }, \"lists\": { \"hoo\": \"function() {}\" }, \"update\": { \"goo\": \"function() {}\" }, \"filters\": { \"loo\": \"function() {}\" } }", + {ok, Code, _, _} = test_request:put( + Url ++ "/db/_design/dx", + ?USERX_REQ_HEADERS, + Ddoc + ), + ?assertEqual(201, Code), + {ok, Code1, _, _} = test_request:put( + Url ++ "/db/b", + ?USERX_REQ_HEADERS, + "{\"a\":1,\"_access\":[\"x\"]}" + ), + ?assertEqual(201, Code1), + {ok, Code2, _, _} = test_request:get( + Url ++ "/db/_design/dx/_view/foo", + ?USERX_REQ_HEADERS + ), + ?assertEqual(404, Code2), + {ok, Code3, _, _} = test_request:get( + Url ++ "/db/_design/dx/_show/boo/b", + ?USERX_REQ_HEADERS + ), + ?assertEqual(404, Code3), + {ok, Code4, _, _} = test_request:get( + Url ++ "/db/_design/dx/_list/hoo/foo", + ?USERX_REQ_HEADERS + ), + ?assertEqual(404, Code4), + {ok, Code5, _, _} = test_request:post( + Url ++ "/db/_design/dx/_update/goo", + ?USERX_REQ_HEADERS, + "" + ), + ?assertEqual(404, Code5), + {ok, Code6, _, _} = test_request:get( + Url ++ "/db/_changes?filter=dx/loo", + ?USERX_REQ_HEADERS + ), + ?assertEqual(404, Code6), + {ok, Code7, _, _} = test_request:get( + Url ++ "/db/_changes?filter=_view&view=dx/foo", + ?USERX_REQ_HEADERS + ), + ?assertEqual(404, Code7) + end). 
+ +% Doc updates + +users_with_access_can_update_doc(_PortType, Url) -> + {ok, _, _, Body} = test_request:put( + Url ++ "/db/b", + ?USERX_REQ_HEADERS, + "{\"a\":1,\"_access\":[\"x\"]}" + ), + {Json} = jiffy:decode(Body), + Rev = couch_util:get_value(<<"rev">>, Json), + {ok, Code, _, _} = test_request:put( + Url ++ "/db/b", + ?USERX_REQ_HEADERS, + "{\"a\":2,\"_access\":[\"x\"],\"_rev\":\"" ++ binary_to_list(Rev) ++ "\"}" + ), + ?_assertEqual(201, Code). + +users_without_access_can_not_update_doc(_PortType, Url) -> + {ok, _, _, Body} = test_request:put( + Url ++ "/db/b", + ?USERX_REQ_HEADERS, + "{\"a\":1,\"_access\":[\"x\"]}" + ), + {Json} = jiffy:decode(Body), + Rev = couch_util:get_value(<<"rev">>, Json), + {ok, Code, _, _} = test_request:put( + Url ++ "/db/b", + ?USERY_REQ_HEADERS, + "{\"a\":2,\"_access\":[\"y\"],\"_rev\":\"" ++ binary_to_list(Rev) ++ "\"}" + ), + ?_assertEqual(403, Code). + +users_with_access_can_not_change_access(_PortType, Url) -> + {ok, _, _, Body} = test_request:put( + Url ++ "/db/b", + ?USERX_REQ_HEADERS, + "{\"a\":1,\"_access\":[\"x\"]}" + ), + {Json} = jiffy:decode(Body), + Rev = couch_util:get_value(<<"rev">>, Json), + {ok, Code, _, _} = test_request:put( + Url ++ "/db/b", + ?USERX_REQ_HEADERS, + "{\"a\":2,\"_access\":[\"y\"],\"_rev\":\"" ++ binary_to_list(Rev) ++ "\"}" + ), + ?_assertEqual(403, Code). + +users_with_access_can_not_remove_access(_PortType, Url) -> + {ok, _, _, Body} = test_request:put( + Url ++ "/db/b", + ?USERX_REQ_HEADERS, + "{\"a\":1,\"_access\":[\"x\"]}" + ), + {Json} = jiffy:decode(Body), + Rev = couch_util:get_value(<<"rev">>, Json), + {ok, Code, _, _} = test_request:put( + Url ++ "/db/b", + ?USERX_REQ_HEADERS, + "{\"a\":2,\"_rev\":\"" ++ binary_to_list(Rev) ++ "\"}" + ), + ?_assertEqual(403, Code). + +% Doc reads + +should_let_admin_read_doc_with_access(_PortType, Url) -> + {ok, 201, _, _} = test_request:put( + Url ++ "/db/a", + ?USERX_REQ_HEADERS, + "{\"a\":1,\"_access\":[\"x\"]}" + ), + {ok, Code, _, _} = test_request:get( + Url ++ "/db/a", + ?ADMIN_REQ_HEADERS + ), + ?_assertEqual(200, Code). + +user_with_access_can_read_doc(_PortType, Url) -> + {ok, 201, _, _} = test_request:put( + Url ++ "/db/a", + ?ADMIN_REQ_HEADERS, + "{\"a\":1,\"_access\":[\"x\"]}" + ), + {ok, Code, _, _} = test_request:get( + Url ++ "/db/a", + ?USERX_REQ_HEADERS + ), + ?_assertEqual(200, Code). + +user_with_access_can_not_read_conflicted_doc(_PortType, Url) -> + {ok, 201, _, _} = test_request:put( + Url ++ "/db/a", + ?ADMIN_REQ_HEADERS, + "{\"_id\":\"f1\",\"a\":1,\"_access\":[\"x\"]}" + ), + {ok, 201, _, _} = test_request:put( + Url ++ "/db/a?new_edits=false", + ?ADMIN_REQ_HEADERS, + "{\"_id\":\"f1\",\"_rev\":\"7-XYZ\",\"a\":1,\"_access\":[\"x\"]}" + ), + {ok, Code, _, _} = test_request:get( + Url ++ "/db/a", + ?USERX_REQ_HEADERS + ), + ?_assertEqual(403, Code). + +admin_with_access_can_read_conflicted_doc(_PortType, Url) -> + {ok, 201, _, _} = test_request:put( + Url ++ "/db/a", + ?ADMIN_REQ_HEADERS, + "{\"_id\":\"a\",\"a\":1,\"_access\":[\"x\"]}" + ), + {ok, 201, _, _} = test_request:put( + Url ++ "/db/a?new_edits=false", + ?ADMIN_REQ_HEADERS, + "{\"_id\":\"a\",\"_rev\":\"7-XYZ\",\"a\":1,\"_access\":[\"x\"]}" + ), + {ok, Code, _, _} = test_request:get( + Url ++ "/db/a", + ?ADMIN_REQ_HEADERS + ), + ?_assertEqual(200, Code). 
+ +user_without_access_can_not_read_doc(_PortType, Url) -> + {ok, 201, _, _} = test_request:put( + Url ++ "/db/a", + ?ADMIN_REQ_HEADERS, + "{\"a\":1,\"_access\":[\"x\"]}" + ), + {ok, Code, _, _} = test_request:get( + Url ++ "/db/a", + ?USERY_REQ_HEADERS + ), + ?_assertEqual(403, Code). + +user_can_not_read_doc_without_access(_PortType, Url) -> + {ok, 201, _, _} = test_request:put( + Url ++ "/db/a", + ?ADMIN_REQ_HEADERS, + "{\"a\":1}" + ), + {ok, Code, _, _} = test_request:get( + Url ++ "/db/a", + ?USERX_REQ_HEADERS + ), + ?_assertEqual(403, Code). + +% Doc deletes + +should_let_admin_delete_doc_with_access(_PortType, Url) -> + {ok, 201, _, _} = test_request:put( + Url ++ "/db/a", + ?USERX_REQ_HEADERS, + "{\"a\":1,\"_access\":[\"x\"]}" + ), + {ok, Code, _, _} = test_request:delete( + Url ++ "/db/a?rev=1-23202479633c2b380f79507a776743d5", + ?ADMIN_REQ_HEADERS + ), + ?_assertEqual(200, Code). + +should_let_user_delete_doc_for_themselves(_PortType, Url) -> + {ok, 201, _, _} = test_request:put( + Url ++ "/db/a", + ?USERX_REQ_HEADERS, + "{\"a\":1,\"_access\":[\"x\"]}" + ), + {ok, _, _, _} = test_request:get( + Url ++ "/db/a", + ?USERX_REQ_HEADERS + ), + {ok, Code, _, _} = test_request:delete( + Url ++ "/db/a?rev=1-23202479633c2b380f79507a776743d5", + ?USERX_REQ_HEADERS + ), + ?_assertEqual(200, Code). + +should_not_let_user_delete_doc_for_someone_else(_PortType, Url) -> + {ok, 201, _, _} = test_request:put( + Url ++ "/db/a", + ?USERX_REQ_HEADERS, + "{\"a\":1,\"_access\":[\"x\"]}" + ), + {ok, Code, _, _} = test_request:delete( + Url ++ "/db/a?rev=1-23202479633c2b380f79507a776743d5", + ?USERY_REQ_HEADERS + ), + ?_assertEqual(403, Code). + +% _all_docs with include_docs + +should_let_admin_fetch_all_docs(_PortType, Url) -> + {ok, 201, _, _} = test_request:put( + Url ++ "/db/a", + ?ADMIN_REQ_HEADERS, + "{\"a\":1,\"_access\":[\"x\"]}" + ), + {ok, 201, _, _} = test_request:put( + Url ++ "/db/b", + ?ADMIN_REQ_HEADERS, + "{\"b\":2,\"_access\":[\"x\"]}" + ), + {ok, 201, _, _} = test_request:put( + Url ++ "/db/c", + ?ADMIN_REQ_HEADERS, + "{\"c\":3,\"_access\":[\"y\"]}" + ), + {ok, 201, _, _} = test_request:put( + Url ++ "/db/d", + ?ADMIN_REQ_HEADERS, + "{\"d\":4,\"_access\":[\"y\"]}" + ), + {ok, 200, _, Body} = test_request:get( + Url ++ "/db/_all_docs?include_docs=true", + ?ADMIN_REQ_HEADERS + ), + {Json} = jiffy:decode(Body), + ?_assertEqual(4, proplists:get_value(<<"total_rows">>, Json)). 
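+
+% Several fixtures above and below rely on the deterministic first revision
+% of their bodies (for example "1-23202479633c2b380f79507a776743d5" for
+% {"a":1,"_access":["x"]}). Where that brittleness is unwanted, the current
+% revision can be read back instead; a small helper sketch (hypothetical,
+% not part of this change):
+%
+% current_rev(Url, Path, Headers) ->
+%     {ok, 200, _, Body} = test_request:get(Url ++ Path, Headers),
+%     {Props} = jiffy:decode(Body),
+%     binary_to_list(couch_util:get_value(<<"_rev">>, Props)).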
+ +should_let_user_fetch_their_own_all_docs(_PortType, Url) -> + ?_test(begin + {ok, 201, _, _} = test_request:put( + Url ++ "/db/a", + ?ADMIN_REQ_HEADERS, + "{\"a\":1,\"_access\":[\"x\"]}" + ), + {ok, 201, _, _} = test_request:put( + Url ++ "/db/b", + ?USERX_REQ_HEADERS, + "{\"b\":2,\"_access\":[\"x\"]}" + ), + {ok, 201, _, _} = test_request:put( + Url ++ "/db/c", + ?ADMIN_REQ_HEADERS, + "{\"c\":3,\"_access\":[\"y\"]}" + ), + {ok, 201, _, _} = test_request:put( + Url ++ "/db/d", + ?USERY_REQ_HEADERS, + "{\"d\":4,\"_access\":[\"y\"]}" + ), + {ok, 200, _, Body} = test_request:get( + Url ++ "/db/_all_docs?include_docs=true", + ?USERX_REQ_HEADERS + ), + {Json} = jiffy:decode(Body), + Rows = proplists:get_value(<<"rows">>, Json), + ?assertEqual( + [ + {[ + {<<"id">>, <<"a">>}, + {<<"key">>, <<"a">>}, + {<<"value">>, <<"1-23202479633c2b380f79507a776743d5">>}, + {<<"doc">>, + {[ + {<<"_id">>, <<"a">>}, + {<<"_rev">>, <<"1-23202479633c2b380f79507a776743d5">>}, + {<<"a">>, 1}, + {<<"_access">>, [<<"x">>]} + ]}} + ]}, + {[ + {<<"id">>, <<"b">>}, + {<<"key">>, <<"b">>}, + {<<"value">>, <<"1-d33fb05384fa65a8081da2046595de0f">>}, + {<<"doc">>, + {[ + {<<"_id">>, <<"b">>}, + {<<"_rev">>, <<"1-d33fb05384fa65a8081da2046595de0f">>}, + {<<"b">>, 2}, + {<<"_access">>, [<<"x">>]} + ]}} + ]} + ], + Rows + ), + ?assertEqual(2, length(Rows)), + ?assertEqual(4, proplists:get_value(<<"total_rows">>, Json)), + + {ok, 200, _, Body1} = test_request:get( + Url ++ "/db/_all_docs?include_docs=true", + ?USERY_REQ_HEADERS + ), + {Json1} = jiffy:decode(Body1), + ?assertEqual( + [ + {<<"total_rows">>, 4}, + {<<"offset">>, 2}, + {<<"rows">>, [ + {[ + {<<"id">>, <<"c">>}, + {<<"key">>, <<"c">>}, + {<<"value">>, <<"1-92aef5b0e4a3f4db0aba1320869bc95d">>}, + {<<"doc">>, + {[ + {<<"_id">>, <<"c">>}, + {<<"_rev">>, <<"1-92aef5b0e4a3f4db0aba1320869bc95d">>}, + {<<"c">>, 3}, + {<<"_access">>, [<<"y">>]} + ]}} + ]}, + {[ + {<<"id">>, <<"d">>}, + {<<"key">>, <<"d">>}, + {<<"value">>, <<"1-ae984f6550038b1ed1565ac4b6cd8c5d">>}, + {<<"doc">>, + {[ + {<<"_id">>, <<"d">>}, + {<<"_rev">>, <<"1-ae984f6550038b1ed1565ac4b6cd8c5d">>}, + {<<"d">>, 4}, + {<<"_access">>, [<<"y">>]} + ]}} + ]} + ]} + ], + Json1 + ) + end). + +% _changes + +should_let_admin_fetch_changes(_PortType, Url) -> + {ok, 201, _, _} = test_request:put( + Url ++ "/db/a", + ?ADMIN_REQ_HEADERS, + "{\"a\":1,\"_access\":[\"x\"]}" + ), + {ok, 201, _, _} = test_request:put( + Url ++ "/db/b", + ?ADMIN_REQ_HEADERS, + "{\"b\":2,\"_access\":[\"x\"]}" + ), + {ok, 201, _, _} = test_request:put( + Url ++ "/db/c", + ?ADMIN_REQ_HEADERS, + "{\"c\":3,\"_access\":[\"y\"]}" + ), + {ok, 201, _, _} = test_request:put( + Url ++ "/db/d", + ?ADMIN_REQ_HEADERS, + "{\"d\":4,\"_access\":[\"y\"]}" + ), + {ok, 200, _, Body} = test_request:get( + Url ++ "/db/_changes", + ?ADMIN_REQ_HEADERS + ), + {Json} = jiffy:decode(Body), + AmountOfDocs = length(proplists:get_value(<<"results">>, Json)), + ?_assertEqual(4, AmountOfDocs). 
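+
+% As the _all_docs tests show, filtering happens per row: `total_rows` and
+% `offset` (and `pending` for _changes below) are computed over the
+% unfiltered index, so a non-admin still sees database-wide totals while only
+% receiving the rows their `_access` entries allow.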
+ +should_let_user_fetch_their_own_changes(_PortType, Url) -> + ?_test(begin + {ok, 201, _, _} = test_request:put( + Url ++ "/db/a", + ?ADMIN_REQ_HEADERS, + "{\"a\":1,\"_access\":[\"x\"]}" + ), + {ok, 201, _, _} = test_request:put( + Url ++ "/db/b", + ?ADMIN_REQ_HEADERS, + "{\"b\":2,\"_access\":[\"x\"]}" + ), + {ok, 201, _, _} = test_request:put( + Url ++ "/db/c", + ?ADMIN_REQ_HEADERS, + "{\"c\":3,\"_access\":[\"y\"]}" + ), + {ok, 201, _, _} = test_request:put( + Url ++ "/db/d", + ?ADMIN_REQ_HEADERS, + "{\"d\":4,\"_access\":[\"y\"]}" + ), + {ok, 200, _, Body} = test_request:get( + Url ++ "/db/_changes", + ?USERX_REQ_HEADERS + ), + {Json} = jiffy:decode(Body), + ?assertMatch( + [ + {<<"results">>, [ + {[ + {<<"seq">>, <<"2-", _/binary>>}, + {<<"id">>, <<"a">>}, + {<<"changes">>, [{[{<<"rev">>, <<"1-23202479633c2b380f79507a776743d5">>}]}]} + ]}, + {[ + {<<"seq">>, <<"3-", _/binary>>}, + {<<"id">>, <<"b">>}, + {<<"changes">>, [{[{<<"rev">>, <<"1-d33fb05384fa65a8081da2046595de0f">>}]}]} + ]} + ]}, + {<<"last_seq">>, <<"3-", _/binary>>}, + {<<"pending">>, 2} + ], + Json + ), + AmountOfDocs = length(proplists:get_value(<<"results">>, Json)), + ?assertEqual(2, AmountOfDocs) + end). + +% views + +should_not_allow_admin_access_ddoc_view_request(_PortType, Url) -> + DDoc = "{\"a\":1,\"_access\":[\"x\"],\"views\":{\"foo\":{\"map\":\"function() {}\"}}}", + {ok, Code, _, _} = test_request:put( + Url ++ "/db/_design/a", + ?ADMIN_REQ_HEADERS, + DDoc + ), + ?assertEqual(201, Code), + {ok, Code1, _, _} = test_request:get( + Url ++ "/db/_design/a/_view/foo", + ?ADMIN_REQ_HEADERS + ), + ?_assertEqual(404, Code1). + +should_not_allow_user_access_ddoc_view_request(_PortType, Url) -> + DDoc = "{\"a\":1,\"_access\":[\"x\"],\"views\":{\"foo\":{\"map\":\"function() {}\"}}}", + {ok, Code, _, _} = test_request:put( + Url ++ "/db/_design/a", + ?ADMIN_REQ_HEADERS, + DDoc + ), + ?assertEqual(201, Code), + {ok, Code1, _, _} = test_request:get( + Url ++ "/db/_design/a/_view/foo", + ?USERX_REQ_HEADERS + ), + ?_assertEqual(404, Code1). + +should_allow_admin_users_access_ddoc_view_request(_PortType, Url) -> + DDoc = "{\"a\":1,\"_access\":[\"_users\"],\"views\":{\"foo\":{\"map\":\"function() {}\"}}}", + {ok, Code, _, _} = test_request:put( + Url ++ "/db/_design/a", + ?ADMIN_REQ_HEADERS, + DDoc + ), + ?assertEqual(201, Code), + {ok, Code1, _, _} = test_request:get( + Url ++ "/db/_design/a/_view/foo", + ?ADMIN_REQ_HEADERS + ), + ?_assertEqual(200, Code1). + +should_allow_user_users_access_ddoc_view_request(_PortType, Url) -> + DDoc = "{\"a\":1,\"_access\":[\"_users\"],\"views\":{\"foo\":{\"map\":\"function() {}\"}}}", + {ok, Code, _, _} = test_request:put( + Url ++ "/db/_design/a", + ?ADMIN_REQ_HEADERS, + DDoc + ), + ?assertEqual(201, Code), + {ok, Code1, _, _} = test_request:get( + Url ++ "/db/_design/a/_view/foo", + ?USERX_REQ_HEADERS + ), + ?_assertEqual(200, Code1). 
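+
+% The view tests above exercise the special `_users` entry: a design document
+% whose `_access` list contains "_users" can be queried by the authenticated
+% test users here (admin and user `x` alike), whereas a design document
+% scoped to a single user is treated as invisible and even admins receive a
+% 404 for its views. Sketch of the two bodies used above:
+%
+% "{\"_access\":[\"x\"], \"views\": ...}"        % GET .../_view/foo -> 404
+% "{\"_access\":[\"_users\"], \"views\": ...}"   % GET .../_view/foo -> 200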
+ +% replication + +should_allow_admin_to_replicate_from_access_to_access(_PortType, Url) -> + ?_test(begin + % create target db + {ok, 201, _, _} = test_request:put( + url() ++ "/db2?q=1&n=1&access=true", + ?ADMIN_REQ_HEADERS, + "" + ), + % set target db security + {ok, _, _, _} = test_request:put( + url() ++ "/db2/_security", + ?ADMIN_REQ_HEADERS, + jiffy:encode(?SECURITY_OBJECT) + ), + + % create source docs + {ok, _, _, _} = test_request:put( + Url ++ "/db/a", + ?ADMIN_REQ_HEADERS, + "{\"a\":1,\"_access\":[\"x\"]}" + ), + {ok, _, _, _} = test_request:put( + Url ++ "/db/b", + ?ADMIN_REQ_HEADERS, + "{\"b\":2,\"_access\":[\"x\"]}" + ), + {ok, _, _, _} = test_request:put( + Url ++ "/db/c", + ?ADMIN_REQ_HEADERS, + "{\"c\":3,\"_access\":[\"x\"]}" + ), + + % replicate + AdminUrl = string:replace(Url, "http://", "http://a:a@"), + EJRequestBody = + {[ + {<<"source">>, list_to_binary(AdminUrl ++ "/db")}, + {<<"target">>, list_to_binary(AdminUrl ++ "/db2")} + ]}, + {ok, ResponseCode, _, ResponseBody} = test_request:post( + Url ++ "/_replicate", + ?ADMIN_REQ_HEADERS, + jiffy:encode(EJRequestBody) + ), + + % assert replication status + {EJResponseBody} = jiffy:decode(ResponseBody), + ?assertEqual(ResponseCode, 200), + ?assertEqual(true, couch_util:get_value(<<"ok">>, EJResponseBody)), + [{History}] = couch_util:get_value(<<"history">>, EJResponseBody), + + MissingChecked = couch_util:get_value(<<"missing_checked">>, History), + MissingFound = couch_util:get_value(<<"missing_found">>, History), + DocsReard = couch_util:get_value(<<"docs_read">>, History), + DocsWritten = couch_util:get_value(<<"docs_written">>, History), + DocWriteFailures = couch_util:get_value(<<"doc_write_failures">>, History), + + ?assertEqual(3, MissingChecked), + ?assertEqual(3, MissingFound), + ?assertEqual(3, DocsReard), + ?assertEqual(3, DocsWritten), + ?assertEqual(0, DocWriteFailures), + + % assert docs in target db + {ok, 200, _, ADBody} = test_request:get( + Url ++ "/db2/_all_docs?include_docs=true", + ?ADMIN_REQ_HEADERS + ), + {Json} = jiffy:decode(ADBody), + ?assertEqual(3, proplists:get_value(<<"total_rows">>, Json)) + end). 
+ +should_allow_admin_to_replicate_from_no_access_to_access(_PortType, Url) -> + ?_test(begin + % create target db + {ok, 201, _, _} = test_request:put( + url() ++ "/db2?q=1&n=1", + ?ADMIN_REQ_HEADERS, + "" + ), + % set target db security + {ok, _, _, _} = test_request:put( + url() ++ "/db2/_security", + ?ADMIN_REQ_HEADERS, + jiffy:encode(?SECURITY_OBJECT) + ), + + % create source docs + {ok, _, _, _} = test_request:put( + Url ++ "/db2/a", + ?ADMIN_REQ_HEADERS, + "{\"a\":1,\"_access\":[\"x\"]}" + ), + {ok, _, _, _} = test_request:put( + Url ++ "/db2/b", + ?ADMIN_REQ_HEADERS, + "{\"b\":2,\"_access\":[\"x\"]}" + ), + {ok, _, _, _} = test_request:put( + Url ++ "/db2/c", + ?ADMIN_REQ_HEADERS, + "{\"c\":3,\"_access\":[\"x\"]}" + ), + + % replicate + AdminUrl = string:replace(Url, "http://", "http://a:a@"), + EJRequestBody = + {[ + {<<"source">>, list_to_binary(AdminUrl ++ "/db2")}, + {<<"target">>, list_to_binary(AdminUrl ++ "/db")} + ]}, + {ok, ResponseCode, _, ResponseBody} = test_request:post( + Url ++ "/_replicate", + ?ADMIN_REQ_HEADERS, + jiffy:encode(EJRequestBody) + ), + + % assert replication status + {EJResponseBody} = jiffy:decode(ResponseBody), + ?assertEqual(ResponseCode, 200), + ?assertEqual(true, couch_util:get_value(<<"ok">>, EJResponseBody)), + [{History}] = couch_util:get_value(<<"history">>, EJResponseBody), + + MissingChecked = couch_util:get_value(<<"missing_checked">>, History), + MissingFound = couch_util:get_value(<<"missing_found">>, History), + DocsReard = couch_util:get_value(<<"docs_read">>, History), + DocsWritten = couch_util:get_value(<<"docs_written">>, History), + DocWriteFailures = couch_util:get_value(<<"doc_write_failures">>, History), + + ?assertEqual(3, MissingChecked), + ?assertEqual(3, MissingFound), + ?assertEqual(3, DocsReard), + ?assertEqual(3, DocsWritten), + ?assertEqual(0, DocWriteFailures), + + % assert docs in target db + {ok, 200, _, ADBody} = test_request:get( + Url ++ "/db/_all_docs?include_docs=true", + ?ADMIN_REQ_HEADERS + ), + {Json} = jiffy:decode(ADBody), + ?assertEqual(3, proplists:get_value(<<"total_rows">>, Json)) + end). 
+ +should_allow_admin_to_replicate_from_access_to_no_access(_PortType, Url) -> + ?_test(begin + % create target db + {ok, 201, _, _} = test_request:put( + url() ++ "/db2?q=1&n=1", + ?ADMIN_REQ_HEADERS, + "" + ), + % set target db security + {ok, _, _, _} = test_request:put( + url() ++ "/db2/_security", + ?ADMIN_REQ_HEADERS, + jiffy:encode(?SECURITY_OBJECT) + ), + + % create source docs + {ok, _, _, _} = test_request:put( + Url ++ "/db/a", + ?ADMIN_REQ_HEADERS, + "{\"a\":1,\"_access\":[\"x\"]}" + ), + {ok, _, _, _} = test_request:put( + Url ++ "/db/b", + ?ADMIN_REQ_HEADERS, + "{\"b\":2,\"_access\":[\"x\"]}" + ), + {ok, _, _, _} = test_request:put( + Url ++ "/db/c", + ?ADMIN_REQ_HEADERS, + "{\"c\":3,\"_access\":[\"x\"]}" + ), + + % replicate + AdminUrl = string:replace(Url, "http://", "http://a:a@"), + EJRequestBody = + {[ + {<<"source">>, list_to_binary(AdminUrl ++ "/db")}, + {<<"target">>, list_to_binary(AdminUrl ++ "/db2")} + ]}, + {ok, ResponseCode, _, ResponseBody} = test_request:post( + Url ++ "/_replicate", + ?ADMIN_REQ_HEADERS, + jiffy:encode(EJRequestBody) + ), + + % assert replication status + {EJResponseBody} = jiffy:decode(ResponseBody), + ?assertEqual(ResponseCode, 200), + ?assertEqual(true, couch_util:get_value(<<"ok">>, EJResponseBody)), + [{History}] = couch_util:get_value(<<"history">>, EJResponseBody), + + MissingChecked = couch_util:get_value(<<"missing_checked">>, History), + MissingFound = couch_util:get_value(<<"missing_found">>, History), + DocsReard = couch_util:get_value(<<"docs_read">>, History), + DocsWritten = couch_util:get_value(<<"docs_written">>, History), + DocWriteFailures = couch_util:get_value(<<"doc_write_failures">>, History), + + ?assertEqual(3, MissingChecked), + ?assertEqual(3, MissingFound), + ?assertEqual(3, DocsReard), + ?assertEqual(3, DocsWritten), + ?assertEqual(0, DocWriteFailures), + + % assert docs in target db + {ok, 200, _, ADBody} = test_request:get( + Url ++ "/db2/_all_docs?include_docs=true", + ?ADMIN_REQ_HEADERS + ), + {Json} = jiffy:decode(ADBody), + ?assertEqual(3, proplists:get_value(<<"total_rows">>, Json)) + end). 
+ +should_allow_admin_to_replicate_from_no_access_to_no_access(_PortType, Url) -> + ?_test(begin + % create source and target dbs + {ok, 201, _, _} = test_request:put( + url() ++ "/db2?q=1&n=1", + ?ADMIN_REQ_HEADERS, + "" + ), + % set target db security + {ok, _, _, _} = test_request:put( + url() ++ "/db2/_security", + ?ADMIN_REQ_HEADERS, + jiffy:encode(?SECURITY_OBJECT) + ), + + {ok, 201, _, _} = test_request:put( + url() ++ "/db3?q=1&n=1", + ?ADMIN_REQ_HEADERS, + "" + ), + % set target db security + {ok, _, _, _} = test_request:put( + url() ++ "/db3/_security", + ?ADMIN_REQ_HEADERS, + jiffy:encode(?SECURITY_OBJECT) + ), + + % create source docs + {ok, _, _, _} = test_request:put( + Url ++ "/db2/a", + ?ADMIN_REQ_HEADERS, + "{\"a\":1,\"_access\":[\"x\"]}" + ), + {ok, _, _, _} = test_request:put( + Url ++ "/db2/b", + ?ADMIN_REQ_HEADERS, + "{\"b\":2,\"_access\":[\"x\"]}" + ), + {ok, _, _, _} = test_request:put( + Url ++ "/db2/c", + ?ADMIN_REQ_HEADERS, + "{\"c\":3,\"_access\":[\"x\"]}" + ), + + % replicate + AdminUrl = string:replace(Url, "http://", "http://a:a@"), + EJRequestBody = + {[ + {<<"source">>, list_to_binary(AdminUrl ++ "/db2")}, + {<<"target">>, list_to_binary(AdminUrl ++ "/db3")} + ]}, + {ok, ResponseCode, _, ResponseBody} = test_request:post( + Url ++ "/_replicate", + ?ADMIN_REQ_HEADERS, + jiffy:encode(EJRequestBody) + ), + + % assert replication status + {EJResponseBody} = jiffy:decode(ResponseBody), + ?assertEqual(ResponseCode, 200), + ?assertEqual(true, couch_util:get_value(<<"ok">>, EJResponseBody)), + [{History}] = couch_util:get_value(<<"history">>, EJResponseBody), + + MissingChecked = couch_util:get_value(<<"missing_checked">>, History), + MissingFound = couch_util:get_value(<<"missing_found">>, History), + DocsReard = couch_util:get_value(<<"docs_read">>, History), + DocsWritten = couch_util:get_value(<<"docs_written">>, History), + DocWriteFailures = couch_util:get_value(<<"doc_write_failures">>, History), + + ?assertEqual(3, MissingChecked), + ?assertEqual(3, MissingFound), + ?assertEqual(3, DocsReard), + ?assertEqual(3, DocsWritten), + ?assertEqual(0, DocWriteFailures), + + % assert docs in target db + {ok, 200, _, ADBody} = test_request:get( + Url ++ "/db3/_all_docs?include_docs=true", + ?ADMIN_REQ_HEADERS + ), + {Json} = jiffy:decode(ADBody), + ?assertEqual(3, proplists:get_value(<<"total_rows">>, Json)) + end). 
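+
+% Summary of the four admin replication cases above: with admin credentials,
+% every combination of access and no-access source/target moves all three
+% fixture docs (missing_checked = missing_found = docs_read = docs_written
+% = 3, doc_write_failures = 0); the access mode of either database does not
+% restrict an admin-driven replication.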
+ +should_allow_user_to_replicate_from_access_to_access(_PortType, Url) -> + ?_test(begin + % create source and target dbs + {ok, 201, _, _} = test_request:put( + url() ++ "/db2?q=1&n=1&access=true", + ?ADMIN_REQ_HEADERS, + "" + ), + % set target db security + {ok, _, _, _} = test_request:put( + url() ++ "/db2/_security", + ?ADMIN_REQ_HEADERS, + jiffy:encode(?SECURITY_OBJECT) + ), + + % create source docs + {ok, _, _, _} = test_request:put( + Url ++ "/db/a", + ?ADMIN_REQ_HEADERS, + "{\"a\":1,\"_access\":[\"x\"]}" + ), + {ok, _, _, _} = test_request:put( + Url ++ "/db/b", + ?ADMIN_REQ_HEADERS, + "{\"b\":2,\"_access\":[\"x\"]}" + ), + {ok, _, _, _} = test_request:put( + Url ++ "/db/c", + ?ADMIN_REQ_HEADERS, + "{\"c\":3,\"_access\":[\"y\"]}" + ), + + % replicate + UserXUrl = string:replace(Url, "http://", "http://x:x@"), + EJRequestBody = + {[ + {<<"source">>, list_to_binary(UserXUrl ++ "/db")}, + {<<"target">>, list_to_binary(UserXUrl ++ "/db2")} + ]}, + {ok, ResponseCode, _, ResponseBody} = test_request:post( + Url ++ "/_replicate", + ?USERX_REQ_HEADERS, + jiffy:encode(EJRequestBody) + ), + % ?debugFmt("~nResponseBody: ~p~n", [ResponseBody]), + + % assert replication status + {EJResponseBody} = jiffy:decode(ResponseBody), + ?assertEqual(ResponseCode, 200), + ?assertEqual(true, couch_util:get_value(<<"ok">>, EJResponseBody)), + + [{History}] = couch_util:get_value(<<"history">>, EJResponseBody), + + MissingChecked = couch_util:get_value(<<"missing_checked">>, History), + MissingFound = couch_util:get_value(<<"missing_found">>, History), + DocsReard = couch_util:get_value(<<"docs_read">>, History), + DocsWritten = couch_util:get_value(<<"docs_written">>, History), + DocWriteFailures = couch_util:get_value(<<"doc_write_failures">>, History), + + ?assertEqual(2, MissingChecked), + ?assertEqual(2, MissingFound), + ?assertEqual(2, DocsReard), + ?assertEqual(2, DocsWritten), + ?assertEqual(0, DocWriteFailures), + + % assert access in local doc + ReplicationId = couch_util:get_value(<<"replication_id">>, EJResponseBody), + {ok, 200, _, CheckPoint} = test_request:get( + Url ++ "/db/_local/" ++ ReplicationId, + ?USERX_REQ_HEADERS + ), + {EJCheckPoint} = jiffy:decode(CheckPoint), + Access = couch_util:get_value(<<"_access">>, EJCheckPoint), + ?assertEqual([<<"x">>], Access), + + % make sure others can’t read our local docs + {ok, 403, _, _} = test_request:get( + Url ++ "/db/_local/" ++ ReplicationId, + ?USERY_REQ_HEADERS + ), + + % assert docs in target db + {ok, 200, _, ADBody} = test_request:get( + Url ++ "/db2/_all_docs?include_docs=true", + ?ADMIN_REQ_HEADERS + ), + {Json} = jiffy:decode(ADBody), + ?assertEqual(2, proplists:get_value(<<"total_rows">>, Json)) + end). 
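+
+% Note on the checkpoint assertions above: the replication checkpoint is a
+% _local doc that carries the replicating user's name in its `_access` list,
+% so user `x` can read /db/_local/<replication_id> back while user `y` gets
+% a 403. Per-user replication state therefore stays private in
+% access-enabled databases.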
+ +should_allow_user_to_replicate_from_access_to_no_access(_PortType, Url) -> + ?_test(begin + % create source and target dbs + {ok, 201, _, _} = test_request:put( + url() ++ "/db2?q=1&n=1", + ?ADMIN_REQ_HEADERS, + "" + ), + % set target db security + {ok, _, _, _} = test_request:put( + url() ++ "/db2/_security", + ?ADMIN_REQ_HEADERS, + jiffy:encode(?SECURITY_OBJECT) + ), + + % create source docs + {ok, _, _, _} = test_request:put( + Url ++ "/db/a", + ?ADMIN_REQ_HEADERS, + "{\"a\":1,\"_access\":[\"x\"]}" + ), + {ok, _, _, _} = test_request:put( + Url ++ "/db/b", + ?ADMIN_REQ_HEADERS, + "{\"b\":2,\"_access\":[\"x\"]}" + ), + {ok, _, _, _} = test_request:put( + Url ++ "/db/c", + ?ADMIN_REQ_HEADERS, + "{\"c\":3,\"_access\":[\"y\"]}" + ), + + % replicate + UserXUrl = string:replace(Url, "http://", "http://x:x@"), + EJRequestBody = + {[ + {<<"source">>, list_to_binary(UserXUrl ++ "/db")}, + {<<"target">>, list_to_binary(UserXUrl ++ "/db2")} + ]}, + {ok, ResponseCode, _, ResponseBody} = test_request:post( + Url ++ "/_replicate", + ?USERX_REQ_HEADERS, + jiffy:encode(EJRequestBody) + ), + + % assert replication status + {EJResponseBody} = jiffy:decode(ResponseBody), + ?assertEqual(ResponseCode, 200), + ?assertEqual(true, couch_util:get_value(<<"ok">>, EJResponseBody)), + [{History}] = couch_util:get_value(<<"history">>, EJResponseBody), + + MissingChecked = couch_util:get_value(<<"missing_checked">>, History), + MissingFound = couch_util:get_value(<<"missing_found">>, History), + DocsReard = couch_util:get_value(<<"docs_read">>, History), + DocsWritten = couch_util:get_value(<<"docs_written">>, History), + DocWriteFailures = couch_util:get_value(<<"doc_write_failures">>, History), + + ?assertEqual(2, MissingChecked), + ?assertEqual(2, MissingFound), + ?assertEqual(2, DocsReard), + ?assertEqual(2, DocsWritten), + ?assertEqual(0, DocWriteFailures), + + % assert docs in target db + {ok, 200, _, ADBody} = test_request:get( + Url ++ "/db2/_all_docs?include_docs=true", + ?ADMIN_REQ_HEADERS + ), + {Json} = jiffy:decode(ADBody), + ?assertEqual(2, proplists:get_value(<<"total_rows">>, Json)) + end). 
+ +should_allow_user_to_replicate_from_no_access_to_access(_PortType, Url) -> + ?_test(begin + % create source and target dbs + {ok, 201, _, _} = test_request:put( + url() ++ "/db2?q=1&n=1", + ?ADMIN_REQ_HEADERS, + "" + ), + % set target db security + {ok, _, _, _} = test_request:put( + url() ++ "/db2/_security", + ?ADMIN_REQ_HEADERS, + jiffy:encode(?SECURITY_OBJECT) + ), + + % leave for easier debugging + % VduFun = <<"function(newdoc, olddoc, userctx) {if(newdoc._id == \"b\") throw({'forbidden':'fail'})}">>, + % DDoc = {[ + % {<<"_id">>, <<"_design/vdu">>}, + % {<<"validate_doc_update">>, VduFun} + % ]}, + % {ok, _, _, _} = test_request:put(Url ++ "/db/_design/vdu", + % ?ADMIN_REQ_HEADERS, jiffy:encode(DDoc)), + % create source docs + {ok, _, _, _} = test_request:put( + Url ++ "/db2/a", + ?ADMIN_REQ_HEADERS, + "{\"a\":1,\"_access\":[\"x\"]}" + ), + {ok, _, _, _} = test_request:put( + Url ++ "/db2/b", + ?ADMIN_REQ_HEADERS, + "{\"b\":2,\"_access\":[\"x\"]}" + ), + {ok, _, _, _} = test_request:put( + Url ++ "/db2/c", + ?ADMIN_REQ_HEADERS, + "{\"c\":3,\"_access\":[\"y\"]}" + ), + + % replicate + UserXUrl = string:replace(Url, "http://", "http://x:x@"), + EJRequestBody = + {[ + {<<"source">>, list_to_binary(UserXUrl ++ "/db2")}, + {<<"target">>, list_to_binary(UserXUrl ++ "/db")} + ]}, + {ok, ResponseCode, _, ResponseBody} = test_request:post( + Url ++ "/_replicate", + ?USERX_REQ_HEADERS, + jiffy:encode(EJRequestBody) + ), + + % assert replication status + {EJResponseBody} = jiffy:decode(ResponseBody), + ?assertEqual(ResponseCode, 200), + ?assertEqual(true, couch_util:get_value(<<"ok">>, EJResponseBody)), + [{History}] = couch_util:get_value(<<"history">>, EJResponseBody), + + MissingChecked = couch_util:get_value(<<"missing_checked">>, History), + MissingFound = couch_util:get_value(<<"missing_found">>, History), + DocsReard = couch_util:get_value(<<"docs_read">>, History), + DocsWritten = couch_util:get_value(<<"docs_written">>, History), + DocWriteFailures = couch_util:get_value(<<"doc_write_failures">>, History), + + ?assertEqual(3, MissingChecked), + ?assertEqual(3, MissingFound), + ?assertEqual(3, DocsReard), + ?assertEqual(2, DocsWritten), + ?assertEqual(1, DocWriteFailures), + + % assert docs in target db + {ok, 200, _, ADBody} = test_request:get( + Url ++ "/db/_all_docs?include_docs=true", + ?ADMIN_REQ_HEADERS + ), + {Json} = jiffy:decode(ADBody), + ?assertEqual(2, proplists:get_value(<<"total_rows">>, Json)) + end). 
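+
+% The single doc_write_failure above is the doc marked "_access":["y"]:
+% user `x` can read it from the plain (non-access) source database, but
+% writing it into the access-enabled target is rejected by the target's
+% access check, so only the two "x" docs arrive and the target's _all_docs
+% total stays at 2.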
+ +should_allow_user_to_replicate_from_no_access_to_no_access(_PortType, Url) -> + ?_test(begin + % create source and target dbs + {ok, 201, _, _} = test_request:put( + url() ++ "/db2?q=1&n=1", + ?ADMIN_REQ_HEADERS, + "" + ), + % set target db security + {ok, _, _, _} = test_request:put( + url() ++ "/db2/_security", + ?ADMIN_REQ_HEADERS, + jiffy:encode(?SECURITY_OBJECT) + ), + + {ok, 201, _, _} = test_request:put( + url() ++ "/db3?q=1&n=1", + ?ADMIN_REQ_HEADERS, + "" + ), + % set target db security + {ok, _, _, _} = test_request:put( + url() ++ "/db3/_security", + ?ADMIN_REQ_HEADERS, + jiffy:encode(?SECURITY_OBJECT) + ), + % create source docs + {ok, _, _, _} = test_request:put( + Url ++ "/db2/a", + ?ADMIN_REQ_HEADERS, + "{\"a\":1,\"_access\":[\"x\"]}" + ), + {ok, _, _, _} = test_request:put( + Url ++ "/db2/b", + ?ADMIN_REQ_HEADERS, + "{\"b\":2,\"_access\":[\"x\"]}" + ), + {ok, _, _, _} = test_request:put( + Url ++ "/db2/c", + ?ADMIN_REQ_HEADERS, + "{\"c\":3,\"_access\":[\"y\"]}" + ), + + % replicate + UserXUrl = string:replace(Url, "http://", "http://x:x@"), + EJRequestBody = + {[ + {<<"source">>, list_to_binary(UserXUrl ++ "/db2")}, + {<<"target">>, list_to_binary(UserXUrl ++ "/db3")} + ]}, + {ok, ResponseCode, _, ResponseBody} = test_request:post( + Url ++ "/_replicate", + ?USERX_REQ_HEADERS, + jiffy:encode(EJRequestBody) + ), + + % assert replication status + {EJResponseBody} = jiffy:decode(ResponseBody), + ?assertEqual(ResponseCode, 200), + ?assertEqual(true, couch_util:get_value(<<"ok">>, EJResponseBody)), + [{History}] = couch_util:get_value(<<"history">>, EJResponseBody), + + MissingChecked = couch_util:get_value(<<"missing_checked">>, History), + MissingFound = couch_util:get_value(<<"missing_found">>, History), + DocsReard = couch_util:get_value(<<"docs_read">>, History), + DocsWritten = couch_util:get_value(<<"docs_written">>, History), + DocWriteFailures = couch_util:get_value(<<"doc_write_failures">>, History), + + ?assertEqual(3, MissingChecked), + ?assertEqual(3, MissingFound), + ?assertEqual(3, DocsReard), + ?assertEqual(3, DocsWritten), + ?assertEqual(0, DocWriteFailures), + + % assert docs in target db + {ok, 200, _, ADBody} = test_request:get( + Url ++ "/db3/_all_docs?include_docs=true", + ?ADMIN_REQ_HEADERS + ), + {Json} = jiffy:decode(ADBody), + ?assertEqual(3, proplists:get_value(<<"total_rows">>, Json)) + end). 
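+
+% The last case confirms that `_access` is inert in databases created
+% without access=true: replicating the same fixtures between two plain
+% databases as user `x` writes all three docs, including the one marked
+% "_access":["y"].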
+ +% revs_diff +should_not_allow_user_to_revs_diff_other_docs(_PortType, Url) -> + ?_test(begin + % create test docs + {ok, _, _, _} = test_request:put( + Url ++ "/db/a", + ?ADMIN_REQ_HEADERS, + "{\"a\":1,\"_access\":[\"x\"]}" + ), + {ok, _, _, _} = test_request:put( + Url ++ "/db/b", + ?ADMIN_REQ_HEADERS, + "{\"b\":2,\"_access\":[\"x\"]}" + ), + {ok, _, _, V} = test_request:put( + Url ++ "/db/c", + ?ADMIN_REQ_HEADERS, + "{\"c\":3,\"_access\":[\"y\"]}" + ), + + % nothing missing + RevsDiff = + {[ + {<<"a">>, [ + <<"1-23202479633c2b380f79507a776743d5">> + ]} + ]}, + {ok, GoodCode, _, GoodBody} = test_request:post( + Url ++ "/db/_revs_diff", + ?USERX_REQ_HEADERS, + jiffy:encode(RevsDiff) + ), + EJGoodBody = jiffy:decode(GoodBody), + ?assertEqual(200, GoodCode), + ?assertEqual({[]}, EJGoodBody), + + % something missing + MissingRevsDiff = + {[ + {<<"a">>, [ + <<"1-missing">> + ]} + ]}, + {ok, MissingCode, _, MissingBody} = test_request:post( + Url ++ "/db/_revs_diff", + ?USERX_REQ_HEADERS, + jiffy:encode(MissingRevsDiff) + ), + EJMissingBody = jiffy:decode(MissingBody), + ?assertEqual(200, MissingCode), + MissingExpect = + {[ + {<<"a">>, + {[ + {<<"missing">>, [<<"1-missing">>]} + ]}} + ]}, + ?assertEqual(MissingExpect, EJMissingBody), + + % other doc + OtherRevsDiff = + {[ + {<<"c">>, [ + <<"1-92aef5b0e4a3f4db0aba1320869bc95d">> + ]} + ]}, + {ok, OtherCode, _, OtherBody} = test_request:post( + Url ++ "/db/_revs_diff", + ?USERX_REQ_HEADERS, + jiffy:encode(OtherRevsDiff) + ), + EJOtherBody = jiffy:decode(OtherBody), + ?assertEqual(200, OtherCode), + ?assertEqual({[]}, EJOtherBody) + end). +%% ------------------------------------------------------------------ +%% Internal Function Definitions +%% ------------------------------------------------------------------ + +port() -> + integer_to_list(mochiweb_socket_server:get(chttpd, port)). + +% Potential future feature:% +% should_let_user_fetch_their_own_all_docs_plus_users_ddocs(_PortType, Url) -> +% {ok, 201, _, _} = test_request:put(Url ++ "/db/a", +% ?ADMIN_REQ_HEADERS, "{\"a\":1,\"_access\":[\"x\"]}"), +% {ok, 201, _, _} = test_request:put(Url ++ "/db/_design/foo", +% ?ADMIN_REQ_HEADERS, "{\"a\":1,\"_access\":[\"_users\"]}"), +% {ok, 201, _, _} = test_request:put(Url ++ "/db/_design/bar", +% ?ADMIN_REQ_HEADERS, "{\"a\":1,\"_access\":[\"houdini\"]}"), +% {ok, 201, _, _} = test_request:put(Url ++ "/db/b", +% ?USERX_REQ_HEADERS, "{\"b\":2,\"_access\":[\"x\"]}"), +% +% % % TODO: add allowing non-admin users adding non-admin ddocs +% {ok, 201, _, _} = test_request:put(Url ++ "/db/_design/x", +% ?ADMIN_REQ_HEADERS, "{\"b\":2,\"_access\":[\"x\"]}"), +% +% {ok, 201, _, _} = test_request:put(Url ++ "/db/c", +% ?ADMIN_REQ_HEADERS, "{\"c\":3,\"_access\":[\"y\"]}"), +% {ok, 201, _, _} = test_request:put(Url ++ "/db/d", +% ?USERY_REQ_HEADERS, "{\"d\":4,\"_access\":[\"y\"]}"), +% {ok, 200, _, Body} = test_request:get(Url ++ "/db/_all_docs?include_docs=true", +% ?USERX_REQ_HEADERS), +% {Json} = jiffy:decode(Body), +% ?debugFmt("~nHSOIN: ~p~n", [Json]), +% ?_assertEqual(3, length(proplists:get_value(<<"rows">>, Json))). diff --git a/src/couch/test/eunit/couchdb_mrview_cors_tests.erl b/src/couch/test/eunit/couchdb_mrview_cors_tests.erl index 9822542f3f7..5fa547d627a 100644 --- a/src/couch/test/eunit/couchdb_mrview_cors_tests.erl +++ b/src/couch/test/eunit/couchdb_mrview_cors_tests.erl @@ -18,6 +18,7 @@ -define(DDOC, {[ {<<"_id">>, <<"_design/foo">>}, + {<<"_access">>, [<<"user_a">>]}, {<<"shows">>, {[ {<<"bar">>, <<"function(doc, req) {return '