From: davisp@apache.org
To: commits@couchdb.apache.org
Date: Tue, 04 Apr 2017 21:03:55 -0000
Subject: [3/5] chttpd commit: updated refs/heads/COUCHDB-3288-remove-public-db-record to 04d26cc

Allow limiting maximum document body size

This is the HTTP layer and some tests. The actual checking is done in the
couch application's from_json_obj/1 function. If a document is too large,
a 413 response code is returned. The error reason will be the document ID;
the intent is to help users identify the offending document when they use
the _bulk_docs endpoint. It will also help the replicator skip over
documents which are too large.

COUCHDB-2992
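The size check itself is not part of this chttpd diff (it lands in the couch
application), so only the call sites appear below. As a rough sketch of the
idea only, assuming a hypothetical helper name validate_doc_size/2, the
couchdb/max_document_size config key that the tests below set, and generic
helpers such as config:get_integer/3 and couch_util:json_encode/1 (none of
which are taken from this commit):

% Illustrative sketch only -- not the code that ships in couchdb-couch.
% The helper name and the default limit are assumptions for this example.
validate_doc_size(DocId, JsonBody) ->
    Max = config:get_integer("couchdb", "max_document_size", 4294967296),
    case iolist_size(couch_util:json_encode(JsonBody)) =< Max of
        true ->
            ok;
        false ->
            % Carrying DocId in the throw is what lets chttpd:error_info/1
            % report the offending document as the 413 reason.
            throw({request_entity_too_large, DocId})
    end.

The important detail is that the thrown tuple carries the document ID, which
the new error_info/1 clause in chttpd.erl below turns into the 413 reason.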
Project: http://git-wip-us.apache.org/repos/asf/couchdb-chttpd/repo
Commit: http://git-wip-us.apache.org/repos/asf/couchdb-chttpd/commit/d1848e6f
Tree: http://git-wip-us.apache.org/repos/asf/couchdb-chttpd/tree/d1848e6f
Diff: http://git-wip-us.apache.org/repos/asf/couchdb-chttpd/diff/d1848e6f

Branch: refs/heads/COUCHDB-3288-remove-public-db-record
Commit: d1848e6f2288ea9b3758c22f10f75706a87be3b5
Parents: 60f6f6a
Author: Nick Vatamaniuc
Authored: Mon Mar 13 02:22:19 2017 -0400
Committer: Nick Vatamaniuc
Committed: Tue Mar 14 02:57:25 2017 -0400

----------------------------------------------------------------------
 src/chttpd.erl                    |   2 +
 src/chttpd_db.erl                 |   8 +-
 src/chttpd_show.erl               |   2 +-
 test/chttpd_db_doc_size_tests.erl | 173 +++++++++++++++++++++++++++++++++
 4 files changed, 180 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/couchdb-chttpd/blob/d1848e6f/src/chttpd.erl
----------------------------------------------------------------------
diff --git a/src/chttpd.erl b/src/chttpd.erl
index 89498c0..76eb7c3 100644
--- a/src/chttpd.erl
+++ b/src/chttpd.erl
@@ -891,6 +891,8 @@ error_info({missing_stub, Reason}) ->
     {412, <<"missing_stub">>, Reason};
 error_info(request_entity_too_large) ->
     {413, <<"too_large">>, <<"the request entity is too large">>};
+error_info({request_entity_too_large, DocID}) ->
+    {413, <<"document_too_large">>, DocID};
 error_info({error, security_migration_updates_disabled}) ->
     {503, <<"security_migration">>, <<"Updates to security docs are disabled during "
         "security migration.">>};

http://git-wip-us.apache.org/repos/asf/couchdb-chttpd/blob/d1848e6f/src/chttpd_db.erl
----------------------------------------------------------------------
diff --git a/src/chttpd_db.erl b/src/chttpd_db.erl
index bb08db6..37e4669 100644
--- a/src/chttpd_db.erl
+++ b/src/chttpd_db.erl
@@ -325,7 +325,7 @@ db_req(#httpd{method='POST', path_parts=[DbName], user_ctx=Ctx}=Req, Db) ->
     W = chttpd:qs_value(Req, "w", integer_to_list(mem3:quorum(Db))),
     Options = [{user_ctx,Ctx}, {w,W}],
 
-    Doc = couch_doc:from_json_obj(chttpd:json_body(Req)),
+    Doc = couch_doc:from_json_obj_validate(chttpd:json_body(Req)),
     Doc2 = case Doc#doc.id of
         <<"">> ->
             Doc#doc{id=couch_uuids:new(), revs={0, []}};
@@ -410,7 +410,7 @@ db_req(#httpd{method='POST',path_parts=[_,<<"_bulk_docs">>], user_ctx=Ctx}=Req,
         true ->
             Docs = lists:map(
                 fun(JsonObj) ->
-                    Doc = couch_doc:from_json_obj(JsonObj),
+                    Doc = couch_doc:from_json_obj_validate(JsonObj),
                     validate_attachment_names(Doc),
                     Id = case Doc#doc.id of
                         <<>> -> couch_uuids:new();
@@ -441,7 +441,7 @@ db_req(#httpd{method='POST',path_parts=[_,<<"_bulk_docs">>], user_ctx=Ctx}=Req,
             send_json(Req, 417, ErrorsJson)
         end;
     false ->
-        Docs = [couch_doc:from_json_obj(JsonObj) || JsonObj <- DocsArray],
+        Docs = [couch_doc:from_json_obj_validate(JsonObj) || JsonObj <- DocsArray],
         [validate_attachment_names(D) || D <- Docs],
         case fabric:update_docs(Db, Docs, [replicated_changes|Options]) of
         {ok, Errors} ->
@@ -1050,7 +1050,7 @@ couch_doc_from_req(Req, DocId, #doc{revs=Revs} = Doc) ->
     end,
     Doc#doc{id=DocId, revs=Revs2};
 couch_doc_from_req(Req, DocId, Json) ->
-    couch_doc_from_req(Req, DocId, couch_doc:from_json_obj(Json)).
+    couch_doc_from_req(Req, DocId, couch_doc:from_json_obj_validate(Json)).
 
 % Useful for debugging

http://git-wip-us.apache.org/repos/asf/couchdb-chttpd/blob/d1848e6f/src/chttpd_show.erl
----------------------------------------------------------------------
diff --git a/src/chttpd_show.erl b/src/chttpd_show.erl
index bbe51b2..49fed7b 100644
--- a/src/chttpd_show.erl
+++ b/src/chttpd_show.erl
@@ -129,7 +129,7 @@ send_doc_update_response(Req, Db, DDoc, UpdateName, Doc, DocId) ->
     _ ->
         Options = [{user_ctx, Req#httpd.user_ctx}, {w, W}]
     end,
-    NewDoc = couch_doc:from_json_obj({NewJsonDoc}),
+    NewDoc = couch_doc:from_json_obj_validate({NewJsonDoc}),
     couch_doc:validate_docid(NewDoc#doc.id),
     {UpdateResult, NewRev} = fabric:update_doc(Db, NewDoc, Options),
     NewRevStr = couch_doc:rev_to_str(NewRev),

http://git-wip-us.apache.org/repos/asf/couchdb-chttpd/blob/d1848e6f/test/chttpd_db_doc_size_tests.erl
----------------------------------------------------------------------
diff --git a/test/chttpd_db_doc_size_tests.erl b/test/chttpd_db_doc_size_tests.erl
new file mode 100644
index 0000000..d2a993e
--- /dev/null
+++ b/test/chttpd_db_doc_size_tests.erl
@@ -0,0 +1,173 @@
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+% http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+-module(chttpd_db_doc_size_tests).
+
+-include_lib("couch/include/couch_eunit.hrl").
+-include_lib("couch/include/couch_db.hrl").
+
+-define(USER, "chttpd_db_test_admin").
+-define(PASS, "pass").
+-define(AUTH, {basic_auth, {?USER, ?PASS}}).
+-define(CONTENT_JSON, {"Content-Type", "application/json"}).
+-define(CONTENT_MULTI_RELATED, {"Content-Type",
+    "multipart/related;boundary=\"bound\""}).
+-define(CONTENT_MULTI_FORM, {"Content-Type",
+    "multipart/form-data;boundary=\"bound\""}).
+
+
+setup() ->
+    ok = config:set("admins", ?USER, ?PASS, _Persist=false),
+    ok = config:set("couchdb", "max_document_size", "50"),
+    TmpDb = ?tempdb(),
+    Addr = config:get("chttpd", "bind_address", "127.0.0.1"),
+    Port = mochiweb_socket_server:get(chttpd, port),
+    Url = lists:concat(["http://", Addr, ":", Port, "/", ?b2l(TmpDb)]),
+    create_db(Url),
+    Url.
+
+teardown(Url) ->
+    delete_db(Url),
+    ok = config:delete("admins", ?USER, _Persist=false),
+    ok = config:delete("couchdb", "max_document_size").
+
+create_db(Url) ->
+    {ok, Status, _, _} = test_request:put(Url, [?CONTENT_JSON, ?AUTH], "{}"),
+    ?assert(Status =:= 201 orelse Status =:= 202).
+
+delete_db(Url) ->
+    {ok, 200, _, _} = test_request:delete(Url, [?AUTH]).
+
+all_test_() ->
+    {
+        "chttpd db max_document_size tests",
+        {
+            setup,
+            fun chttpd_test_util:start_couch/0, fun chttpd_test_util:stop_couch/1,
+            {
+                foreach,
+                fun setup/0, fun teardown/1,
+                [
+                    fun post_single_doc/1,
+                    fun put_single_doc/1,
+                    fun bulk_doc/1,
+                    fun put_post_doc_attach_inline/1,
+                    fun put_multi_part_related/1,
+                    fun post_multi_part_form/1
+                ]
+            }
+        }
+    }.
+
+post_single_doc(Url) ->
+    ?_assertEqual({<<"error">>, <<"document_too_large">>},
+        begin
+            NewDoc = "{\"post_single_doc\": \"some_doc\",
+                \"_id\": \"testdoc\", \"should_be\" : \"too_large\"}",
+            {ok, _, _, ResultBody} = test_request:post(Url,
+                [?CONTENT_JSON, ?AUTH], NewDoc),
+            {ErrorMsg} = ?JSON_DECODE(ResultBody),
+            lists:nth(1, ErrorMsg)
+        end).
+
+put_single_doc(Url) ->
+    ?_assertEqual({<<"error">>, <<"document_too_large">>},
+        begin
+            NewDoc = "{\"post_single_doc\": \"some_doc\",
+                \"_id\": \"testdoc\", \"should_be\" : \"too_large\"}",
+            {ok, _, _, ResultBody} = test_request:put(Url ++ "/" ++ "testid",
+                [?CONTENT_JSON, ?AUTH], NewDoc),
+            {ErrorMsg} = ?JSON_DECODE(ResultBody),
+            lists:nth(1, ErrorMsg)
+        end).
+
+bulk_doc(Url) ->
+    NewDoc = "{\"docs\": [{\"doc1\": 1}, {\"errordoc\":
+        \"this_should_be_the_error_document\"}]}",
+    {ok, _, _, ResultBody} = test_request:post(Url ++ "/_bulk_docs/",
+        [?CONTENT_JSON, ?AUTH], NewDoc),
+    ResultJson = ?JSON_DECODE(ResultBody),
+    Expect = {[{<<"error">>,<<"document_too_large">>},{<<"reason">>,<<>>}]},
+    ?_assertEqual(Expect, ResultJson).
+
+put_post_doc_attach_inline(Url) ->
+    Body1 = "{\"body\":\"This is a body.\",",
+    Body2 = lists:concat(["{\"body\":\"This is a body it should fail",
+        "because there are too many characters.\","]),
+    DocRest = lists:concat(["\"_attachments\":{\"foo.txt\":{",
+        "\"content_type\":\"text/plain\",",
+        "\"data\": \"VGhpcyBpcyBhIGJhc2U2NCBlbmNvZGVkIHRleHQ=\"}}}"]),
+    Doc1 = lists:concat([Body1, DocRest]),
+    Doc2 = lists:concat([Body2, DocRest]),
+
+    {ok, _, _, ResultBody} = test_request:post(Url,
+        [?CONTENT_JSON, ?AUTH], Doc1),
+    {Msg} = ?JSON_DECODE(ResultBody),
+    ?_assertEqual({<<"ok">>, true}, lists:nth(1, Msg)),
+    {ok, _, _, ResultBody1} = test_request:post(Url,
+        [?CONTENT_JSON, ?AUTH], Doc2),
+    {Msg1} = ?JSON_DECODE(ResultBody1),
+    ?_assertEqual({<<"error">>, <<"document_too_large">>}, lists:nth(1, Msg1)),
+
+    {ok, _, _, ResultBody2} = test_request:put(Url ++ "/" ++ "accept",
+        [?CONTENT_JSON, ?AUTH], Doc1),
+    {Msg2} = ?JSON_DECODE(ResultBody2),
+    ?_assertEqual({<<"ok">>, true}, lists:nth(1, Msg2)),
+    {ok, _, _, ResultBody3} = test_request:put(Url ++ "/" ++ "fail",
+        [?CONTENT_JSON, ?AUTH], Doc2),
+    {Msg3} = ?JSON_DECODE(ResultBody3),
+    ?_assertEqual({<<"error">>, <<"document_too_large">>}, lists:nth(1, Msg3)).
+
+put_multi_part_related(Url) ->
+    Body1 = "{\"body\":\"This is a body.\",",
+    Body2 = lists:concat(["{\"body\":\"This is a body it should fail",
+        "because there are too many characters.\","]),
+    DocBeg = "--bound\r\nContent-Type: application/json\r\n\r\n",
+    DocRest = lists:concat(["\"_attachments\":{\"foo.txt\":{\"follows\":true,",
+        "\"content_type\":\"text/plain\",\"length\":21},\"bar.txt\":",
+        "{\"follows\":true,\"content_type\":\"text/plain\",",
+        "\"length\":20}}}\r\n--bound\r\n\r\nthis is 21 chars long",
+        "\r\n--bound\r\n\r\nthis is 20 chars lon\r\n--bound--epilogue"]),
+    Doc1 = lists:concat([DocBeg, Body1, DocRest]),
+    Doc2 = lists:concat([DocBeg, Body2, DocRest]),
+    {ok, _, _, ResultBody} = test_request:put(Url ++ "/" ++ "accept",
+        [?CONTENT_MULTI_RELATED, ?AUTH], Doc1),
+    {Msg} = ?JSON_DECODE(ResultBody),
+    ?_assertEqual({<<"ok">>, true}, lists:nth(1, Msg)),
+    {ok, _, _, ResultBody1} = test_request:put(Url ++ "/" ++ "faildoc",
+        [?CONTENT_MULTI_RELATED, ?AUTH], Doc2),
+    {Msg1} = ?JSON_DECODE(ResultBody1),
+    ?_assertEqual({<<"error">>, <<"document_too_large">>}, lists:nth(1, Msg1)).
+
+post_multi_part_form(Url) ->
+    Port = mochiweb_socket_server:get(chttpd, port),
+    Host = lists:concat([ "http://127.0.0.1:", Port]),
+    Referer = {"Referer", Host},
+    Body1 = "{\"body\":\"This is a body.\"}",
+    Body2 = lists:concat(["{\"body\":\"This is a body it should fail",
+        "because there are too many characters.\"}"]),
+    DocBeg = "--bound\r\nContent-Disposition: form-data; name=\"_doc\"\r\n\r\n",
+    DocRest = lists:concat(["\r\n--bound\r\nContent-Disposition:",
+        "form-data; name=\"_attachments\"; filename=\"file.txt\"\r\n",
+        "Content-Type: text/plain\r\n\r\ncontents of file.txt\r\n\r\n",
+        "--bound--"]),
+    Doc1 = lists:concat([DocBeg, Body1, DocRest]),
+    Doc2 = lists:concat([DocBeg, Body2, DocRest]),
+    {ok, _, _, ResultBody} = test_request:post(Url ++ "/" ++ "accept",
+        [?CONTENT_MULTI_FORM, ?AUTH, Referer], Doc1),
+    {Msg} = ?JSON_DECODE(ResultBody),
+    ?_assertEqual({<<"ok">>, true}, lists:nth(1, Msg)),
+    {ok, _, _, ResultBody1} = test_request:post(Url ++ "/" ++ "fail",
+        [?CONTENT_MULTI_FORM, ?AUTH, Referer], Doc2),
+    {Msg1} = ?JSON_DECODE(ResultBody1),
+    ?_assertEqual({<<"error">>, <<"document_too_large">>}, lists:nth(1, Msg1)).
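
For a quick manual check outside the EUnit suite, a scratch module along
these lines exercises the same path as the tests above. It is only a sketch:
the node address, database name, and admin credentials are placeholders and
not part of this commit.

%% Manual-check sketch; assumes a running dev node at 127.0.0.1:15984 with an
%% admin "adm"/"pass" and an existing database "scratchdb".
-module(doc_size_check).
-export([run/0]).

run() ->
    %% Same knob the tests above set, well below any realistic document size.
    ok = config:set("couchdb", "max_document_size", "50", false),
    Url = "http://127.0.0.1:15984/scratchdb/too_big",
    Doc = "{\"_id\":\"too_big\",\"filler\":\"" ++ lists:duplicate(60, $x) ++ "\"}",
    {ok, Code, _, Body} = test_request:put(Url,
        [{"Content-Type", "application/json"}, {basic_auth, {"adm", "pass"}}], Doc),
    {Props} = jiffy:decode(Body),
    Error = couch_util:get_value(<<"error">>, Props),
    %% The reason field is expected to carry the offending document id.
    Reason = couch_util:get_value(<<"reason">>, Props),
    ok = config:delete("couchdb", "max_document_size", false),
    %% Expected result: {413, <<"document_too_large">>, <<"too_big">>}
    {Code, Error, Reason}.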