couchdb-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From dam...@apache.org
Subject svn commit: r749468 - in /couchdb/branches/rep_security: share/www/script/ src/couchdb/
Date Mon, 02 Mar 2009 23:20:45 GMT
Author: damien
Date: Mon Mar  2 23:20:44 2009
New Revision: 749468

URL: http://svn.apache.org/viewvc?rev=749468&view=rev
Log:
All tests pass, but all or nothing transactions removed, both with and without conflict checking.

Modified:
    couchdb/branches/rep_security/share/www/script/couch.js
    couchdb/branches/rep_security/share/www/script/couch_tests.js
    couchdb/branches/rep_security/src/couchdb/couch_db.erl
    couchdb/branches/rep_security/src/couchdb/couch_db_updater.erl
    couchdb/branches/rep_security/src/couchdb/couch_doc.erl
    couchdb/branches/rep_security/src/couchdb/couch_httpd.erl
    couchdb/branches/rep_security/src/couchdb/couch_httpd_db.erl
    couchdb/branches/rep_security/src/couchdb/couch_rep.erl
    couchdb/branches/rep_security/src/couchdb/couch_util.erl

Modified: couchdb/branches/rep_security/share/www/script/couch.js
URL: http://svn.apache.org/viewvc/couchdb/branches/rep_security/share/www/script/couch.js?rev=749468&r1=749467&r2=749468&view=diff
==============================================================================
--- couchdb/branches/rep_security/share/www/script/couch.js [utf-8] (original)
+++ couchdb/branches/rep_security/share/www/script/couch.js [utf-8] Mon Mar  2 23:20:44 2009
@@ -102,11 +102,11 @@
       body: JSON.stringify({"docs": docs})
     });
     CouchDB.maybeThrowError(this.last_req);
-    var result = JSON.parse(this.last_req.responseText);
+    var results = JSON.parse(this.last_req.responseText);
     for (var i = 0; i < docs.length; i++) {
-        docs[i]._rev = result.new_revs[i].rev;
+        docs[i]._rev = results[i].rev;
     }
-    return result;
+    return results;
   }
   
   this.ensureFullCommit = function() {

Modified: couchdb/branches/rep_security/share/www/script/couch_tests.js
URL: http://svn.apache.org/viewvc/couchdb/branches/rep_security/share/www/script/couch_tests.js?rev=749468&r1=749467&r2=749468&view=diff
==============================================================================
--- couchdb/branches/rep_security/share/www/script/couch_tests.js [utf-8] (original)
+++ couchdb/branches/rep_security/share/www/script/couch_tests.js [utf-8] Mon Mar  2 23:20:44 2009
@@ -501,28 +501,25 @@
     var docs = makeDocs(5);
 
     // Create the docs
-    var result = db.bulkSave(docs);
-    T(result.ok);
-    T(result.new_revs.length == 5);
+    var results = db.bulkSave(docs);
+    T(results.length == 5);
     for (var i = 0; i < 5; i++) {
-      T(result.new_revs[i].id == docs[i]._id);
-      T(result.new_revs[i].rev);
+      T(results[i].id == docs[i]._id);
+      T(results[i].rev);
       docs[i].string = docs[i].string + ".00";
     }
 
     // Update the docs
-    result = db.bulkSave(docs);
-    T(result.ok);
-    T(result.new_revs.length == 5);
+    results = db.bulkSave(docs);
+    T(results.length == 5);
     for (i = 0; i < 5; i++) {
-      T(result.new_revs[i].id == i.toString());
+      T(results[i].id == i.toString());
       docs[i]._deleted = true;
     }
 
     // Delete the docs
-    result = db.bulkSave(docs);
-    T(result.ok);
-    T(result.new_revs.length == 5);
+    results = db.bulkSave(docs);
+    T(results.length == 5);
     for (i = 0; i < 5; i++) {
       T(db.open(docs[i]._id) == null);
     }
@@ -531,10 +528,10 @@
     var req = CouchDB.request("POST", "/test_suite_db/_bulk_docs", {
       body: JSON.stringify({"docs": [{"foo":"bar"}]})
     });
-    result = JSON.parse(req.responseText);
+    results = JSON.parse(req.responseText);
     
-    T(result.new_revs[0].id != "");
-    T(result.new_revs[0].rev != "");
+    T(results[0].id != "");
+    T(results[0].rev != "");
   },
 
   // test saving a semi-large quanitity of documents and do some view queries.
@@ -551,7 +548,7 @@
     for(var i=0; i < numDocsToCreate; i += 100) {
         var createNow = Math.min(numDocsToCreate - i, 100);
         var docs = makeDocs(i, i + createNow);
-        T(db.bulkSave(docs).ok);
+        db.bulkSave(docs);
     }
 
     // query all documents, and return the doc.integer member as a key.
@@ -588,7 +585,7 @@
     if (debug) debugger;
     var numDocs = 500
     var docs = makeDocs(1,numDocs + 1);
-    T(db.bulkSave(docs).ok);
+    db.bulkSave(docs);
     var summate = function(N) {return (N+1)*N/2;};
 
     var map = function (doc) {
@@ -636,7 +633,7 @@
         docs.push({keys:["d", "a"]});
         docs.push({keys:["d", "b"]});
         docs.push({keys:["d", "c"]});
-        T(db.bulkSave(docs).ok);
+        db.bulkSave(docs);
         T(db.info().doc_count == ((i - 1) * 10 * 11) + ((j + 1) * 11));
       }
 
@@ -728,7 +725,7 @@
       docs.push({val:80});
       docs.push({val:90});
       docs.push({val:100});
-      T(db.bulkSave(docs).ok);
+      db.bulkSave(docs);
     }
     
     var results = db.query(map, reduceCombine);
@@ -747,7 +744,7 @@
 
     var numDocs = 5;
     var docs = makeDocs(1,numDocs + 1);
-    T(db.bulkSave(docs).ok);
+    db.bulkSave(docs);
     var summate = function(N) {return (N+1)*N/2;};
 
     var designDoc = {
@@ -1419,7 +1416,7 @@
     }
     T(db.save(designDoc).ok);
 
-    T(db.bulkSave(makeDocs(1, numDocs + 1)).ok);
+    db.bulkSave(makeDocs(1, numDocs + 1));
 
     // test that the _all_docs view returns correctly with keys
     var results = db.allDocs({startkey:"_design", endkey:"_design0"});
@@ -1636,7 +1633,7 @@
     if (debug) debugger;
 
     var docs = makeDocs(0, 100);
-    T(db.bulkSave(docs).ok);
+    db.bulkSave(docs);
 
     var designDoc = {
       _id:"_design/test",
@@ -1736,7 +1733,7 @@
     if (debug) debugger;
 
     var docs = makeDocs(0, 100);
-    T(db.bulkSave(docs).ok);
+    db.bulkSave(docs);
 
     var keys = ["10","15","30","37","50"];
     var rows = db.allDocs({},keys).rows;
@@ -1779,7 +1776,7 @@
     if (debug) debugger;
 
     var docs = makeDocs(0, 100);
-    T(db.bulkSave(docs).ok);
+    db.bulkSave(docs);
 
     var designDoc = {
       _id:"_design/test",
@@ -1906,7 +1903,7 @@
     if (debug) debugger;
 
     var docs = makeDocs(0, 100);
-    T(db.bulkSave(docs).ok);
+    db.bulkSave(docs);
 
     var queryFun = function(doc) { emit(doc.integer, doc.integer) };
     var reduceFun = function (keys, values) { return sum(values); };
@@ -1932,7 +1929,7 @@
     if (debug) debugger;
 
     var docs = makeDocs(0, 100);
-    T(db.bulkSave(docs).ok);
+    db.bulkSave(docs);
 
     var queryFun = function(doc) { emit(doc.integer, null) };
     var i;
@@ -2141,7 +2138,7 @@
         simple_test: new function () {
           this.init = function(dbA, dbB) {
             var docs = makeDocs(0, numDocs);
-            T(dbA.bulkSave(docs).ok);
+            dbA.bulkSave(docs);
           };
         
           this.afterAB1 = function(dbA, dbB) {          
@@ -2728,8 +2725,7 @@
     T(db.save(designDoc).ok);
     
     var docs = makeDocs(0, 10);
-    var saveResult = db.bulkSave(docs);
-    T(saveResult.ok);
+    db.bulkSave(docs);
     
     var view = db.view('lists/basicView');
     T(view.total_rows == 10);
@@ -2790,8 +2786,7 @@
     db.createDb();
     if (debug) debugger;
     var docs = makeDocs(0, 10);
-    var saveResult = db.bulkSave(docs);
-    T(saveResult.ok);
+    db.bulkSave(docs);
 
     var binAttDoc = {
       _id: "bin_doc",
@@ -2856,7 +2851,7 @@
     
     T(db.save(designDoc).ok);
 
-    T(db.bulkSave(makeDocs(1, numDocs + 1)).ok);
+    db.bulkSave(makeDocs(1, numDocs + 1));
 
     // go ahead and validate the views before purging
     var rows = db.view("test/all_docs_twice").rows;

Modified: couchdb/branches/rep_security/src/couchdb/couch_db.erl
URL: http://svn.apache.org/viewvc/couchdb/branches/rep_security/src/couchdb/couch_db.erl?rev=749468&r1=749467&r2=749468&view=diff
==============================================================================
--- couchdb/branches/rep_security/src/couchdb/couch_db.erl (original)
+++ couchdb/branches/rep_security/src/couchdb/couch_db.erl Mon Mar  2 23:20:44 2009
@@ -193,10 +193,10 @@
     
 update_doc(Db, Doc, Options) ->
     case update_docs(Db, [Doc], Options) of
-    {ok, [NewRev], _} ->
+    {ok, [{ok, NewRev}]} ->
         {ok, NewRev};
-    {conflicts, [ConflictRev]} ->
-        throw({conflict, ConflictRev})
+    {ok, [Error]} ->
+        throw(Error)
     end.
 
 update_docs(Db, Docs) ->
@@ -225,35 +225,38 @@
 
 
 validate_doc_update(#db{user_ctx=UserCtx, admins=Admins},
-        #doc{id= <<"_design/",_/binary>>}=Doc, _GetDiskDocFun) ->
+        #doc{id= <<"_design/",_/binary>>}, _GetDiskDocFun) ->
     UserNames = [UserCtx#user_ctx.name | UserCtx#user_ctx.roles],
     % if the user is a server admin or db admin, allow the save
     case length(UserNames -- [<<"_admin">> | Admins]) == length(UserNames) of
     true ->
         % not an admin
-        throw({unauthorized, <<"You are not a server or database admin.">>});
+        {unauthorized, <<"You are not a server or database admin.">>};
     false ->
-        Doc
+        ok
     end;
-validate_doc_update(#db{validate_doc_funs=[]}, Doc, _GetDiskDocFun) ->
-    Doc;
-validate_doc_update(_Db, #doc{id= <<"_local/",_/binary>>}=Doc, _GetDiskDocFun) ->
-    Doc;
+validate_doc_update(#db{validate_doc_funs=[]}, _Doc, _GetDiskDocFun) ->
+    ok;
+validate_doc_update(_Db, #doc{id= <<"_local/",_/binary>>}, _GetDiskDocFun) ->
+    ok;
 validate_doc_update(#db{name=DbName,user_ctx=Ctx}=Db, Doc, GetDiskDocFun) ->
     DiskDoc = GetDiskDocFun(),
     JsonCtx =  {[{<<"db">>, DbName},
             {<<"name">>,Ctx#user_ctx.name},
             {<<"roles">>,Ctx#user_ctx.roles}]},
-    [case Fun(Doc, DiskDoc, JsonCtx) of
-        ok -> ok;
-        Error -> throw(Error)
-    end || Fun <- Db#db.validate_doc_funs],
-    Doc.
+    try [case Fun(Doc, DiskDoc, JsonCtx) of
+            ok -> ok;
+            Error -> throw(Error)
+        end || Fun <- Db#db.validate_doc_funs],
+        ok
+    catch
+        throw:Error ->
+            Error
+    end.
 
 
 prep_and_validate_update(Db, #doc{id=Id,revs={RevStart, [_NewRev|PrevRevs]}}=Doc,
         OldFullDocInfo, LeafRevsDict) ->
-    NilDocFun = fun() -> nil end,
     case PrevRevs of
     [PrevRev|_] ->
         case dict:find({RevStart-1, PrevRev}, LeafRevsDict) of
@@ -262,58 +265,76 @@
             true ->
                 DiskDoc = make_doc(Db, Id, Deleted, DiskSp, DiskRevs),
                 Doc2 = couch_doc:merge_stubs(Doc, DiskDoc),
-                {ok, validate_doc_update(Db, Doc2, fun() -> DiskDoc end)};
+                {validate_doc_update(Db, Doc2, fun() -> DiskDoc end), Doc2};
             false ->
                 LoadDiskDoc = fun() -> make_doc(Db,Id,Deleted,DiskSp,DiskRevs) end,
-                {ok, validate_doc_update(Db, Doc, LoadDiskDoc)}
+                {validate_doc_update(Db, Doc, LoadDiskDoc), Doc}
             end;
         error ->
-            {conflict, validate_doc_update(Db, Doc, NilDocFun)}
+            {conflict, Doc}
         end;
     [] ->
         % new doc, and we have existing revs.
         if OldFullDocInfo#full_doc_info.deleted ->
             % existing docs are deletions
-            {ok, validate_doc_update(Db, Doc, NilDocFun)};
+            {validate_doc_update(Db, Doc, fun() -> nil end), Doc};
         true ->
-            {conflict, validate_doc_update(Db, Doc, NilDocFun)}
+            {conflict, Doc}
         end
     end.
 
 
 
-prep_and_validate_updates(_Db, [], [], AccPrepped, AccConflicts) ->
-   {AccPrepped, AccConflicts};
-prep_and_validate_updates(Db, [DocBucket|RestBuckets], [not_found|RestLookups], AccPrepped, AccConflicts) ->
-    % no existing revs are known, make sure no old revs specified.
-    AccConflicts2 = [Doc || #doc{revs=[_NewRev,_OldRev|_]=Doc} <- DocBucket] ++ AccConflicts,
-    AccPrepped2 = [[validate_doc_update(Db, Doc, fun()-> nil end) || Doc <- DocBucket] | AccPrepped],
-    prep_and_validate_updates(Db, RestBuckets, RestLookups, AccPrepped2, AccConflicts2);
+prep_and_validate_updates(_Db, [], [], AccPrepped, AccFatalErrors) ->
+   {AccPrepped, AccFatalErrors};
+prep_and_validate_updates(Db, [DocBucket|RestBuckets], [not_found|RestLookups], AccPrepped, AccErrors) ->
+    [#doc{id=Id}|_]=DocBucket,
+    % no existing revs are known,
+    {PreppedBucket, AccErrors3} = lists:foldl(
+        fun(#doc{revs=Revs}=Doc, {AccBucket, AccErrors2}) ->
+            case Revs of
+            {Pos, [NewRev,_OldRev|_]} ->
+                % old revs specified but none exist, a conflict
+                {AccBucket, [{{Id, {Pos, NewRev}}, conflict} | AccErrors2]};
+            {Pos, [NewRev]} ->
+                case validate_doc_update(Db, Doc, fun() -> nil end) of
+                ok ->
+                    {[Doc | AccBucket], AccErrors2};
+                Error ->
+                    {AccBucket, [{{Id, {Pos, NewRev}}, Error} | AccErrors2]}
+                end
+            end
+        end,
+        {[], AccErrors}, DocBucket),
+
+    prep_and_validate_updates(Db, RestBuckets, RestLookups,
+            [PreppedBucket | AccPrepped], AccErrors3);
 prep_and_validate_updates(Db, [DocBucket|RestBuckets],
         [{ok, #full_doc_info{rev_tree=OldRevTree}=OldFullDocInfo}|RestLookups],
-        AccPrepped, AccConflicts) ->
+        AccPrepped, AccErrors) ->
     Leafs = couch_key_tree:get_all_leafs(OldRevTree),
     LeafRevsDict = dict:from_list([{{Start, RevId}, {Deleted, Sp, Revs}} ||
             {{Deleted, Sp}, {Start, [RevId|_]}=Revs} <- Leafs]),
-    {Prepped, AccConflicts2} = lists:foldl(
-        fun(Doc, {Docs2Acc, Conflicts2Acc}) ->
+    {PreppedBucket, AccErrors3} = lists:foldl(
+        fun(Doc, {Docs2Acc, AccErrors2}) ->
             case prep_and_validate_update(Db, Doc, OldFullDocInfo,
                     LeafRevsDict) of
             {ok, Doc} ->
-                {[Doc | Docs2Acc], Conflicts2Acc};
-            {conflict, Doc} ->
-                {[Doc | Docs2Acc], [Doc|Conflicts2Acc]}
+                {[Doc | Docs2Acc], AccErrors2};
+            {Error, #doc{id=Id,revs={Pos, [NewRev|_]}}} ->
+                % Record the error
+                {Docs2Acc, [{{Id, {Pos, NewRev}}, Error} |AccErrors2]}
             end
         end,
-        {[], AccConflicts}, DocBucket),
-    prep_and_validate_updates(Db, RestBuckets, RestLookups, [Prepped | AccPrepped], AccConflicts2).
+        {[], AccErrors}, DocBucket),
+    prep_and_validate_updates(Db, RestBuckets, RestLookups, [PreppedBucket | AccPrepped], AccErrors3).
 
 
 update_docs(#db{update_pid=UpdatePid}=Db, Docs, Options) ->
     update_docs(#db{update_pid=UpdatePid}=Db, Docs, Options, interactive_edit).
     
 should_validate(Db, Docs) ->
-    % true if our db has validation funs, there are design docs,
+    % true if our db has validation funs, we have design docs,
     % or we have attachments.
     (Db#db.validate_doc_funs /= []) orelse
         lists:any(
@@ -323,6 +344,58 @@
                 Atts /= []
             end, Docs).
 
+validate_replicated_updates(_Db, [], [], AccPrepped, AccErrors) ->
+    Errors2 = [{{Id, {Pos, Rev}}, Error} || 
+            {#doc{id=Id,revs={Pos,[Rev|_]}}, Error} <- AccErrors],
+    {lists:reverse(AccPrepped), lists:reverse(Errors2)};
+validate_replicated_updates(Db, [Bucket|RestBuckets], [OldInfo|RestOldInfo], AccPrepped, AccErrors) ->
+    case OldInfo of
+    not_found ->
+        {ValidatedBucket, AccErrors3} = lists:foldl(
+            fun(Doc, {AccPrepped2, AccErrors2}) ->
+                case validate_doc_update(Db, Doc, fun() -> nil end) of
+                ok ->
+                    {[Doc | AccPrepped2], AccErrors2};
+                Error ->
+                    {AccPrepped2, [{Doc, Error} | AccErrors2]}
+                end
+            end,
+            {[], AccErrors}, Bucket),
+        validate_replicated_updates(Db, RestBuckets, RestOldInfo, [ValidatedBucket | AccPrepped], AccErrors3);
+    {ok, #full_doc_info{rev_tree=OldTree}} ->
+        NewRevTree = lists:foldl(
+            fun(NewDoc, AccTree) ->
+                {NewTree, _} = couch_key_tree:merge(AccTree, [couch_db:doc_to_tree(NewDoc)]),
+                NewTree
+            end,
+            OldTree, Bucket),
+        Leafs = couch_key_tree:get_all_leafs_full(NewRevTree),
+        LeafRevsFullDict = dict:from_list( [{{Start, RevId}, FullPath} || {Start, [{RevId, _}|_]}=FullPath <- Leafs]),
+        {ValidatedBucket, AccErrors3} =
+        lists:foldl(
+            fun(#doc{id=Id,revs={Pos, [RevId|_]}}=Doc, {AccValidated, AccErrors2}) ->
+                case dict:find({Pos, RevId}, LeafRevsFullDict) of
+                {ok, {Start, Path}} ->
+                    % our unflushed doc is a leaf node. Go back on the path 
+                    % to find the previous rev that's on disk.
+                    LoadPrevRev = fun() ->
+                        make_first_doc_on_disk(Db, Id, Start - 1, tl(Path))
+                    end,
+                    case validate_doc_update(Db, Doc, LoadPrevRev) of
+                    ok ->
+                        {[Doc | AccValidated], AccErrors2};
+                    Error ->
+                        {AccValidated, [{Doc, Error} | AccErrors2]}
+                    end;
+                _ ->
+                    % this doc isn't a leaf or already exists in the tree.
+                    % ignore but consider it a success.
+                    {AccValidated, AccErrors2}
+                end
+            end,
+            {[], []}, Bucket),
+        validate_replicated_updates(Db, RestBuckets, RestOldInfo, [ValidatedBucket | AccPrepped], AccErrors3)
+    end.
 
 update_docs(Db, Docs, Options, replicated_changes) ->
     DocBuckets = group_alike_docs(Docs),
@@ -332,41 +405,15 @@
         Ids = [Id || [#doc{id=Id}|_] <- DocBuckets],
         ExistingDocs = get_full_doc_infos(Db, Ids),
     
-        DocBuckets2 = lists:zipwith(
-            fun(Bucket, not_found) ->
-                [validate_doc_update(Db, Doc, fun()-> nil end) || Doc <- Bucket];
-            (Bucket, {ok, #full_doc_info{rev_tree=OldTree}}) ->
-                NewRevTree = lists:foldl(
-                    fun(NewDoc, AccTree) ->
-                        {NewTree, _} = couch_key_tree:merge(AccTree, [couch_db:doc_to_tree(NewDoc)]),
-                        NewTree
-                    end,
-                    OldTree, Bucket),
-                Leafs = couch_key_tree:get_all_leafs_full(NewRevTree),
-                LeafRevsFullDict = dict:from_list( [{{Start, RevId}, FullPath} || {Start, [{RevId, _}|_]}=FullPath <- Leafs]),
-                lists:flatmap(
-                    fun(#doc{id=Id,revs={Pos, [RevId|_]}}=Doc) ->
-                        case dict:find({Pos, RevId}, LeafRevsFullDict) of
-                        {ok, {Start, Path}} ->
-                            % our unflushed doc is a leaf node. Go back on the path 
-                            % to find the previous rev that's on disk.
-                            LoadPrevRev = fun() ->
-                                make_first_doc_on_disk(Db, Id, Start - 1, lists:tail(Path))
-                            end,
-                            [validate_doc_update(Db, Doc, LoadPrevRev)];
-                        _ ->
-                            % this doc isn't a leaf or already exists in the tree. ignore
-                            []
-                        end
-                    end, Bucket)
-            end,
-            DocBuckets, ExistingDocs),
+        {DocBuckets2, DocErrors} =
+                validate_replicated_updates(Db, DocBuckets, ExistingDocs, [], []),
         DocBuckets3 = [Bucket || [_|_]=Bucket <- DocBuckets2]; % remove empty buckets
     false ->
+        DocErrors = [],
         DocBuckets3 = DocBuckets
     end,
-    {ok, _} = write_and_commit(Db, DocBuckets3, [merge_conflicts | Options]),
-    ok;
+    {ok, []} = write_and_commit(Db, DocBuckets3, [merge_conflicts | Options]),
+    {ok, DocErrors};
     
 update_docs(Db, Docs, Options, interactive_edit) ->
     % go ahead and generate the new revision ids for the documents.
@@ -390,37 +437,25 @@
         Ids = [Id || [#doc{id=Id}|_] <- DocBuckets],
         ExistingDocInfos = get_full_doc_infos(Db, Ids),
     
-        {DocBuckets2, PreConflicts} = prep_and_validate_updates(Db, DocBuckets, ExistingDocInfos, [], []),
-    
-        case PreConflicts of
-        [] ->
-            Continue = ok;
-        _ ->
-            case lists:member(merge_conflicts, Options) of
-            true -> Continue = ok;
-            false -> Continue = {conflicts, PreConflicts}
-            end
-        end;
+        {DocBucketsPrepped, Failures} = prep_and_validate_updates(Db, DocBuckets, ExistingDocInfos, [], []),
+        % strip out any empty buckets
+        DocBuckets2 = [Bucket || [_|_] = Bucket <- DocBucketsPrepped];
     false ->
-        Continue = ok,
+        Failures = [],
         DocBuckets2 = DocBuckets
     end,
-    if Continue == ok ->
-        case write_and_commit(Db, DocBuckets2, Options) of
-        {ok, SavedConflicts} ->
-            {ok, docs_to_revs(Docs2), SavedConflicts};
-        {conflicts, Conflicts} ->
-            {conflicts, Conflicts}
-        end;
-    true ->
-        Continue
-    end.
-    
-
-docs_to_revs([]) ->
-    [];
-docs_to_revs([#doc{revs={Start,[RevId|_]}} | Rest]) ->
-    [{Start, RevId} | docs_to_revs(Rest)].
+    {ok, CommitFailures} = write_and_commit(Db, DocBuckets2, Options),
+    FailDict = dict:from_list(CommitFailures ++ Failures),
+    % the output for each is either {ok, NewRev} or Error
+    {ok, lists:map(
+        fun(#doc{id=Id,revs={Pos, [NewRevId|_]}}) ->
+            case dict:find({Id, {Pos, NewRevId}}, FailDict) of
+            {ok, Error} ->
+                Error;
+            error ->
+                {ok, {Pos, NewRevId}}
+            end
+        end, Docs2)}.
 
 % Returns the first available document on disk. Input list is a full rev path
 % for the doc.
@@ -438,8 +473,7 @@
     % flush unwritten binaries to disk.
     DocBuckets2 = [[doc_flush_binaries(Doc, Db#db.fd) || Doc <- Bucket] || Bucket <- DocBuckets],
     case gen_server:call(UpdatePid, {update_docs, DocBuckets2, Options}, infinity) of
-    {ok, SavedConflicts} -> {ok, SavedConflicts};
-    {conflicts, Conflicts} -> {conflicts, Conflicts};
+    {ok, Conflicts} -> {ok, Conflicts};
     retry ->
         % This can happen if the db file we wrote to was swapped out by
         % compaction. Retry by reopening the db and writing to the current file
@@ -448,8 +482,7 @@
         % We only retry once
         close(Db2),
         case gen_server:call(UpdatePid, {update_docs, DocBuckets3, Options}, infinity) of
-        {ok, SavedConflicts} -> {ok, SavedConflicts};
-        {conflicts, Conflicts} -> {conflicts, Conflicts};
+        {ok, Conflicts} -> {ok, Conflicts};
         Else -> throw(Else)
         end
     end.

Modified: couchdb/branches/rep_security/src/couchdb/couch_db_updater.erl
URL: http://svn.apache.org/viewvc/couchdb/branches/rep_security/src/couchdb/couch_db_updater.erl?rev=749468&r1=749467&r2=749468&view=diff
==============================================================================
--- couchdb/branches/rep_security/src/couchdb/couch_db_updater.erl (original)
+++ couchdb/branches/rep_security/src/couchdb/couch_db_updater.erl Mon Mar  2 23:20:44 2009
@@ -41,11 +41,6 @@
 terminate(_Reason, Db) ->
     close_db(Db).
 
-docs_to_revs([]) ->
-    [];
-docs_to_revs([#doc{revs={Start,[RevId|_]}} | Rest]) ->
-    [{Start, RevId} | docs_to_revs(Rest)].
-
 handle_call(get_db, _From, Db) ->
     {reply, {ok, Db}, Db};
 handle_call({update_docs, DocActions, Options}, _From, Db) ->
@@ -53,12 +48,10 @@
     {ok, Conflicts, Db2} ->
         ok = gen_server:call(Db#db.main_pid, {db_updated, Db2}),
         couch_db_update_notifier:notify({updated, Db2#db.name}),
-        {reply, {ok, docs_to_revs(Conflicts)}, Db2}
+        {reply, {ok, Conflicts}, Db2}
     catch
         throw: retry ->
-            {reply, retry, Db};
-        throw: {conflicts, Conflicts} ->
-            {reply, {conflicts, docs_to_revs(Conflicts)}, Db}
+            {reply, retry, Db}
     end;
 handle_call(full_commit, _From, #db{waiting_delayed_commit=nil}=Db) ->
     {reply, ok, Db}; % no data waiting, return ok immediately
@@ -367,16 +360,17 @@
         end, Unflushed),
     flush_trees(Db, RestUnflushed, [InfoUnflushed#full_doc_info{rev_tree=Flushed} | AccFlushed]).
 
-merge_rev_trees([], [], AccNewInfos, AccConflicts, AccSeq) ->
+merge_rev_trees(_MergeConflicts, [], [], AccNewInfos, AccConflicts, AccSeq) ->
     {ok, lists:reverse(AccNewInfos), AccConflicts, AccSeq};
-merge_rev_trees([NewDocs|RestDocsList],
+merge_rev_trees(MergeConflicts, [NewDocs|RestDocsList],
         [OldDocInfo|RestOldInfo], AccNewInfos, AccConflicts, AccSeq) ->
     #full_doc_info{id=Id,rev_tree=OldTree,deleted=OldDeleted}=OldDocInfo,
     {NewRevTree, NewConflicts} = lists:foldl(
-        fun(NewDoc, {AccTree, AccConflicts2}) ->
+        fun(#doc{revs={Pos,[Rev|_]}}=NewDoc, {AccTree, AccConflicts2}) ->
             case couch_key_tree:merge(AccTree, [couch_db:doc_to_tree(NewDoc)]) of
-            {NewTree, conflicts} when not OldDeleted ->
-                {NewTree, [NewDoc | AccConflicts2]};
+            {_NewTree, conflicts}
+                    when (not OldDeleted) and (not MergeConflicts) ->
+                {AccTree, [{{Id, {Pos,Rev}}, conflict} | AccConflicts2]};
             {NewTree, _} ->
                 {NewTree, AccConflicts2}
             end
@@ -384,12 +378,12 @@
         {OldTree, AccConflicts}, NewDocs),
     if NewRevTree == OldTree ->
         % nothing changed
-        merge_rev_trees(RestDocsList, RestOldInfo, AccNewInfos,
+        merge_rev_trees(MergeConflicts, RestDocsList, RestOldInfo, AccNewInfos,
                 NewConflicts, AccSeq);
     true ->
         % we have updated the document, give it a new seq #
         NewInfo = #full_doc_info{id=Id,update_seq=AccSeq+1,rev_tree=NewRevTree},
-        merge_rev_trees(RestDocsList,RestOldInfo, 
+        merge_rev_trees(MergeConflicts, RestDocsList,RestOldInfo, 
                 [NewInfo|AccNewInfos], NewConflicts, AccSeq+1)
     end.
 
@@ -431,28 +425,23 @@
         Ids, OldDocLookups),
     
     % Merge the new docs into the revision trees.
-    {ok, NewDocInfos, Conflicts, NewSeq} =
-            merge_rev_trees(DocsList2, OldDocInfos, [], [], LastSeq),
-    
-    case (Conflicts /= []) and (not lists:member(merge_conflicts, Options)) of
-    true -> throw({conflicts, Conflicts});
-    false -> ok
-    end,
-    
+    {ok, NewDocInfos, Conflicts, NewSeq} = merge_rev_trees(
+            lists:member(merge_conflicts, Options),
+            DocsList2, OldDocInfos, [], [], LastSeq),
     RemoveSeqs =
         [OldSeq || {ok, #full_doc_info{update_seq=OldSeq}} <- OldDocLookups],
     
-    % All regular documents are now ready to write.
+    % All documents are now ready to write.
+    
+    {ok, LocalConflicts, Db2}  = update_local_docs(Db, NonRepDocs),
     
-    % Try to write the local documents first, a conflict might be generated
-    {ok, Db2}  = update_local_docs(Db, NonRepDocs),
     % Write out the document summaries (the bodies are stored in the nodes of
     % the trees, the attachments are already written to disk)
     {ok, FlushedDocInfos} = flush_trees(Db2, NewDocInfos, []),
     
     {ok, InfoById, InfoBySeq} = new_index_entries(FlushedDocInfos, [], []),
 
-    % and the indexes to the documents
+    % and the indexes
     {ok, DocInfoBySeqBTree2} = couch_btree:add_remove(DocInfoBySeqBTree, InfoBySeq, RemoveSeqs),
     {ok, DocInfoByIdBTree2} = couch_btree:add_remove(DocInfoByIdBTree, InfoById, []),
 
@@ -470,18 +459,15 @@
         Db4 = refresh_validate_doc_funs(Db3)
     end,
     
-    {ok, Conflicts, commit_data(Db4, not lists:member(full_commit, Options))}.
+    {ok, LocalConflicts ++ Conflicts, 
+            commit_data(Db4, not lists:member(full_commit, Options))}.
     
 update_local_docs(#db{local_docs_btree=Btree}=Db, Docs) ->
     Ids = [Id || #doc{id=Id} <- Docs],
     OldDocLookups = couch_btree:lookup(Btree, Ids),
     BtreeEntries = lists:zipwith(
-        fun(#doc{id=Id,deleted=Delete,revs={0,Revs},body=Body}=Doc, OldDocLookup) ->
-            NewRev =
-            case Revs of
-                [] -> 0;
-                [RevStr] -> list_to_integer(?b2l(RevStr))
-            end,
+        fun(#doc{id=Id,deleted=Delete,revs={0,[RevStr]},body=Body}, OldDocLookup) ->
+            NewRev = list_to_integer(?b2l(RevStr)),
             OldRev =
             case OldDocLookup of
                 {ok, {_, {OldRev0, _}}} -> OldRev0;
@@ -494,18 +480,19 @@
                     true  -> {remove, Id}
                 end;
             false ->
-                throw({conflicts, [Doc]})
+                {conflict, {Id, {0, RevStr}}}
             end
             
         end, Docs, OldDocLookups),
 
     BtreeIdsRemove = [Id || {remove, Id} <- BtreeEntries],
     BtreeIdsUpdate = [ByIdDocInfo || {update, ByIdDocInfo} <- BtreeEntries],
+    Conflicts = [{conflict, IdRev} || {conflict, IdRev} <- BtreeEntries],
     
     {ok, Btree2} =
         couch_btree:add_remove(Btree, BtreeIdsUpdate, BtreeIdsRemove),
 
-    {ok, Db#db{local_docs_btree = Btree2}}.
+    {ok, Conflicts, Db#db{local_docs_btree = Btree2}}.
 
 
 commit_data(Db) ->

Modified: couchdb/branches/rep_security/src/couchdb/couch_doc.erl
URL: http://svn.apache.org/viewvc/couchdb/branches/rep_security/src/couchdb/couch_doc.erl?rev=749468&r1=749467&r2=749468&view=diff
==============================================================================
--- couchdb/branches/rep_security/src/couchdb/couch_doc.erl (original)
+++ couchdb/branches/rep_security/src/couchdb/couch_doc.erl Mon Mar  2 23:20:44 2009
@@ -12,7 +12,7 @@
 
 -module(couch_doc).
 
--export([to_doc_info/1,to_doc_info_path/1,parse_rev/1,parse_revs/1,to_rev_str/1,to_rev_strs/1]).
+-export([to_doc_info/1,to_doc_info_path/1,parse_rev/1,parse_revs/1,rev_to_str/1,rev_to_strs/1]).
 -export([bin_foldl/3,bin_size/1,bin_to_binary/1,get_validate_doc_fun/1]).
 -export([from_json_obj/1,to_json_obj/2,has_stubs/1, merge_stubs/2]).
 
@@ -37,28 +37,28 @@
                         {<<"ids">>, RevIds}]}}]
     end.
 
-to_rev_str({Pos, RevId}) ->
+rev_to_str({Pos, RevId}) ->
     ?l2b([integer_to_list(Pos),"-",RevId]).
 
-to_rev_strs([]) ->
+rev_to_strs([]) ->
     [];
-to_rev_strs([{Pos, RevId}| Rest]) ->
-    [to_rev_str({Pos, RevId}) | to_rev_strs(Rest)].
+rev_to_strs([{Pos, RevId}| Rest]) ->
+    [rev_to_str({Pos, RevId}) | rev_to_strs(Rest)].
 
 to_json_meta(Meta) ->
     lists:map(
         fun({revs_info, Start, RevsInfo}) ->
             {JsonRevsInfo, _Pos}  = lists:mapfoldl(
                 fun({RevId, Status}, PosAcc) ->
-                    JsonObj = {[{<<"rev">>, to_rev_str({PosAcc, RevId})},
+                    JsonObj = {[{<<"rev">>, rev_to_str({PosAcc, RevId})},
                         {<<"status">>, ?l2b(atom_to_list(Status))}]},
                     {JsonObj, PosAcc - 1}
                 end, Start, RevsInfo),
             {<<"_revs_info">>, JsonRevsInfo};
         ({conflicts, Conflicts}) ->
-            {<<"_conflicts">>, to_rev_strs(Conflicts)};
+            {<<"_conflicts">>, rev_to_strs(Conflicts)};
         ({deleted_conflicts, DConflicts}) ->
-            {<<"_deleted_conflicts">>, to_rev_strs(DConflicts)}
+            {<<"_deleted_conflicts">>, rev_to_strs(DConflicts)}
         end, Meta).
 
 to_json_attachment_stubs(Attachments) ->

Modified: couchdb/branches/rep_security/src/couchdb/couch_httpd.erl
URL: http://svn.apache.org/viewvc/couchdb/branches/rep_security/src/couchdb/couch_httpd.erl?rev=749468&r1=749467&r2=749468&view=diff
==============================================================================
--- couchdb/branches/rep_security/src/couchdb/couch_httpd.erl (original)
+++ couchdb/branches/rep_security/src/couchdb/couch_httpd.erl Mon Mar  2 23:20:44 2009
@@ -16,7 +16,7 @@
 -export([start_link/0, stop/0, handle_request/3]).
 
 -export([header_value/2,header_value/3,qs_value/2,qs_value/3,qs/1,path/1,absolute_uri/2]).
--export([verify_is_server_admin/1,unquote/1,quote/1,recv/2]).
+-export([verify_is_server_admin/1,unquote/1,quote/1,recv/2,error_info/1]).
 -export([parse_form/1,json_body/1,body/1,doc_etag/1, make_etag/1, etag_respond/3]).
 -export([primary_header_value/2,partition/1,serve_file/3]).
 -export([start_chunked_response/3,send_chunk/2]).
@@ -151,7 +151,7 @@
     catch
         throw:Error ->
             send_error(HttpReq, Error);
-        Tag:Error ->
+        Tag:Error ->
             ?LOG_ERROR("Uncaught error in HTTP request: ~p",[{Tag, Error}]),
             ?LOG_DEBUG("Stacktrace: ~p",[erlang:get_stacktrace()]),
             send_error(HttpReq, Error)
@@ -270,7 +270,7 @@
     ?JSON_DECODE(body(Httpd)).
 
 doc_etag(#doc{revs={Start, [DiskRev|_]}}) ->
-    "\"" ++ ?b2l(couch_doc:to_rev_str({Start, DiskRev})) ++ "\"".
+    "\"" ++ ?b2l(couch_doc:rev_to_str({Start, DiskRev})) ++ "\"".
 
 make_etag(Term) ->
     <<SigInt:128/integer>> = erlang:md5(term_to_binary(Term)),
@@ -362,68 +362,50 @@
     send_chunk(Resp, []).
 
 
-send_error(Req, bad_request) ->
-    send_error(Req, 400, <<"bad_request">>, <<>>);
-send_error(Req, {bad_request, Reason}) ->
-    send_error(Req, 400, <<"bad_request">>, Reason);
-send_error(Req, not_found) ->
-    send_error(Req, 404, <<"not_found">>, <<"Missing">>);
-send_error(Req, {not_found, Reason}) ->
-    send_error(Req, 404, <<"not_found">>, Reason);
-send_error(Req, {conflict, Rev}) ->
-    send_error(Req, 409, <<"conflict">>, Rev);
-send_error(Req, {forbidden, Msg}) ->
-    send_json(Req, 403,
-        {[{<<"error">>,  <<"forbidden">>},
-         {<<"reason">>, Msg}]});
-send_error(Req, {unauthorized, Msg}) ->
-    case couch_config:get("httpd", "WWW-Authenticate", nil) of
-    nil ->
-        Headers = [];
-    Type ->
-        Headers = [{"WWW-Authenticate", Type}]
-    end,
-    send_json(Req, 401, Headers,
-        {[{<<"error">>,  <<"unauthorized">>},
-         {<<"reason">>, Msg}]});
-send_error(Req, {http_error, Code, Headers, Error, Reason}) ->
-    send_json(Req, Code, Headers,
-        {[{<<"error">>, Error}, {<<"reason">>, Reason}]});
-send_error(Req, {user_error, {Props}}) ->
-    {Headers} = proplists:get_value(<<"headers">>, Props, {[]}),
-    send_json(Req,
-        proplists:get_value(<<"http_status">>, Props, 500),
-        Headers,
-        {[{<<"error">>, proplists:get_value(<<"error">>, Props)},
-            {<<"reason">>, proplists:get_value(<<"reason">>, Props)}]});
-send_error(Req, file_exists) ->
-    send_error(Req, 412, <<"file_exists">>, <<"The database could not be "
-        "created, the file already exists.">>);
-send_error(Req, {Error, Reason}) ->
-    send_error(Req, 500, Error, Reason);
-send_error(Req, Error) ->
-    send_error(Req, 500, <<"error">>, Error).
 
+error_info(bad_request) ->
+    {400, <<"bad_request">>, <<>>};
+error_info({bad_request, Reason}) ->
+    {400, <<"bad_request">>, Reason};
+error_info(not_found) ->
+    {404, <<"not_found">>, <<"Missing">>};
+error_info({not_found, Reason}) ->
+    {404, <<"not_found">>, Reason};
+error_info(conflict) ->
+    {409, <<"conflict">>, <<"Document update conflict.">>};
+error_info({forbidden, Msg}) ->
+    {403, <<"forbidden">>, Msg};
+error_info({unauthorized, Msg}) ->
+    {401, <<"unauthorized">>, Msg};
+error_info(file_exists) ->
+    {412, <<"file_exists">>, <<"The database could not be "
+        "created, the file already exists.">>};
+error_info({Error, Reason}) ->
+    {500, couch_util:to_binary(Error), couch_util:to_binary(Reason)};
+error_info(Error) ->
+    {500, <<"unknown_error">>, couch_util:to_binary(Error)}.
 
+send_error(Req, Error) ->
+    {Code, ErrorStr, ReasonStr} = error_info(Error),
+    if Code == 401 ->     
+        case couch_config:get("httpd", "WWW-Authenticate", nil) of
+        nil ->
+            Headers = [];
+        Type ->
+            Headers = [{"WWW-Authenticate", Type}]
+        end;
+    true ->
+        Headers = []
+    end,
+    send_error(Req, Code, Headers, ErrorStr, ReasonStr).
 
-send_error(Req, Code, Error, Msg) when is_atom(Error) ->
-    send_error(Req, Code, list_to_binary(atom_to_list(Error)), Msg);
-send_error(Req, Code, Error, Msg) when is_list(Msg) ->
-    case (catch list_to_binary(Msg)) of
-    Bin when is_binary(Bin) ->
-        send_error(Req, Code, Error, Bin);
-    _ ->
-        send_error(Req, Code, Error, io_lib:format("~p", [Msg]))
-    end;
-send_error(Req, Code, Error, Msg) when not is_binary(Error) ->
-    send_error(Req, Code, list_to_binary(io_lib:format("~p", [Error])), Msg);
-send_error(Req, Code, Error, Msg) when not is_binary(Msg) ->
-    send_error(Req, Code, Error, list_to_binary(io_lib:format("~p", [Msg])));
-send_error(Req, Code, Error, <<>>) ->
-    send_json(Req, Code, {[{<<"error">>, Error}]});
-send_error(Req, Code, Error, Msg) ->
-    send_json(Req, Code, {[{<<"error">>, Error}, {<<"reason">>, Msg}]}).
+send_error(Req, Code, ErrorStr, ReasonStr) ->
+    send_error(Req, Code, [], ErrorStr, ReasonStr).
     
+send_error(Req, Code, Headers, ErrorStr, ReasonStr) ->
+    send_json(Req, Code, Headers,
+        {[{<<"error">>,  ErrorStr},
+         {<<"reason">>, ReasonStr}]}).
 
 
 negotiate_content_type(#httpd{mochi_req=MochiReq}) ->

Modified: couchdb/branches/rep_security/src/couchdb/couch_httpd_db.erl
URL: http://svn.apache.org/viewvc/couchdb/branches/rep_security/src/couchdb/couch_httpd_db.erl?rev=749468&r1=749467&r2=749468&view=diff
==============================================================================
--- couchdb/branches/rep_security/src/couchdb/couch_httpd_db.erl (original)
+++ couchdb/branches/rep_security/src/couchdb/couch_httpd_db.erl Mon Mar  2 23:20:44 2009
@@ -85,7 +85,7 @@
     send_json(Req, 201, [{"Location", DocUrl}], {[
         {ok, true},
         {id, DocId},
-        {rev, couch_doc:to_rev_str(NewRev)}
+        {rev, couch_doc:rev_to_str(NewRev)}
     ]});
 
 db_req(#httpd{path_parts=[_DbName]}=Req, _Db) ->
@@ -129,25 +129,33 @@
                 Doc#doc{id=Id,revs=Revs}
             end,
             DocsArray),
-        {ok, ResultRevs, _Conflicts} = couch_db:update_docs(Db, Docs, Options),
+        {ok, Results} = couch_db:update_docs(Db, Docs, Options),
 
         % output the results
         DocResults = lists:zipwith(
-            fun(Doc, NewRev) ->
-                {[{<<"id">>, Doc#doc.id}, {<<"rev">>, couch_doc:to_rev_str(NewRev)}]}
+            fun(Doc, {ok, NewRev}) ->
+                {[{<<"id">>, Doc#doc.id}, {<<"rev">>, couch_doc:rev_to_str(NewRev)}]};
+            (Doc, Error) ->
+                {_Code, Err, Msg} = couch_httpd:error_info(Error),
+                % maybe we should add the http error code to the json?
+                {[{<<"id">>, Doc#doc.id}, {<<"error">>, Err}, {<<"reason">>, Msg}]}
             end,
-            Docs, ResultRevs),
-        send_json(Req, 201, {[
-            {ok, true},
-            {new_revs, DocResults}
-        ]});
+            Docs, Results),
+        send_json(Req, 201, DocResults);
 
     false ->
         Docs = [couch_doc:from_json_obj(JsonObj) || JsonObj <- DocsArray],
-        ok = couch_db:update_docs(Db, Docs, Options, replicated_changes),
-        send_json(Req, 201, {[
-            {ok, true}
-        ]})
+        {ok, Errors} = couch_db:update_docs(Db, Docs, Options, replicated_changes),
+        ErrorsJson = 
+            lists:map(
+                fun({{Id, Rev}, Error}) ->
+                    {_Code, Err, Msg} = couch_httpd:error_info(Error),
+                    {[{<<"id">>, Id},
+                        {<<"rev">>, couch_doc:rev_to_str(Rev)},
+                        {<<"error">>, Err},
+                        {<<"reason">>, Msg}]}
+                end, Errors),
+        send_json(Req, 201, ErrorsJson)
     end;
 db_req(#httpd{path_parts=[_,<<"_bulk_docs">>]}=Req, _Db) ->
     send_method_not_allowed(Req, "POST");
@@ -165,7 +173,7 @@
     
     case couch_db:purge_docs(Db, IdsRevs2) of
     {ok, PurgeSeq, PurgedIdsRevs} ->
-        PurgedIdsRevs2 = [{Id, couch_doc:to_rev_strs(Revs)} || {Id, Revs} <- PurgedIdsRevs],
+        PurgedIdsRevs2 = [{Id, couch_doc:rev_to_strs(Revs)} || {Id, Revs} <- PurgedIdsRevs],
         send_json(Req, 200, {[{<<"purge_seq">>, PurgeSeq}, {<<"purged">>, {PurgedIdsRevs2}}]});
     Error ->
         throw(Error)
@@ -217,14 +225,14 @@
                 deleted_conflict_revs=DelConflictRevs
             } = DocInfo,
             Json = {
-                [{<<"rev">>, couch_doc:to_rev_str(Rev)}] ++
+                [{<<"rev">>, couch_doc:rev_to_str(Rev)}] ++
                 case ConflictRevs of
                     []  ->  [];
-                    _   ->  [{<<"conflicts">>, couch_doc:to_rev_strs(ConflictRevs)}]
+                    _   ->  [{<<"conflicts">>, couch_doc:rev_to_strs(ConflictRevs)}]
                 end ++
                 case DelConflictRevs of
                     []  ->  [];
-                    _   ->  [{<<"deleted_conflicts">>, couch_doc:to_rev_strs(DelConflictRevs)}]
+                    _   ->  [{<<"deleted_conflicts">>, couch_doc:rev_to_strs(DelConflictRevs)}]
                 end ++
                 case Deleted of
                     true -> [{<<"deleted">>, true}];
@@ -242,7 +250,7 @@
     {JsonDocIdRevs} = couch_httpd:json_body(Req),
     JsonDocIdRevs2 = [{Id, [couch_doc:parse_rev(RevStr) || RevStr <- RevStrs]} || {Id, RevStrs} <- JsonDocIdRevs],
     {ok, Results} = couch_db:get_missing_revs(Db, JsonDocIdRevs2),
-    Results2 = [{Id, [couch_doc:to_rev_str(Rev) || Rev <- Revs]} || {Id, Revs} <- Results],
+    Results2 = [{Id, [couch_doc:rev_to_str(Rev) || Rev <- Revs]} || {Id, Revs} <- Results],
     send_json(Req, {[
         {missing_revs, {Results2}}
     ]});
@@ -323,7 +331,7 @@
         AdapterFun = fun(#full_doc_info{id=Id}=FullDocInfo, Offset, Acc) ->
             case couch_doc:to_doc_info(FullDocInfo) of
             #doc_info{deleted=false, rev=Rev} ->
-                FoldlFun({{Id, Id}, {[{rev, couch_doc:to_rev_str(Rev)}]}}, Offset, Acc);
+                FoldlFun({{Id, Id}, {[{rev, couch_doc:rev_to_str(Rev)}]}}, Offset, Acc);
             #doc_info{deleted=true} ->
                 {ok, Acc}
             end
@@ -347,9 +355,9 @@
                 DocInfo = (catch couch_db:get_doc_info(Db, Key)),
                 Doc = case DocInfo of
                 {ok, #doc_info{id=Id, rev=Rev, deleted=false}} = DocInfo ->
-                    {{Id, Id}, {[{rev, couch_doc:to_rev_str(Rev)}]}};
+                    {{Id, Id}, {[{rev, couch_doc:rev_to_str(Rev)}]}};
                 {ok, #doc_info{id=Id, rev=Rev, deleted=true}} = DocInfo ->
-                    {{Id, Id}, {[{rev, couch_doc:to_rev_str(Rev)}, {deleted, true}]}};
+                    {{Id, Id}, {[{rev, couch_doc:rev_to_str(Rev)}, {deleted, true}]}};
                 not_found ->
                     {{Key, error}, not_found};
                 _ ->
@@ -458,13 +466,13 @@
     % open revision Rev or Current  
     Doc = couch_doc_open(Db, SourceDocId, SourceRev, []),
     % save new doc
-    {ok, NewTargetRev} = couch_db:update_doc(Db, Doc#doc{id=TargetDocId, revs=TargetRevs}, []),
-
-    send_json(Req, 201, [{"Etag", "\"" ++ ?b2l(couch_doc:to_rev_str(NewTargetRev)) ++ "\""}], {[
-        {ok, true},
-        {id, TargetDocId},
-        {rev, couch_doc:to_rev_str(NewTargetRev)}
-    ]});
+    case couch_db:update_doc(Db, Doc#doc{id=TargetDocId, revs=TargetRevs}, []) of
+    {ok, NewTargetRev} ->
+        send_json(Req, 201, [{"Etag", "\"" ++ ?b2l(couch_doc:rev_to_str(NewTargetRev)) ++ "\""}],
+            update_result_to_json({ok, NewTargetRev}));
+    Error ->
+        throw(Error)
+    end;
 
 db_doc_req(#httpd{method='MOVE'}=Req, Db, SourceDocId) ->
     SourceRev = {SourceRevPos, SourceRevId} =
@@ -481,25 +489,25 @@
 
     % save new doc & delete old doc in one operation
     Docs = [
-        Doc#doc{id=TargetDocId, revs=TargetRevs},
-        #doc{id=SourceDocId, revs={SourceRevPos, [SourceRevId]}, deleted=true}
+        #doc{id=SourceDocId, revs={SourceRevPos, [SourceRevId]}, deleted=true},
+        Doc#doc{id=TargetDocId, revs=TargetRevs}
         ],
-
-    {ok, ResultRevs, _} = couch_db:update_docs(Db, Docs, []),
-
-    DocResults = lists:zipwith(
-        fun(FDoc, NewRev) ->
-            {[{id, FDoc#doc.id}, {rev, couch_doc:to_rev_str(NewRev)}]}
-        end,
-        Docs, ResultRevs),
+    {ok, [SourceResult, TargetResult]} = couch_db:update_docs(Db, Docs, []),
+    
     send_json(Req, 201, {[
-        {ok, true},
-        {new_revs, DocResults}
+        {SourceDocId, update_result_to_json(SourceResult)},
+        {TargetDocId, update_result_to_json(TargetResult)}
     ]});
 
 db_doc_req(Req, _Db, _DocId) ->
     send_method_not_allowed(Req, "DELETE,GET,HEAD,POST,PUT,COPY,MOVE").
 
+update_result_to_json({ok, NewRev}) ->
+    {[{rev, couch_doc:rev_to_str(NewRev)}]};
+update_result_to_json(Error) ->
+    {_Code, ErrorStr, Reason} = couch_httpd:error_info(Error),
+    {[{error, ErrorStr}, {reason, Reason}]}.
+
 
 update_doc(Req, Db, DocId, Json) ->
     #doc{deleted=Deleted} = Doc = couch_doc:from_json_obj(Json),
@@ -522,7 +530,7 @@
         Options = []
     end,
     {ok, NewRev} = couch_db:update_doc(Db, Doc#doc{id=DocId, revs=Revs}, Options),
-    NewRevStr = couch_doc:to_rev_str(NewRev),
+    NewRevStr = couch_doc:rev_to_str(NewRev),
     send_json(Req, if Deleted -> 200; true -> 201 end,
         [{"Etag", <<"\"", NewRevStr/binary, "\"">>}], {[
             {ok, true},
@@ -621,7 +629,7 @@
     send_json(Req, case Method of 'DELETE' -> 200; _ -> 201 end, {[
         {ok, true},
         {id, DocId},
-        {rev, couch_doc:to_rev_str(UpdatedRev)}
+        {rev, couch_doc:rev_to_str(UpdatedRev)}
     ]});
 
 db_attachment_req(Req, _Db, _DocId, _FileNameParts) ->

Modified: couchdb/branches/rep_security/src/couchdb/couch_rep.erl
URL: http://svn.apache.org/viewvc/couchdb/branches/rep_security/src/couchdb/couch_rep.erl?rev=749468&r1=749467&r2=749468&view=diff
==============================================================================
--- couchdb/branches/rep_security/src/couchdb/couch_rep.erl (original)
+++ couchdb/branches/rep_security/src/couchdb/couch_rep.erl Mon Mar  2 23:20:44 2009
@@ -212,7 +212,7 @@
 save_docs_buffer(DbTarget, DocsBuffer, []) ->
     receive
     {Src, shutdown} ->
-        ok = update_docs(DbTarget, lists:reverse(DocsBuffer), [], replicated_changes),
+        {ok, _UpdateErrors} = update_docs(DbTarget, lists:reverse(DocsBuffer), [], replicated_changes),
         Src ! {done, self(), [{<<"docs_written">>, length(DocsBuffer)}]}
     end;
 save_docs_buffer(DbTarget, DocsBuffer, UpdateSequences) ->
@@ -225,7 +225,7 @@
         Src ! got_it,
         case couch_util:should_flush() of
             true ->
-                ok = update_docs(DbTarget, lists:reverse(Docs++DocsBuffer), [], 
+                {ok, _UpdateErrors} = update_docs(DbTarget, lists:reverse(Docs++DocsBuffer), [], 
                     replicated_changes),
                 save_docs_buffer(DbTarget, [], Rest);
             false ->
@@ -233,7 +233,7 @@
         end;
         {Src, shutdown} ->
         ?LOG_ERROR("received shutdown while waiting for more update_seqs", []),
-        ok = update_docs(DbTarget, lists:reverse(DocsBuffer), [], replicated_changes),
+        {ok, _Errors} = update_docs(DbTarget, lists:reverse(DocsBuffer), [], replicated_changes),
         Src ! {done, self(), [{<<"docs_written">>, length(DocsBuffer)}]}
     end.
 
@@ -401,7 +401,7 @@
     end.
 
 get_missing_revs(#http_db{uri=DbUrl, headers=Headers}, DocIdRevsList) ->
-    DocIdRevsList2 = [{Id, couch_doc:to_rev_strs(Revs)} || {Id, Revs} <- DocIdRevsList],
+    DocIdRevsList2 = [{Id, couch_doc:rev_to_strs(Revs)} || {Id, Revs} <- DocIdRevsList],
     {ResponseMembers} = do_http_request(DbUrl ++ "_missing_revs", post, Headers,
             {DocIdRevsList2}),
     {DocMissingRevsList} = proplists:get_value(<<"missing_revs">>, ResponseMembers),
@@ -422,15 +422,24 @@
     couch_db:update_doc(Db, Doc, Options).
 
 update_docs(_, [], _, _) ->
-    ok;
-update_docs(#http_db{uri=DbUrl, headers=Headers}, Docs, [], UpdateType) ->
-    NewEdits = UpdateType == interactive_edit,
+    {ok, []};
+update_docs(#http_db{uri=DbUrl, headers=Headers}, Docs, [], replicated_changes) ->
     JsonDocs = [couch_doc:to_json_obj(Doc, [revs,attachments]) || Doc <- Docs],
-    {Returned} =
+    ErrorsJson =
         do_http_request(DbUrl ++ "_bulk_docs", post, Headers,
-                {[{new_edits, NewEdits}, {docs, JsonDocs}]}),
-    true = proplists:get_value(<<"ok">>, Returned),
-    ok;
+                {[{new_edits, false}, {docs, JsonDocs}]}),
+    ErrorsList =
+    lists:map(
+        fun({Props}) ->
+            Id = proplists:get_value(<<"id">>, Props),
+            Rev = couch_doc:parse_rev(proplists:get_value(<<"rev">>, Props)),
+            ErrId = couch_util:to_existing_atom(
+                    proplists:get_value(<<"error">>, Props)),
+            Reason = proplists:get_value(<<"reason">>, Props),
+            Error = {ErrId, Reason},
+            {{Id, Rev}, Error}
+        end, ErrorsJson),
+    {ok, ErrorsList};
 update_docs(Db, Docs, Options, UpdateType) ->
     couch_db:update_docs(Db, Docs, Options, UpdateType).
 
@@ -448,7 +457,7 @@
 
 
 open_doc_revs(#http_db{uri=DbUrl, headers=Headers}, DocId, Revs0, Options) ->
-    Revs = couch_doc:to_rev_strs(Revs0),
+    Revs = couch_doc:rev_to_strs(Revs0),
     QueryOptionStrs =
     lists:map(fun(latest) ->
             % latest is only option right now

Modified: couchdb/branches/rep_security/src/couchdb/couch_util.erl
URL: http://svn.apache.org/viewvc/couchdb/branches/rep_security/src/couchdb/couch_util.erl?rev=749468&r1=749467&r2=749468&view=diff
==============================================================================
--- couchdb/branches/rep_security/src/couchdb/couch_util.erl (original)
+++ couchdb/branches/rep_security/src/couchdb/couch_util.erl Mon Mar  2 23:20:44 2009
@@ -13,7 +13,7 @@
 -module(couch_util).
 
 -export([start_driver/1]).
--export([should_flush/0, should_flush/1, to_existing_atom/1]).
+-export([should_flush/0, should_flush/1, to_existing_atom/1, to_binary/1]).
 -export([new_uuid/0, rand32/0, implode/2, collate/2, collate/3]).
 -export([abs_pathname/1,abs_pathname/2, trim/1, ascii_lower/1]).
 -export([encodeBase64/1, decodeBase64/1, to_hex/1,parse_term/1,dict_find/3]).
@@ -57,6 +57,19 @@
 to_digit(N)             -> $a + N-10.
 
 
+to_binary(V) when is_binary(V) ->
+    V;
+to_binary(V) when is_list(V) -> 
+    try list_to_binary(V)
+    catch
+        _:_ -> list_to_binary(io_lib:format("~p", [V]))
+    end;
+to_binary(V) when is_atom(V) ->
+    list_to_binary(atom_to_list(V));
+to_binary(V) ->
+    list_to_binary(io_lib:format("~p", [V])).
+
+
 parse_term(Bin) when is_binary(Bin)->
     parse_term(binary_to_list(Bin));
 parse_term(List) ->



Mime
View raw message