Posted to commits@couchdb.apache.org by da...@apache.org on 2009/02/07 06:23:04 UTC

svn commit: r741844 [1/2] - in /couchdb/branches/rep_security: ./ etc/couchdb/ share/www/script/ src/couchdb/

Author: damien
Date: Sat Feb  7 05:23:02 2009
New Revision: 741844

URL: http://svn.apache.org/viewvc?rev=741844&view=rev
Log:
Initial check-in of the replication security work, with new transaction code to optionally allow conflicts, and new revision trees with history stemming support. The replication security itself still doesn't work, the stemming isn't implemented yet, and transaction errors aren't handled cleanly.
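
The core data-model change is the revision format: a document's revs field becomes {Start, [RevId, ...]} with the newest id first, and a single revision is a {Pos, RevId} pair serialized as "Pos-RevId". A minimal sketch of the round trip, using the helpers added in couch_doc.erl below (the rev ids are invented for illustration):

    %% parse_rev/1 splits a "Pos-RevId" string into a {Pos, RevId} pair;
    %% to_rev_str/1 is the inverse. In memory, #doc.revs holds
    %% {Start, [NewestRevId, ..., OldestRevId]}.
    1> couch_doc:parse_rev(<<"2-3482634">>).
    {2,<<"3482634">>}
    2> couch_doc:to_rev_str({2, <<"3482634">>}).
    <<"2-3482634">>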

Added:
    couchdb/branches/rep_security/
      - copied from r741842, couchdb/trunk/
Modified:
    couchdb/branches/rep_security/etc/couchdb/local_dev.ini
    couchdb/branches/rep_security/share/www/script/couch.js
    couchdb/branches/rep_security/share/www/script/couch_tests.js
    couchdb/branches/rep_security/src/couchdb/couch_db.erl
    couchdb/branches/rep_security/src/couchdb/couch_db.hrl
    couchdb/branches/rep_security/src/couchdb/couch_db_updater.erl
    couchdb/branches/rep_security/src/couchdb/couch_doc.erl
    couchdb/branches/rep_security/src/couchdb/couch_httpd.erl
    couchdb/branches/rep_security/src/couchdb/couch_httpd_db.erl
    couchdb/branches/rep_security/src/couchdb/couch_httpd_misc_handlers.erl
    couchdb/branches/rep_security/src/couchdb/couch_httpd_show.erl
    couchdb/branches/rep_security/src/couchdb/couch_httpd_view.erl
    couchdb/branches/rep_security/src/couchdb/couch_key_tree.erl
    couchdb/branches/rep_security/src/couchdb/couch_rep.erl

Modified: couchdb/branches/rep_security/etc/couchdb/local_dev.ini
URL: http://svn.apache.org/viewvc/couchdb/branches/rep_security/etc/couchdb/local_dev.ini?rev=741844&r1=741842&r2=741844&view=diff
==============================================================================
--- couchdb/branches/rep_security/etc/couchdb/local_dev.ini (original)
+++ couchdb/branches/rep_security/etc/couchdb/local_dev.ini Sat Feb  7 05:23:02 2009
@@ -12,7 +12,7 @@
 ;bind_address = 127.0.0.1
 
 [log]
-level = error
+level = info
 
 [update_notification]
 ;unique notifier name=/full/path/to/exe -with "cmd line arg"

Modified: couchdb/branches/rep_security/share/www/script/couch.js
URL: http://svn.apache.org/viewvc/couchdb/branches/rep_security/share/www/script/couch.js?rev=741844&r1=741842&r2=741844&view=diff
==============================================================================
--- couchdb/branches/rep_security/share/www/script/couch.js [utf-8] (original)
+++ couchdb/branches/rep_security/share/www/script/couch.js [utf-8] Sat Feb  7 05:23:02 2009
@@ -269,9 +269,10 @@
   return JSON.parse(CouchDB.last_req.responseText).version;
 }
 
-CouchDB.replicate = function(source, target) {
+CouchDB.replicate = function(source, target, options) {
+  options = options || {};
   CouchDB.last_req = CouchDB.request("POST", "/_replicate", {
-    body: JSON.stringify({source: source, target: target})
+    body: JSON.stringify({source: source, target: target, "options": options})
   });
   CouchDB.maybeThrowError(CouchDB.last_req);
   return JSON.parse(CouchDB.last_req.responseText);

Modified: couchdb/branches/rep_security/share/www/script/couch_tests.js
URL: http://svn.apache.org/viewvc/couchdb/branches/rep_security/share/www/script/couch_tests.js?rev=741844&r1=741842&r2=741844&view=diff
==============================================================================
--- couchdb/branches/rep_security/share/www/script/couch_tests.js [utf-8] (original)
+++ couchdb/branches/rep_security/share/www/script/couch_tests.js [utf-8] Sat Feb  7 05:23:02 2009
@@ -968,7 +968,7 @@
     var xhr = CouchDB.request("GET", "/test_suite_db/bin_doc/foo.txt");
     T(xhr.responseText == "This is a base64 encoded text");
     T(xhr.getResponseHeader("Content-Type") == "text/plain");
-    T(xhr.getResponseHeader("Etag") == save_response.rev);
+    T(xhr.getResponseHeader("Etag") == '"' + save_response.rev + '"');
 
     // empty attachment
     var binAttDoc2 = {
@@ -2044,20 +2044,7 @@
 
     var doc = {integer: 1, string: "1", array: [1, 2, 3]};
     T(db.save(doc).ok);
-/*
-    // make sure that attempting to change the document throws an error
-    var results = db.query(function(doc) {
-      doc.integer = 2;
-      emit(null, doc);
-    });
-    T(results.total_rows == 0);
-
-    var results = db.query(function(doc) {
-      doc.array[0] = 0;
-      emit(null, doc);
-    });
-    T(results.total_rows == 0);
-*/
+    
     // make sure that a view cannot invoke interpreter internals such as the
     // garbage collector
     var results = db.query(function(doc) {
@@ -2166,7 +2153,7 @@
           };
         },
       
-       deletes_test: new function () {
+        deletes_test: new function () {
           this.init = function(dbA, dbB) {
             T(dbA.save({_id:"foo1",value:"a"}).ok);
           };
@@ -2850,7 +2837,10 @@
     
     /*
      purge is not to be confused with a document deletion.  It removes the
-     document and all edit history from the local instance of the database.
+     document and all edit history from the local instance of the database,
+     but the purge does not replicate. If the document is not also purged on
+     other nodes, it can replicate back into the purged database as if it had
+     never been purged.
     */
 
     var numDocs = 10;
@@ -3016,118 +3006,203 @@
     if (debug) debugger;
     
     run_on_modified_server(
-      [{section: "httpd",
-        key: "authentication_handler",
-        value: "{couch_httpd, special_test_authentication_handler}"},
-      {section:"httpd",
-        key: "WWW-Authenticate",
-        value:  "X-Couch-Test-Auth"}],
-        
-      function () {
+    [{section: "httpd",
+      key: "authentication_handler",
+      value: "{couch_httpd, special_test_authentication_handler}"},
+     {section:"httpd",
+      key: "WWW-Authenticate",
+      value:  "X-Couch-Test-Auth"}],
+      
+    function () {
+    // try saving a document using the wrong credentials
+    var wrongPasswordDb = new CouchDB("test_suite_db",
+      {"WWW-Authenticate": "X-Couch-Test-Auth Damien Katz:foo"}
+    );
+
+    try {
+      wrongPasswordDb.save({foo:1,author:"Damien Katz"});
+      T(false && "Can't get here. Should have thrown an error 1");
+    } catch (e) {
+      T(e.error == "unauthorized");
+      T(wrongPasswordDb.last_req.status == 401);
+    }
     
-        // try saving document usin the wrong credentials
-        var wrongPasswordDb = new CouchDB("test_suite_db",
-          {"WWW-Authenticate": "X-Couch-Test-Auth Damien Katz:foo"}
-        );
     
-        try {
-          wrongPasswordDb.save({foo:1,author:"Damien Katz"});
-          T(false && "Can't get here. Should have thrown an error 1");
-        } catch (e) {
-          T(e.error == "unauthorized");
-          T(wrongPasswordDb.last_req.status == 401);
+    // Create the design doc that will run custom validation code
+    var designDoc = {
+      _id:"_design/test",
+      language: "javascript",
+      validate_doc_update: "(" + (function (newDoc, oldDoc, userCtx) {
+        // docs should have an author field.
+        if (!newDoc._deleted && !newDoc.author) {
+          throw {forbidden:
+              "Documents must have an author field"};
         }
-        
-        
-        // Create the design doc that will run custom validation code
-        var designDoc = {
-          _id:"_design/test",
-          language: "javascript",
-          validate_doc_update: "(" + (function (newDoc, oldDoc, userCtx) {
-            // docs should have an author field.
-            if (!newDoc._deleted && !newDoc.author) {
-              throw {forbidden:
-                  "Documents must have an author field"};
-            }
-            if (oldDoc && oldDoc.author != userCtx.name) {
-                throw {unauthorized:
-                    "You are not the author of this document. You jerk."};
-            }
-          }).toString() + ")"
+        if (oldDoc && oldDoc.author != userCtx.name) {
+            throw {unauthorized:
+                "You are not the author of this document. You jerk."};
         }
+      }).toString() + ")"
+    }
 
-        // Save a document normally
-        var userDb = new CouchDB("test_suite_db",
-          {"WWW-Authenticate": "X-Couch-Test-Auth Damien Katz:pecan pie"}
-        );
-        
-        T(userDb.save({_id:"testdoc", foo:1, author:"Damien Katz"}).ok);
-        
-        // Attempt to save the design as a non-admin
-        try {
-          userDb.save(designDoc);
-          T(false && "Can't get here. Should have thrown an error on design doc");
-        } catch (e) {
-          T(e.error == "unauthorized");
-          T(userDb.last_req.status == 401);
-        }
-        
-        // add user as admin
-        db.setAdmins(["Damien Katz"]);
-        
-        T(userDb.save(designDoc).ok);
+    // Save a document normally
+    var userDb = new CouchDB("test_suite_db",
+      {"WWW-Authenticate": "X-Couch-Test-Auth Damien Katz:pecan pie"}
+    );
     
-        // update the document
-        var doc = userDb.open("testdoc");
-        doc.foo=2;
-        T(userDb.save(doc).ok);
-        
-        // Save a document that's missing an author field.
-        try {
-          userDb.save({foo:1});
-          T(false && "Can't get here. Should have thrown an error 2");
-        } catch (e) {
-          T(e.error == "forbidden");
-          T(userDb.last_req.status == 403);
-        }
+    T(userDb.save({_id:"testdoc", foo:1, author:"Damien Katz"}).ok);
     
-        // Now attempt to update the document as a different user, Jan 
-        var user2Db = new CouchDB("test_suite_db",
-          {"WWW-Authenticate": "X-Couch-Test-Auth Jan Lehnardt:apple"}
-        );
+    // Attempt to save the design as a non-admin
+    try {
+      userDb.save(designDoc);
+      T(false && "Can't get here. Should have thrown an error on design doc");
+    } catch (e) {
+      T(e.error == "unauthorized");
+      T(userDb.last_req.status == 401);
+    }
     
-        var doc = user2Db.open("testdoc");
-        doc.foo=3;
-        try {
-          user2Db.save(doc);
-          T(false && "Can't get here. Should have thrown an error 3");
-        } catch (e) {
-          T(e.error == "unauthorized");
-          T(user2Db.last_req.status == 401);
-        }
-        
-        // Now have Damien change the author to Jan
-        doc = userDb.open("testdoc");
-        doc.author="Jan Lehnardt";
-        T(userDb.save(doc).ok);
-        
-        // Now update the document as Jan
-        doc = user2Db.open("testdoc");
-        doc.foo = 3;
-        T(user2Db.save(doc).ok);
-        
-        // Damien can't delete it
-        try {
-          userDb.deleteDoc(doc);
-          T(false && "Can't get here. Should have thrown an error 4");
-        } catch (e) {
-          T(e.error == "unauthorized");
-          T(userDb.last_req.status == 401);
-        }
+    // add user as admin
+    db.setAdmins(["Damien Katz"]);
+    
+    T(userDb.save(designDoc).ok);
+
+    // update the document
+    var doc = userDb.open("testdoc");
+    doc.foo=2;
+    T(userDb.save(doc).ok);
+    
+    // Save a document that's missing an author field.
+    try {
+      userDb.save({foo:1});
+      T(false && "Can't get here. Should have thrown an error 2");
+    } catch (e) {
+      T(e.error == "forbidden");
+      T(userDb.last_req.status == 403);
+    }
+
+    // Now attempt to update the document as a different user, Jan 
+    var user2Db = new CouchDB("test_suite_db",
+      {"WWW-Authenticate": "X-Couch-Test-Auth Jan Lehnardt:apple"}
+    );
+
+    var doc = user2Db.open("testdoc");
+    doc.foo=3;
+    try {
+      user2Db.save(doc);
+      T(false && "Can't get here. Should have thrown an error 3");
+    } catch (e) {
+      T(e.error == "unauthorized");
+      T(user2Db.last_req.status == 401);
+    }
+    
+    // Now have Damien change the author to Jan
+    doc = userDb.open("testdoc");
+    doc.author="Jan Lehnardt";
+    T(userDb.save(doc).ok);
+    
+    // Now update the document as Jan
+    doc = user2Db.open("testdoc");
+    doc.foo = 3;
+    T(user2Db.save(doc).ok);
+    
+    // Damien can't delete it
+    try {
+      userDb.deleteDoc(doc);
+      T(false && "Can't get here. Should have thrown an error 4");
+    } catch (e) {
+      T(e.error == "unauthorized");
+      T(userDb.last_req.status == 401);
+    }
+    
+    // Now delete document
+    T(user2Db.deleteDoc(doc).ok);
+    var AuthHeaders = {"WWW-Authenticate": "X-Couch-Test-Auth Christopher Lenz:dog food"};
+    var host = CouchDB.host;
+    var dbPairs = [
+      {source:"test_suite_db_a",
+        target:"test_suite_db_b",
+        options:{}},
         
-        // Now delete document
-        T(user2Db.deleteDoc(doc).ok);
-      });
+      {source:"test_suite_db_a",
+        target:"http://" + host + "/test_suite_db_b",
+        options: {target_headers: AuthHeaders}},
+            
+      {source:"http://" + host + "/test_suite_db_a",
+        target:"test_suite_db_b",
+        options: {source_headers: AuthHeaders}},
+            
+      {source:"http://" + host + "/test_suite_db_a",
+        target:"http://" + host + "/test_suite_db_b",
+        options:{source_headers: AuthHeaders, target_headers: AuthHeaders}},
+    ]
+    var adminDbA = new CouchDB("test_suite_db_a");
+    var adminDbB = new CouchDB("test_suite_db_b");
+    var dbA = new CouchDB("test_suite_db_a",
+        {"WWW-Authenticate": "X-Couch-Test-Auth Christopher Lenz:dog food"});
+    var dbB = new CouchDB("test_suite_db_b",
+        {"WWW-Authenticate": "X-Couch-Test-Auth Christopher Lenz:dog food"});
+    var numDocs = 10;
+    var xhr;
+    for (var testPair = 0; testPair < dbPairs.length; testPair++) {
+      var A = dbPairs[testPair].source
+      var B = dbPairs[testPair].target
+      var Options = dbPairs[testPair].options
+
+      adminDbA.deleteDb();
+      adminDbA.createDb();
+      adminDbB.deleteDb();
+      adminDbB.createDb();
+      
+      // save and replicate documents that will and will not pass our design
+      // doc validation function.
+      dbA.save({_id:"foo1",value:"a",author:"Noah Slater"});
+      dbA.save({_id:"foo2",value:"a",author:"Christopher Lenz"});
+      dbA.save({_id:"bad1",value:"a"});
+
+      T(CouchDB.replicate(A, B).ok);
+      T(CouchDB.replicate(B, A).ok);
+
+      T(dbA.open("foo1"));
+      T(dbB.open("foo1"));
+      T(dbA.open("foo2"));
+      T(dbB.open("foo2"));
+      
+      // save the design doc to dbA
+      delete designDoc._rev; // clear rev from previous saves
+      adminDbA.save(designDoc);
+
+      // no effect on already saved docs
+      T(dbA.open("bad1"));
+      
+      // Update some docs on dbB. Since the design hasn't replicated, anything
+      // is allowed.
+      
+      // this edit will fail validation on replication to dbA (no author)
+      T(dbB.save({_id:"bad2",value:"a"}).ok);
+      
+      // this edit will fail security on replication to dbA (wrong author
+      //  replicating the change)
+      var foo1 = dbB.open("foo1");
+      foo1.value = "b";
+      dbB.save(foo1);
+      
+      // this is a legal edit
+      var foo2 = dbB.open("foo2");
+      foo2.value = "b";
+      dbB.save(foo2);
+      
+      T(CouchDB.replicate(B, A).ok);
+      
+      // bad2 should not be on dbA
+      T(dbA.open("bad2") == null);
+      
+      // The edit to foo1 should not have replicated.
+      T(dbA.open("foo1").value == "a");
+      
+      // The edit to foo2 should have replicated.
+      T(dbA.open("foo2").value == "b");
+    }
+    });
   },
   
   

Modified: couchdb/branches/rep_security/src/couchdb/couch_db.erl
URL: http://svn.apache.org/viewvc/couchdb/branches/rep_security/src/couchdb/couch_db.erl?rev=741844&r1=741842&r2=741844&view=diff
==============================================================================
--- couchdb/branches/rep_security/src/couchdb/couch_db.erl (original)
+++ couchdb/branches/rep_security/src/couchdb/couch_db.erl Sat Feb  7 05:23:02 2009
@@ -15,7 +15,7 @@
 
 -export([open/2,close/1,create/2,start_compact/1,get_db_info/1]).
 -export([open_ref_counted/2,num_refs/1,monitor/1,count_changes_since/2]).
--export([update_doc/3,update_docs/4,update_docs/2,update_docs/3,delete_doc/3]).
+-export([update_doc/3,update_docs/4,update_docs/2,update_docs/3]).
 -export([get_doc_info/2,open_doc/2,open_doc/3,open_doc_revs/4]).
 -export([get_missing_revs/2,name/1,doc_to_tree/1,get_update_seq/1,get_committed_update_seq/1]).
 -export([enum_docs/4,enum_docs/5,enum_docs_since/4,enum_docs_since/5]).
@@ -84,11 +84,6 @@
 start_compact(#db{update_pid=Pid}) ->
     gen_server:cast(Pid, start_compact).
 
-delete_doc(Db, Id, Revisions) ->
-    DeletedDocs = [#doc{id=Id, revs=[Rev], deleted=true} || Rev <- Revisions],
-    {ok, [Result]} = update_docs(Db, DeletedDocs, []),
-    {ok, Result}.
-
 open_doc(Db, IdOrDocInfo) ->
     open_doc(Db, IdOrDocInfo, []).
 
@@ -197,8 +192,12 @@
     Name.
     
 update_doc(Db, Doc, Options) ->
-    {ok, [NewRev]} = update_docs(Db, [Doc], Options),
-    {ok, NewRev}.
+    case update_docs(Db, [Doc], Options) of
+    {ok, [NewRev], _} ->
+        {ok, NewRev};
+    {conflicts, [ConflictRev]} ->
+        throw({conflict, ConflictRev})
+    end.
 
 update_docs(Db, Docs) ->
     update_docs(Db, Docs, []).
@@ -252,117 +251,186 @@
     Doc.
 
 
-prep_and_validate_new_edit(Db, #doc{id=Id,revs=[NewRev|PrevRevs]}=Doc,
+prep_and_validate_update(Db, #doc{id=Id,revs={RevStart, [_NewRev|PrevRevs]}}=Doc,
         OldFullDocInfo, LeafRevsDict) ->
+    NilDocFun = fun() -> nil end,
     case PrevRevs of
     [PrevRev|_] ->
-        case dict:find(PrevRev, LeafRevsDict) of
-        {ok, {Deleted, Sp, DiskRevs}} ->
-            Doc2 = Doc#doc{revs=[NewRev|DiskRevs]},
-            case couch_doc:has_stubs(Doc2) of
+        case dict:find({RevStart-1, PrevRev}, LeafRevsDict) of
+        {ok, {Deleted, DiskSp, DiskRevs}} ->
+            case couch_doc:has_stubs(Doc) of
             true ->
-                DiskDoc = make_doc(Db, Id, Deleted, Sp, DiskRevs),
-                Doc3 = couch_doc:merge_stubs(Doc2, DiskDoc),
-                validate_doc_update(Db, Doc3, fun() -> DiskDoc end);
+                DiskDoc = make_doc(Db, Id, Deleted, DiskSp, DiskRevs),
+                Doc2 = couch_doc:merge_stubs(Doc, DiskDoc),
+                {ok, validate_doc_update(Db, Doc2, fun() -> DiskDoc end)};
             false ->
-                LoadDiskDoc = fun() -> make_doc(Db,Id,Deleted,Sp,DiskRevs) end,
-                validate_doc_update(Db, Doc2, LoadDiskDoc)
+                LoadDiskDoc = fun() -> make_doc(Db,Id,Deleted,DiskSp,DiskRevs) end,
+                {ok, validate_doc_update(Db, Doc, LoadDiskDoc)}
             end;
         error ->
-            throw(conflict)
+            {conflict, validate_doc_update(Db, Doc, NilDocFun)}
         end;
     [] ->
         % new doc, and we have existing revs.
         if OldFullDocInfo#full_doc_info.deleted ->
             % existing docs are deletions
-            validate_doc_update(Db, Doc, nil);
+            {ok, validate_doc_update(Db, Doc, NilDocFun)};
         true ->
-            throw(conflict)
+            {conflict, validate_doc_update(Db, Doc, NilDocFun)}
         end
     end.
 
+
+
+prep_and_validate_updates(_Db, [], [], AccPrepped, AccConflicts) ->
+   {AccPrepped, AccConflicts};
+prep_and_validate_updates(Db, [DocBucket|RestBuckets], [not_found|RestLookups], AccPrepped, AccConflicts) ->
+    % no existing revs are known, make sure no old revs specified.
+    AccConflicts2 = [Doc || #doc{revs={_Start, [_NewRev, _OldRev | _]}}=Doc <- DocBucket] ++ AccConflicts,
+    AccPrepped2 = [[validate_doc_update(Db, Doc, fun()-> nil end) || Doc <- DocBucket] | AccPrepped],
+    prep_and_validate_updates(Db, RestBuckets, RestLookups, AccPrepped2, AccConflicts2);
+prep_and_validate_updates(Db, [DocBucket|RestBuckets],
+        [{ok, #full_doc_info{rev_tree=OldRevTree}=OldFullDocInfo}|RestLookups],
+        AccPrepped, AccConflicts) ->
+    Leafs = couch_key_tree:get_all_leafs(OldRevTree),
+    LeafRevsDict = dict:from_list([{{Start, RevId}, {Deleted, Sp, Revs}} ||
+            {{Deleted, Sp}, {Start, [RevId|_]}=Revs} <- Leafs]),
+    {Prepped, AccConflicts2} = lists:foldl(
+        fun(Doc, {Docs2Acc, Conflicts2Acc}) ->
+            case prep_and_validate_update(Db, Doc, OldFullDocInfo,
+                    LeafRevsDict) of
+            {ok, Doc} ->
+                {[Doc | Docs2Acc], Conflicts2Acc};
+            {conflict, Doc} ->
+                {[Doc | Docs2Acc], [Doc|Conflicts2Acc]}
+            end
+        end,
+        {[], AccConflicts}, DocBucket),
+    prep_and_validate_updates(Db, RestBuckets, RestLookups, [Prepped | AccPrepped], AccConflicts2).
+
+
 update_docs(#db{update_pid=UpdatePid}=Db, Docs, Options) ->
-    update_docs(#db{update_pid=UpdatePid}=Db, Docs, Options, true).
+    update_docs(#db{update_pid=UpdatePid}=Db, Docs, Options, interactive_edit).
+    
+should_validate(Db, Docs) ->
+    % true if our db has validation funs, there are design docs,
+    % or we have attachments.
+    (Db#db.validate_doc_funs /= []) orelse
+        lists:any(
+            fun(#doc{id= <<?DESIGN_DOC_PREFIX, _/binary>>}) ->
+                true;
+            (#doc{attachments=Atts}) ->
+                Atts /= []
+            end, Docs).
+
 
-update_docs(Db, Docs, Options, false) ->
+update_docs(Db, Docs, Options, replicated_changes) ->
     DocBuckets = group_alike_docs(Docs),
-    Ids = [Id || [#doc{id=Id}|_] <- DocBuckets],
     
-    ExistingDocs = get_full_doc_infos(Db, Ids),
+    case should_validate(Db, Docs) of
+    true ->
+        Ids = [Id || [#doc{id=Id}|_] <- DocBuckets],
+        ExistingDocs = get_full_doc_infos(Db, Ids),
     
-    DocBuckets2 = lists:zipwith(
-        fun(Bucket, not_found) ->
-            [validate_doc_update(Db, Doc, fun()-> nil end) || Doc <- Bucket];
-        (Bucket, {ok, #full_doc_info{rev_tree=OldRevTree}}) ->
-            NewTree = lists:foldl(
-                fun(Doc, RevTreeAcc) ->
-                    couch_key_tree:merge(RevTreeAcc, doc_to_tree(Doc))
-                end,
-                OldRevTree, Bucket),
-            Leafs = couch_key_tree:get_all_leafs_full(NewTree),
-            LeafRevsFullDict = dict:from_list( [{Rev, FullPath} || [{Rev, _}|_]=FullPath <- Leafs]),
-            lists:flatmap(
-                fun(#doc{revs=[Rev|_]}=Doc) ->
-                    case dict:find(Rev, LeafRevsFullDict) of
-                    {ok, [{Rev, #doc{id=Id}}|_]=Path} ->
-                        % our unflushed doc is a leaf node. Go back on the path 
-                        % to find the previous rev that's on disk.
-                        LoadPrevRev = fun() ->
-                            make_first_doc_on_disk(Db, Id, Path)
-                        end,
-                        [validate_doc_update(Db, Doc, LoadPrevRev)];
-                    _ ->
-                        % this doc isn't a leaf or is already exists in the tree. ignore
-                        []
-                    end
-                end, Bucket)
-        end,
-        DocBuckets, ExistingDocs),
-    write_and_commit(Db, DocBuckets2, Options);
+        DocBuckets2 = lists:zipwith(
+            fun(Bucket, not_found) ->
+                [validate_doc_update(Db, Doc, fun()-> nil end) || Doc <- Bucket];
+            (Bucket, {ok, #full_doc_info{rev_tree=OldTree}}) ->
+                NewRevTree = lists:foldl(
+                    fun(NewDoc, AccTree) ->
+                        {NewTree, _} = couch_key_tree:merge(AccTree, [couch_db:doc_to_tree(NewDoc)]),
+                        NewTree
+                    end,
+                    OldTree, Bucket),
+                Leafs = couch_key_tree:get_all_leafs_full(NewRevTree),
+                LeafRevsFullDict = dict:from_list( [{{Start, RevId}, FullPath} || {Start, [{RevId, _}|_]}=FullPath <- Leafs]),
+                lists:flatmap(
+                    fun(#doc{id=Id,revs={Pos, [RevId|_]}}=Doc) ->
+                        case dict:find({Pos, RevId}, LeafRevsFullDict) of
+                        {ok, {Start, Path}} ->
+                            % our unflushed doc is a leaf node. Go back on the path 
+                            % to find the previous rev that's on disk.
+                            LoadPrevRev = fun() ->
+                                make_first_doc_on_disk(Db, Id, Start - 1, tl(Path))
+                            end,
+                            [validate_doc_update(Db, Doc, LoadPrevRev)];
+                        _ ->
+                            % this doc isn't a leaf or already exists in the tree. ignore
+                            []
+                        end
+                    end, Bucket)
+            end,
+            DocBuckets, ExistingDocs),
+        DocBuckets3 = [Bucket || [_|_]=Bucket <- DocBuckets2]; % remove empty buckets
+    false ->
+        DocBuckets3 = DocBuckets
+    end,
+    {ok, _} = write_and_commit(Db, DocBuckets3, [merge_conflicts | Options]),
+    ok;
     
-update_docs(Db, Docs, Options, true) ->
-        % go ahead and generate the new revision ids for the documents.
+update_docs(Db, Docs, Options, interactive_edit) ->
+    % go ahead and generate the new revision ids for the documents.
     Docs2 = lists:map(
-        fun(#doc{id=Id,revs=Revs}=Doc) ->
+        fun(#doc{id=Id,revs={Start, RevIds}}=Doc) ->
             case Id of
             <<?LOCAL_DOC_PREFIX, _/binary>> ->
-                Rev = case Revs of [] -> 0; [Rev0|_] -> list_to_integer(binary_to_list(Rev0)) end,
-                Doc#doc{revs=[list_to_binary(integer_to_list(Rev + 1))]};
+                Rev = case RevIds of [] -> 0; [Rev0|_] -> list_to_integer(?b2l(Rev0)) end,
+                Doc#doc{revs={Start, [?l2b(integer_to_list(Rev + 1))]}};
             _ ->
-                Doc#doc{revs=[list_to_binary(integer_to_list(couch_util:rand32())) | Revs]}
+                Doc#doc{revs={Start+1, [?l2b(integer_to_list(couch_util:rand32())) | RevIds]}}
             end
         end, Docs),
     DocBuckets = group_alike_docs(Docs2),
-    Ids = [Id || [#doc{id=Id}|_] <- DocBuckets],
     
-    % lookup the doc by id and get the most recent
-    
-    ExistingDocs = get_full_doc_infos(Db, Ids),
+    case ((Db#db.validate_doc_funs /= []) orelse
+            lists:any(fun(#doc{id= <<?DESIGN_DOC_PREFIX, _/binary>>}) -> true;
+                            (Doc) -> couch_doc:has_stubs(Doc) end, Docs)) of
+    true ->
+        % lookup the doc by id and get the most recent
+        Ids = [Id || [#doc{id=Id}|_] <- DocBuckets],
+        ExistingDocInfos = get_full_doc_infos(Db, Ids),
+    
+        {DocBuckets2, PreConflicts} = prep_and_validate_updates(Db, DocBuckets, ExistingDocInfos, [], []),
+    
+        case PreConflicts of
+        [] ->
+            Continue = ok;
+        _ ->
+            case lists:member(merge_conflicts, Options) of
+            true -> Continue = ok;
+            false -> Continue = {conflicts, PreConflicts}
+            end
+        end;
+    false ->
+        Continue = ok,
+        DocBuckets2 = DocBuckets
+    end,
+    if Continue == ok ->
+        case write_and_commit(Db, DocBuckets2, Options) of
+        {ok, SavedConflicts} ->
+            {ok, docs_to_revs(Docs2), SavedConflicts};
+        {conflicts, Conflicts} ->
+            {conflicts, Conflicts}
+        end;
+    true ->
+        Continue
+    end.
     
-    DocBuckets2 = lists:zipwith(
-        fun(Bucket, not_found) ->
-            % no existing revs on disk, make sure no old revs specified.
-            [throw(conflict) || #doc{revs=[_NewRev, _OldRev | _]} <- Bucket],
-            [validate_doc_update(Db, Doc, fun()-> nil end) || Doc <- Bucket];
-        (Bucket, {ok, #full_doc_info{rev_tree=OldRevTree}=OldFullDocInfo}) ->
-            Leafs = couch_key_tree:get_all_leafs(OldRevTree),
-            LeafRevsDict = dict:from_list([{Rev, {Deleted, Sp, Revs}} || {Rev, {Deleted, Sp}, Revs} <- Leafs]),
-            [prep_and_validate_new_edit(Db, Doc, OldFullDocInfo, LeafRevsDict) || Doc <- Bucket]
-        end,
-        DocBuckets, ExistingDocs),
-    ok = write_and_commit(Db, DocBuckets2, [new_edits | Options]),
-    {ok, [NewRev ||#doc{revs=[NewRev|_]} <- Docs2]}.
 
+docs_to_revs([]) ->
+    [];
+docs_to_revs([#doc{revs={Start,[RevId|_]}} | Rest]) ->
+    [{Start, RevId} | docs_to_revs(Rest)].
 
 % Returns the first available document on disk. Input list is a full rev path
 % for the doc.
-make_first_doc_on_disk(_Db, _Id, []) ->
+make_first_doc_on_disk(_Db, _Id, _Pos, []) ->
     nil;
-make_first_doc_on_disk(Db, Id, [{_Rev, ?REV_MISSING}|RestPath]) ->
-    make_first_doc_on_disk(Db, Id, RestPath);
-make_first_doc_on_disk(Db, Id, [{_Rev, {IsDel, Sp}} |_]=DocPath) ->
+make_first_doc_on_disk(Db, Id, Pos, [{_Rev, ?REV_MISSING}|RestPath]) ->
+    make_first_doc_on_disk(Db, Id, Pos - 1, RestPath);
+make_first_doc_on_disk(Db, Id, Pos, [{_Rev, {IsDel, Sp}} |_]=DocPath) ->
     Revs = [Rev || {Rev, _} <- DocPath],
-    make_doc(Db, Id, IsDel, Sp, Revs).
+    make_doc(Db, Id, IsDel, Sp, {Pos, Revs}).
 
 
 write_and_commit(#db{update_pid=UpdatePid, user_ctx=Ctx}=Db, DocBuckets,
@@ -370,20 +438,20 @@
     % flush unwritten binaries to disk.
     DocBuckets2 = [[doc_flush_binaries(Doc, Db#db.fd) || Doc <- Bucket] || Bucket <- DocBuckets],
     case gen_server:call(UpdatePid, {update_docs, DocBuckets2, Options}, infinity) of
-    ok -> ok;
+    {ok, SavedConflicts} -> {ok, SavedConflicts};
+    {conflicts, Conflicts} -> {conflicts, Conflicts};
     retry ->
         % This can happen if the db file we wrote to was swapped out by
-        % compaction. Retry writing to the current file
+        % compaction. Retry by reopening the db and writing to the current file
         {ok, Db2} = open_ref_counted(Db#db.main_pid, Ctx),
         DocBuckets3 = [[doc_flush_binaries(Doc, Db2#db.fd) || Doc <- Bucket] || Bucket <- DocBuckets],
         % We only retry once
         close(Db2),
         case gen_server:call(UpdatePid, {update_docs, DocBuckets3, Options}, infinity) of
-        ok -> ok;
+        {ok, SavedConflicts} -> {ok, SavedConflicts};
+        {conflicts, Conflicts} -> {conflicts, Conflicts};
         Else -> throw(Else)
-        end;
-    Else->
-        throw(Else)
+        end
     end.
 
 
@@ -467,7 +535,7 @@
     {Count, _DelCount} = couch_btree:final_reduce(
             fun couch_db_updater:btree_by_id_reduce/2, Reds),
     Count.
-
+    
 count_changes_since(Db, SinceSeq) ->
     {ok, Changes} = 
     couch_btree:fold_reduce(Db#db.docinfo_by_seq_btree,
@@ -479,7 +547,7 @@
         end,
         ok),
     Changes.
-    
+
 enum_docs_since(Db, SinceSeq, Direction, InFun, Ctx) ->
     couch_btree:fold(Db#db.docinfo_by_seq_btree, SinceSeq + 1, Direction, InFun, Ctx).
 
@@ -550,11 +618,11 @@
                     end
                 end,
                 FoundResults =
-                lists:map(fun({Rev, Value, FoundRevPath}) ->
+                lists:map(fun({Value, {Pos, [Rev|_]}=FoundRevPath}) ->
                     case Value of
                     ?REV_MISSING ->
                         % we have the rev in our list but know nothing about it
-                        {{not_found, missing}, Rev};
+                        {{not_found, missing}, {Pos, Rev}};
                     {IsDeleted, SummaryPtr} ->
                         {ok, make_doc(Db, Id, IsDeleted, SummaryPtr, FoundRevPath)}
                     end
@@ -572,18 +640,18 @@
 open_doc_int(Db, <<?LOCAL_DOC_PREFIX, _/binary>> = Id, _Options) ->
     case couch_btree:lookup(Db#db.local_docs_btree, [Id]) of
     [{ok, {_, {Rev, BodyData}}}] ->
-        {ok, #doc{id=Id, revs=[list_to_binary(integer_to_list(Rev))], body=BodyData}};
+        {ok, #doc{id=Id, revs={0, [list_to_binary(integer_to_list(Rev))]}, body=BodyData}};
     [not_found] ->
         {not_found, missing}
     end;
-open_doc_int(Db, #doc_info{id=Id,rev=Rev,deleted=IsDeleted,summary_pointer=Sp}=DocInfo, Options) ->
-    Doc = make_doc(Db, Id, IsDeleted, Sp, [Rev]),
+open_doc_int(Db, #doc_info{id=Id,rev={Pos,RevId},deleted=IsDeleted,summary_pointer=Sp}=DocInfo, Options) ->
+    Doc = make_doc(Db, Id, IsDeleted, Sp, {Pos,[RevId]}),
     {ok, Doc#doc{meta=doc_meta_info(DocInfo, [], Options)}};
 open_doc_int(Db, #full_doc_info{id=Id,rev_tree=RevTree}=FullDocInfo, Options) ->
     #doc_info{deleted=IsDeleted,rev=Rev,summary_pointer=Sp} = DocInfo =
         couch_doc:to_doc_info(FullDocInfo),
-    {[{_Rev,_Value, Revs}], []} = couch_key_tree:get(RevTree, [Rev]),
-    Doc = make_doc(Db, Id, IsDeleted, Sp, Revs),
+    {[{_, RevPath}], []} = couch_key_tree:get(RevTree, [Rev]),
+    Doc = make_doc(Db, Id, IsDeleted, Sp, RevPath),
     {ok, Doc#doc{meta=doc_meta_info(DocInfo, RevTree, Options)}};
 open_doc_int(Db, Id, Options) ->
     case get_full_doc_info(Db, Id) of
@@ -597,9 +665,10 @@
     case lists:member(revs_info, Options) of
     false -> [];
     true ->
-        {[RevPath],[]} = 
+        {[{Pos, RevPath}],[]} = 
             couch_key_tree:get_full_key_paths(RevTree, [DocInfo#doc_info.rev]),
-        [{revs_info, lists:map(
+        
+        [{revs_info, Pos, lists:map(
             fun({Rev, {true, _Sp}}) -> 
                 {Rev, deleted};
             ({Rev, {false, _Sp}}) ->
@@ -626,13 +695,15 @@
     end.
 
 
-doc_to_tree(Doc) ->
-    doc_to_tree(Doc, lists:reverse(Doc#doc.revs)).
+doc_to_tree(#doc{revs={Start, RevIds}}=Doc) ->
+    [Tree] = doc_to_tree_simple(Doc, lists:reverse(RevIds)),
+    {Start - length(RevIds) + 1, Tree}.
+
 
-doc_to_tree(Doc, [RevId]) ->
+doc_to_tree_simple(Doc, [RevId]) ->
     [{RevId, Doc, []}];
-doc_to_tree(Doc, [RevId | Rest]) ->
-    [{RevId, ?REV_MISSING, doc_to_tree(Doc, Rest)}].
+doc_to_tree_simple(Doc, [RevId | Rest]) ->
+    [{RevId, ?REV_MISSING, doc_to_tree_simple(Doc, Rest)}].
 
 make_doc(Db, FullDocInfo) ->
     {#doc_info{id=Id,deleted=Deleted,summary_pointer=Sp}, RevPath}
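
With these changes update_docs/3 returns {ok, Revs, SavedConflicts} (or {conflicts, Revs} when an edit loses), and update_doc/3 turns the losing case into a thrown {conflict, Rev}, which couch_httpd maps to a 409 below. A minimal sketch of a hypothetical caller under those assumptions:

    %% Sketch only: save one document and react to a lost edit.
    save_or_report(Db, Doc) ->
        try couch_db:update_doc(Db, Doc, []) of
            {ok, NewRev} ->
                {saved, NewRev}
        catch
            throw:{conflict, Rev} ->
                %% another edit won; Rev identifies the conflicting revision
                {rejected, Rev}
        end.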

Modified: couchdb/branches/rep_security/src/couchdb/couch_db.hrl
URL: http://svn.apache.org/viewvc/couchdb/branches/rep_security/src/couchdb/couch_db.hrl?rev=741844&r1=741842&r2=741844&view=diff
==============================================================================
--- couchdb/branches/rep_security/src/couchdb/couch_db.hrl (original)
+++ couchdb/branches/rep_security/src/couchdb/couch_db.hrl Sat Feb  7 05:23:02 2009
@@ -67,7 +67,7 @@
 -record(doc,
     {
     id = <<"">>,
-    revs = [],
+    revs = {0, []},
 
     % the json body object.
     body = {[]},

Modified: couchdb/branches/rep_security/src/couchdb/couch_db_updater.erl
URL: http://svn.apache.org/viewvc/couchdb/branches/rep_security/src/couchdb/couch_db_updater.erl?rev=741844&r1=741842&r2=741844&view=diff
==============================================================================
--- couchdb/branches/rep_security/src/couchdb/couch_db_updater.erl (original)
+++ couchdb/branches/rep_security/src/couchdb/couch_db_updater.erl Sat Feb  7 05:23:02 2009
@@ -41,19 +41,24 @@
 terminate(_Reason, Db) ->
     close_db(Db).
 
+docs_to_revs([]) ->
+    [];
+docs_to_revs([#doc{revs={Start,[RevId|_]}} | Rest]) ->
+    [{Start, RevId} | docs_to_revs(Rest)].
+
 handle_call(get_db, _From, Db) ->
     {reply, {ok, Db}, Db};
 handle_call({update_docs, DocActions, Options}, _From, Db) ->
     try update_docs_int(Db, DocActions, Options) of
-    {ok, Db2} ->
+    {ok, Conflicts, Db2} ->
         ok = gen_server:call(Db#db.main_pid, {db_updated, Db2}),
         couch_db_update_notifier:notify({updated, Db2#db.name}),
-        {reply, ok, Db2}
+        {reply, {ok, docs_to_revs(Conflicts)}, Db2}
     catch
         throw: retry ->
             {reply, retry, Db};
-        throw: conflict ->
-            {reply, conflict, Db}
+        throw: {conflicts, Conflicts} ->
+            {reply, {conflicts, docs_to_revs(Conflicts)}, Db}
     end;
 handle_call(full_commit, _From, #db{waiting_delayed_commit=nil}=Db) ->
     {reply, ok, Db}; % no data waiting, return ok immediately
@@ -362,40 +367,30 @@
         end, Unflushed),
     flush_trees(Db, RestUnflushed, [InfoUnflushed#full_doc_info{rev_tree=Flushed} | AccFlushed]).
 
-merge_rev_trees(_NoConflicts, [], [], AccNewInfos, AccSeq) ->
-    {ok, lists:reverse(AccNewInfos), AccSeq};
-merge_rev_trees(NoConflicts, [NewDocs|RestDocsList],
-        [OldDocInfo|RestOldInfo], AccNewInfos, AccSeq) ->
-    #full_doc_info{id=Id,rev_tree=OldTree}=OldDocInfo,
-    UpdatesRevTree = lists:foldl(
-        fun(NewDoc, AccTree) ->
-            couch_key_tree:merge(AccTree, couch_db:doc_to_tree(NewDoc))
+merge_rev_trees([], [], AccNewInfos, AccConflicts, AccSeq) ->
+    {ok, lists:reverse(AccNewInfos), AccConflicts, AccSeq};
+merge_rev_trees([NewDocs|RestDocsList],
+        [OldDocInfo|RestOldInfo], AccNewInfos, AccConflicts, AccSeq) ->
+    #full_doc_info{id=Id,rev_tree=OldTree,deleted=OldDeleted}=OldDocInfo,
+    {NewRevTree, NewConflicts} = lists:foldl(
+        fun(NewDoc, {AccTree, AccConflicts2}) ->
+            case couch_key_tree:merge(AccTree, [couch_db:doc_to_tree(NewDoc)]) of
+            {NewTree, conflicts} when not OldDeleted ->
+                {NewTree, [NewDoc | AccConflicts2]};
+            {NewTree, _} ->
+                {NewTree, AccConflicts2}
+            end
         end,
-        [], NewDocs),
-    NewRevTree = couch_key_tree:merge(OldTree, UpdatesRevTree),
+        {OldTree, AccConflicts}, NewDocs),
     if NewRevTree == OldTree ->
         % nothing changed
-        merge_rev_trees(NoConflicts, RestDocsList, RestOldInfo, AccNewInfos, AccSeq);
+        merge_rev_trees(RestDocsList, RestOldInfo, AccNewInfos,
+                NewConflicts, AccSeq);
     true ->
-        if NoConflicts andalso OldTree /= [] ->
-            OldConflicts = couch_key_tree:count_leafs(OldTree),
-            NewConflicts = couch_key_tree:count_leafs(NewRevTree),
-            if NewConflicts > OldConflicts ->
-                % if all the old docs are deletions, allow this new conflict
-                case [1 || {_Rev,{IsDel,_Sp},_Path} <- 
-                    couch_key_tree:get_all_leafs(OldTree), IsDel==false] of
-                [] ->
-                    ok;
-                _ ->
-                    throw(conflict)
-                end;
-            true -> ok
-            end;
-        true -> ok
-        end,
+        % we have updated the document, give it a new seq #
         NewInfo = #full_doc_info{id=Id,update_seq=AccSeq+1,rev_tree=NewRevTree},
-        merge_rev_trees(NoConflicts, RestDocsList,RestOldInfo, 
-                [NewInfo|AccNewInfos],AccSeq+1)
+        merge_rev_trees(RestDocsList,RestOldInfo, 
+                [NewInfo|AccNewInfos], NewConflicts, AccSeq+1)
     end.
 
 new_index_entries([], AccById, AccBySeq) ->
@@ -412,13 +407,11 @@
         docinfo_by_seq_btree = DocInfoBySeqBTree,
         update_seq = LastSeq
         } = Db,
-
     % separate out the NonRep documents from the rest of the documents
     {DocsList2, NonRepDocs} = lists:foldl(
-        fun([#doc{id=Id}=Doc | Rest]=Docs, {DocsListAcc, NonRepDocsAcc}) ->
+        fun([#doc{id=Id}=Doc | _]=Docs, {DocsListAcc, NonRepDocsAcc}) ->
             case Id of
-            <<?LOCAL_DOC_PREFIX, _/binary>> when Rest==[] ->
-                % when saving NR (non rep) documents, you can only save a single rev
+            <<?LOCAL_DOC_PREFIX, _/binary>> ->
                 {DocsListAcc, [Doc | NonRepDocsAcc]};
             Id->
                 {[Docs | DocsListAcc], NonRepDocsAcc}
@@ -438,18 +431,23 @@
         Ids, OldDocLookups),
     
     % Merge the new docs into the revision trees.
-    NoConflicts = lists:member(new_edits, Options),
-    {ok, NewDocInfos, NewSeq} = merge_rev_trees(NoConflicts, DocsList2, OldDocInfos, [], LastSeq),
+    {ok, NewDocInfos, Conflicts, NewSeq} =
+            merge_rev_trees(DocsList2, OldDocInfos, [], [], LastSeq),
+    
+    case (Conflicts /= []) and (not lists:member(merge_conflicts, Options)) of
+    true -> throw({conflicts, Conflicts});
+    false -> ok
+    end,
     
     RemoveSeqs =
-        [ OldSeq || {ok, #full_doc_info{update_seq=OldSeq}} <- OldDocLookups],
+        [OldSeq || {ok, #full_doc_info{update_seq=OldSeq}} <- OldDocLookups],
     
     % All regular documents are now ready to write.
     
     % Try to write the local documents first, a conflict might be generated
     {ok, Db2}  = update_local_docs(Db, NonRepDocs),
-    
-    % Write out the document summaries (they are stored in the nodes of the rev trees)
+    % Write out the document summaries (the bodies are stored in the nodes of
+    % the trees, the attachments are already written to disk)
     {ok, FlushedDocInfos} = flush_trees(Db2, NewDocInfos, []),
     
     {ok, InfoById, InfoBySeq} = new_index_entries(FlushedDocInfos, [], []),
@@ -463,6 +461,8 @@
         docinfo_by_seq_btree = DocInfoBySeqBTree2,
         update_seq = NewSeq},
     
+    % Check if we just updated any design documents, and update the validation
+    % funs if we did.
     case [1 || <<"_design/",_/binary>> <- Ids] of
     [] ->
         Db4 = Db3;
@@ -470,17 +470,17 @@
         Db4 = refresh_validate_doc_funs(Db3)
     end,
     
-    {ok, commit_data(Db4, not lists:member(full_commit, Options))}.
-
+    {ok, Conflicts, commit_data(Db4, not lists:member(full_commit, Options))}.
+    
 update_local_docs(#db{local_docs_btree=Btree}=Db, Docs) ->
     Ids = [Id || #doc{id=Id} <- Docs],
     OldDocLookups = couch_btree:lookup(Btree, Ids),
     BtreeEntries = lists:zipwith(
-        fun(#doc{id=Id,deleted=Delete,revs=Revs,body=Body}, OldDocLookup) ->
+        fun(#doc{id=Id,deleted=Delete,revs={0,Revs},body=Body}=Doc, OldDocLookup) ->
             NewRev =
             case Revs of
                 [] -> 0;
-                [RevStr|_] -> list_to_integer(binary_to_list(RevStr))
+                [RevStr] -> list_to_integer(?b2l(RevStr))
             end,
             OldRev =
             case OldDocLookup of
@@ -494,14 +494,14 @@
                     true  -> {remove, Id}
                 end;
             false ->
-                throw(conflict)
+                throw({conflicts, [Doc]})
             end
             
         end, Docs, OldDocLookups),
 
     BtreeIdsRemove = [Id || {remove, Id} <- BtreeEntries],
     BtreeIdsUpdate = [ByIdDocInfo || {update, ByIdDocInfo} <- BtreeEntries],
-
+    
     {ok, Btree2} =
         couch_btree:add_remove(Btree, BtreeIdsUpdate, BtreeIdsRemove),
 
@@ -553,6 +553,10 @@
 
 copy_rev_tree(_SrcFd, _DestFd, _DestStream, []) ->
     [];
+copy_rev_tree(SrcFd, DestFd, DestStream, [{Start, Tree} | RestTree]) ->
+    % root inner node, only copy info/data from leaf nodes
+    [Tree2] = copy_rev_tree(SrcFd, DestFd, DestStream, [Tree]),
+    [{Start, Tree2} | copy_rev_tree(SrcFd, DestFd, DestStream, RestTree)];
 copy_rev_tree(SrcFd, DestFd, DestStream, [{RevId, {IsDel, Sp}, []} | RestTree]) ->
     % This is a leaf node, copy it over
     NewSp = copy_raw_doc(SrcFd, Sp, DestFd, DestStream),
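
Conflict detection now happens during the tree merge rather than by counting leafs: merge_rev_trees above collects any document whose path created a new branch, and update_docs_int either keeps those branches (merge_conflicts, i.e. replicated changes) or throws {conflicts, Docs}. A minimal sketch of that flow, assuming couch_key_tree:merge/2 returns {MergedTree, conflicts} when a new leaf branch appears (only the conflicts atom is pattern-matched in the code above):

    %% Sketch only: fold new edits into an existing rev tree, collecting the
    %% docs that introduced a new branch, then accept or reject them.
    merge_docs(OldTree, NewDocs, MergeConflicts) ->
        {NewTree, Conflicts} = lists:foldl(
            fun(Doc, {TreeAcc, ConfAcc}) ->
                case couch_key_tree:merge(TreeAcc, [couch_db:doc_to_tree(Doc)]) of
                {MergedTree, conflicts} -> {MergedTree, [Doc | ConfAcc]};
                {MergedTree, _}         -> {MergedTree, ConfAcc}
                end
            end, {OldTree, []}, NewDocs),
        case {Conflicts, MergeConflicts} of
        {[], _}    -> {ok, NewTree};
        {_, true}  -> {ok, NewTree};            % keep conflicting branches
        {_, false} -> {conflicts, Conflicts}    % caller reports a conflict
        end.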

Modified: couchdb/branches/rep_security/src/couchdb/couch_doc.erl
URL: http://svn.apache.org/viewvc/couchdb/branches/rep_security/src/couchdb/couch_doc.erl?rev=741844&r1=741842&r2=741844&view=diff
==============================================================================
--- couchdb/branches/rep_security/src/couchdb/couch_doc.erl (original)
+++ couchdb/branches/rep_security/src/couchdb/couch_doc.erl Sat Feb  7 05:23:02 2009
@@ -12,41 +12,53 @@
 
 -module(couch_doc).
 
--export([to_doc_info/1,to_doc_info_path/1]).
+-export([to_doc_info/1,to_doc_info_path/1,parse_rev/1,parse_revs/1,to_rev_str/1,to_rev_strs/1]).
 -export([bin_foldl/3,bin_size/1,bin_to_binary/1,get_validate_doc_fun/1]).
 -export([from_json_obj/1,to_json_obj/2,has_stubs/1, merge_stubs/2]).
 
 -include("couch_db.hrl").
 
 % helpers used by to_json_obj
-to_json_rev([]) ->
+to_json_rev(0, []) ->
     [];
-to_json_rev(Revs) ->
-    [{<<"_rev">>, lists:nth(1, Revs)}].
+to_json_rev(Start, [FirstRevId|_]) ->
+    [{<<"_rev">>, ?l2b([integer_to_list(Start),"-",FirstRevId])}].
 
 to_json_body(true, _Body) ->
     [{<<"_deleted">>, true}];
 to_json_body(false, {Body}) ->
     Body.
 
-to_json_revs(Options, Revs) ->
+to_json_revisions(Options, Start, RevIds) ->
     case lists:member(revs, Options) of
     false -> [];
     true ->
-        [{<<"_revs">>, Revs}]
+        [{<<"_revisions">>, {[{<<"start">>, Start}, 
+                        {<<"ids">>, RevIds}]}}]
     end.
 
-to_json_revs_info(Meta) ->
+to_rev_str({Pos, RevId}) ->
+    ?l2b([integer_to_list(Pos),"-",RevId]).
+
+to_rev_strs([]) ->
+    [];
+to_rev_strs([{Pos, RevId}| Rest]) ->
+    [to_rev_str({Pos, RevId}) | to_rev_strs(Rest)].
+
+to_json_meta(Meta) ->
     lists:map(
-        fun({revs_info, RevsInfo}) ->
-            JsonRevsInfo =
-            [{[{rev, Rev}, {status, list_to_binary(atom_to_list(Status))}]} ||
-                {Rev, Status} <- RevsInfo],
+        fun({revs_info, Start, RevsInfo}) ->
+            {JsonRevsInfo, _Pos}  = lists:mapfoldl(
+                fun({RevId, Status}, PosAcc) ->
+                    JsonObj = {[{<<"rev">>, to_rev_str({PosAcc, RevId})},
+                        {<<"status">>, ?l2b(atom_to_list(Status))}]},
+                    {JsonObj, PosAcc - 1}
+                end, Start, RevsInfo),
             {<<"_revs_info">>, JsonRevsInfo};
         ({conflicts, Conflicts}) ->
-            {<<"_conflicts">>, Conflicts};
-        ({deleted_conflicts, Conflicts}) ->
-            {<<"_deleted_conflicts">>, Conflicts}
+            {<<"_conflicts">>, to_rev_strs(Conflicts)};
+        ({deleted_conflicts, DConflicts}) ->
+            {<<"_deleted_conflicts">>, to_rev_strs(DConflicts)}
         end, Meta).
 
 to_json_attachment_stubs(Attachments) ->
@@ -86,17 +98,55 @@
         to_json_attachment_stubs(Attachments)
     end.
 
-to_json_obj(#doc{id=Id,deleted=Del,body=Body,revs=Revs,meta=Meta}=Doc,Options)->
+to_json_obj(#doc{id=Id,deleted=Del,body=Body,revs={Start, RevIds},
+            meta=Meta}=Doc,Options)->
     {[{<<"_id">>, Id}] 
-        ++ to_json_rev(Revs) 
+        ++ to_json_rev(Start, RevIds)
         ++ to_json_body(Del, Body)
-        ++ to_json_revs(Options, Revs) 
-        ++ to_json_revs_info(Meta)
+        ++ to_json_revisions(Options, Start, RevIds) 
+        ++ to_json_meta(Meta)
         ++ to_json_attachments(Doc#doc.attachments, Options)
     }.
 
 from_json_obj({Props}) ->
-    {JsonBins} = proplists:get_value(<<"_attachments">>, Props, {[]}),
+    transfer_fields(Props, #doc{body=[]});
+
+from_json_obj(_Other) ->
+    throw({invalid_json_object, "Document must be a JSON object"}).
+
+parse_rev(Rev) when is_binary(Rev) ->
+    parse_rev(?b2l(Rev));
+parse_rev(Rev) ->
+    {Pos, [$- | RevId]} = lists:splitwith(fun($-) -> false; (_) -> true end, Rev),
+    {list_to_integer(Pos), ?l2b(RevId)}.
+
+parse_revs([]) ->
+    [];
+parse_revs([Rev | Rest]) ->
+    [parse_rev(Rev) | parse_revs(Rest)].
+
+
+transfer_fields([], #doc{body=Fields}=Doc) ->
+    % convert fields back to json object
+    Doc#doc{body={lists:reverse(Fields)}};
+    
+transfer_fields([{<<"_id">>, Id} | Rest], Doc) when is_binary(Id) ->
+    transfer_fields(Rest, Doc#doc{id=Id});
+    
+transfer_fields([{<<"_id">>, Id} | _Rest], _Doc) ->
+    ?LOG_DEBUG("Document id is not a string: ~p", [Id]),
+    throw({invalid_document_id, "Document id is not a string"});
+    
+transfer_fields([{<<"_rev">>, Rev} | Rest], #doc{revs={0, []}}=Doc) ->
+    {Pos, RevId} = parse_rev(Rev),
+    transfer_fields(Rest,
+            Doc#doc{revs={Pos, [RevId]}});
+            
+transfer_fields([{<<"_rev">>, _Rev} | Rest], Doc) ->
+    % we already got the rev from the _revisions
+    transfer_fields(Rest,Doc);
+    
+transfer_fields([{<<"_attachments">>, {JsonBins}} | Rest], Doc) ->
     Bins = lists:flatmap(fun({Name, {BinProps}}) ->
         case proplists:get_value(<<"stub">>, BinProps) of
         true ->
@@ -108,47 +158,40 @@
             [{Name, {Type, couch_util:decodeBase64(Value)}}]
         end
     end, JsonBins),
-    AllowedSpecialMembers = [<<"id">>, <<"revs">>, <<"rev">>, <<"attachments">>, <<"revs_info">>,
-        <<"conflicts">>, <<"deleted_conflicts">>, <<"deleted">>],
-    % collect all the doc-members that start with "_"
-    % if any aren't in the AllowedSpecialMembers list 
-    % then throw a doc_validation error
-    [case lists:member(Name, AllowedSpecialMembers) of
-        true ->
-            ok;
-        false ->
-            throw({doc_validation, io_lib:format("Bad special document member: _~s", [Name])})
-        end
-         || {<<$_,Name/binary>>, _Value} <- Props],
-    Revs =
-    case proplists:get_value(<<"_revs">>, Props, []) of
-    [] ->
-        case proplists:get_value(<<"_rev">>, Props) of
-        undefined -> [];
-        Rev -> [Rev]
-        end;
-    Revs0 ->
-        Revs0
-    end,
-    case proplists:get_value(<<"_id">>, Props, <<>>) of
-    Id when is_binary(Id) -> ok;
-    Id ->
-        ?LOG_DEBUG("Document id is not a string: ~p", [Id]),
-        throw({invalid_document_id, "Document id is not a string"})
+    transfer_fields(Rest, Doc#doc{attachments=Bins});
+    
+transfer_fields([{<<"_revisions">>, {Props}} | Rest], Doc) ->
+    RevIds = proplists:get_value(<<"ids">>, Props),
+    Start = proplists:get_value(<<"start">>, Props),
+    if not is_integer(Start) ->
+        throw({doc_validation, "_revisions.start isn't an integer."});
+    not is_list(RevIds) ->
+        throw({doc_validation, "_revisions.ids isn't an array."});
+    true ->
+        ok
     end,
+    [throw({doc_validation, "RevId isn't a string"}) ||
+            RevId <- RevIds, not is_binary(RevId)],
+    transfer_fields(Rest, Doc#doc{revs={Start, RevIds}});
     
-    % strip out the all props beginning with _
-    NewBody = {[{K, V} || {<<First,_/binary>>=K, V} <- Props, First /= $_]},
-    #doc{
-        id = Id,
-        revs = Revs,
-        deleted = proplists:get_value(<<"_deleted">>, Props, false),
-        body = NewBody,
-        attachments = Bins
-        };
+transfer_fields([{<<"_deleted">>, B} | Rest], Doc) when (B==true) or (B==false) ->
+    transfer_fields(Rest, Doc#doc{deleted=B});
 
-from_json_obj(_Other) ->
-    throw({invalid_json_object, "Document must be a JSON object"}).
+% ignored fields
+transfer_fields([{<<"_revs_info">>, _} | Rest], Doc) ->
+    transfer_fields(Rest, Doc);
+transfer_fields([{<<"_conflicts">>, _} | Rest], Doc) ->
+    transfer_fields(Rest, Doc);
+transfer_fields([{<<"_deleted_conflicts">>, _} | Rest], Doc) ->
+    transfer_fields(Rest, Doc);
+
+% unknown special field
+transfer_fields([{<<"_",Name/binary>>, _} | _], _) ->
+    throw({doc_validation,
+            ?l2b(io_lib:format("Bad special document member: _~s", [Name]))});
+            
+transfer_fields([Field | Rest], #doc{body=Fields}=Doc) ->
+    transfer_fields(Rest, Doc#doc{body=[Field|Fields]}).
 
 to_doc_info(FullDocInfo) ->
     {DocInfo, _Path} = to_doc_info_path(FullDocInfo),
@@ -157,27 +200,26 @@
 to_doc_info_path(#full_doc_info{id=Id,update_seq=Seq,rev_tree=Tree}) ->
     LeafRevs = couch_key_tree:get_all_leafs(Tree),
     SortedLeafRevs =
-    lists:sort(fun({RevIdA, {IsDeletedA, _}, PathA}, {RevIdB, {IsDeletedB, _}, PathB}) ->
+    lists:sort(fun({{IsDeletedA, _}, {StartA, [RevIdA|_]}}, {{IsDeletedB, _}, {StartB, [RevIdB|_]}}) ->
             % sort descending by {not deleted, then Depth, then RevisionId}
-            A = {not IsDeletedA, length(PathA), RevIdA},
-            B = {not IsDeletedB, length(PathB), RevIdB},
+            A = {not IsDeletedA, StartA, RevIdA},
+            B = {not IsDeletedB, StartB, RevIdB},
             A > B
         end,
         LeafRevs),
 
-    [{RevId, {IsDeleted, SummaryPointer}, Path} | Rest] = SortedLeafRevs,
-
+    [{{IsDeleted, SummaryPointer}, {Start, [RevId|_]}=Path} | Rest] = SortedLeafRevs,
     {ConflictRevTuples, DeletedConflictRevTuples} =
-        lists:splitwith(fun({_ConflictRevId, {IsDeleted1, _Sp}, _}) ->
+        lists:splitwith(fun({{IsDeleted1, _Sp}, _}) ->
                 not IsDeleted1
             end, Rest),
 
-    ConflictRevs = [RevId1  || {RevId1, _, _} <- ConflictRevTuples],
-    DeletedConflictRevs = [RevId2   || {RevId2, _, _} <- DeletedConflictRevTuples],
+    ConflictRevs = [{Start1, RevId1}  || {_, {Start1, [RevId1|_]}} <- ConflictRevTuples],
+    DeletedConflictRevs = [{Start1, RevId1}  || {_, {Start1, [RevId1|_]}} <- DeletedConflictRevTuples],
     DocInfo = #doc_info{
         id=Id,
         update_seq=Seq,
-        rev = RevId,
+        rev = {Start, RevId},
         summary_pointer = SummaryPointer,
         conflict_revs = ConflictRevs,
         deleted_conflict_revs = DeletedConflictRevs,
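
On the wire the same information appears as a string _rev plus, when the revs option is passed to to_json_obj/2, a structured _revisions object in place of the old _revs array; transfer_fields/2 accepts the same shape coming back in. A sketch of the EJSON (field values invented for illustration):

    %% Sketch only: what to_json_obj/2 renders with the revs option, and what
    %% from_json_obj/1 turns back into #doc{revs = {2, [<<"def456">>, <<"abc123">>]}}.
    {[{<<"_id">>, <<"foo1">>},
      {<<"_rev">>, <<"2-def456">>},
      {<<"_revisions">>, {[{<<"start">>, 2},
                           {<<"ids">>, [<<"def456">>, <<"abc123">>]}]}},
      {<<"value">>, <<"a">>}]}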

Modified: couchdb/branches/rep_security/src/couchdb/couch_httpd.erl
URL: http://svn.apache.org/viewvc/couchdb/branches/rep_security/src/couchdb/couch_httpd.erl?rev=741844&r1=741842&r2=741844&view=diff
==============================================================================
--- couchdb/branches/rep_security/src/couchdb/couch_httpd.erl (original)
+++ couchdb/branches/rep_security/src/couchdb/couch_httpd.erl Sat Feb  7 05:23:02 2009
@@ -269,8 +269,8 @@
 json_body(Httpd) ->
     ?JSON_DECODE(body(Httpd)).
 
-doc_etag(#doc{revs=[DiskRev|_]}) ->
-    "\"" ++ binary_to_list(DiskRev) ++ "\"".
+doc_etag(#doc{revs={Start, [DiskRev|_]}}) ->
+    "\"" ++ ?b2l(couch_doc:to_rev_str({Start, DiskRev})) ++ "\"".
 
 make_etag(Term) ->
     <<SigInt:128/integer>> = erlang:md5(term_to_binary(Term)),
@@ -370,8 +370,8 @@
     send_error(Req, 404, <<"not_found">>, <<"Missing">>);
 send_error(Req, {not_found, Reason}) ->
     send_error(Req, 404, <<"not_found">>, Reason);
-send_error(Req, conflict) ->
-    send_error(Req, 409, <<"conflict">>, <<"Document update conflict.">>);
+send_error(Req, {conflict, Rev}) ->
+    send_error(Req, 409, <<"conflict">>, Rev);
 send_error(Req, {forbidden, Msg}) ->
     send_json(Req, 403,
         {[{<<"error">>,  <<"forbidden">>},

Modified: couchdb/branches/rep_security/src/couchdb/couch_httpd_db.erl
URL: http://svn.apache.org/viewvc/couchdb/branches/rep_security/src/couchdb/couch_httpd_db.erl?rev=741844&r1=741842&r2=741844&view=diff
==============================================================================
--- couchdb/branches/rep_security/src/couchdb/couch_httpd_db.erl (original)
+++ couchdb/branches/rep_security/src/couchdb/couch_httpd_db.erl Sat Feb  7 05:23:02 2009
@@ -22,8 +22,8 @@
 
 -record(doc_query_args, {
     options = [],
-    rev = "",
-    open_revs = ""
+    rev = nil,
+    open_revs = []
 }).
     
 % Database request handlers
@@ -79,13 +79,13 @@
 db_req(#httpd{method='POST',path_parts=[DbName]}=Req, Db) ->
     Doc = couch_doc:from_json_obj(couch_httpd:json_body(Req)),
     DocId = couch_util:new_uuid(),
-    {ok, NewRev} = couch_db:update_doc(Db, Doc#doc{id=DocId, revs=[]}, []),
+    {ok, NewRev} = couch_db:update_doc(Db, Doc#doc{id=DocId}, []),
     DocUrl = absolute_uri(Req, 
         binary_to_list(<<"/",DbName/binary,"/",DocId/binary>>)),
     send_json(Req, 201, [{"Location", DocUrl}], {[
         {ok, true},
         {id, DocId},
-        {rev, NewRev}
+        {rev, couch_doc:to_rev_str(NewRev)}
     ]});
 
 db_req(#httpd{path_parts=[_DbName]}=Req, _Db) ->
@@ -119,19 +119,22 @@
                     <<>> -> couch_util:new_uuid();
                     Id0 -> Id0
                 end,
-                Revs = case proplists:get_value(<<"_rev">>, ObjProps) of
-                    undefined -> [];
-                    Rev  -> [Rev]
+                case proplists:get_value(<<"_rev">>, ObjProps) of
+                undefined ->
+                    Revs = {0, []};
+                Rev  ->
+                    {Pos, RevId} = couch_doc:parse_rev(Rev),
+                    Revs = {Pos, [RevId]}
                 end,
                 Doc#doc{id=Id,revs=Revs}
             end,
             DocsArray),
-        {ok, ResultRevs} = couch_db:update_docs(Db, Docs, Options),
+        {ok, ResultRevs, _Conflicts} = couch_db:update_docs(Db, Docs, Options),
 
         % output the results
         DocResults = lists:zipwith(
             fun(Doc, NewRev) ->
-                {[{<<"id">>, Doc#doc.id}, {<<"rev">>, NewRev}]}
+                {[{<<"id">>, Doc#doc.id}, {<<"rev">>, couch_doc:to_rev_str(NewRev)}]}
             end,
             Docs, ResultRevs),
         send_json(Req, 201, {[
@@ -141,7 +144,7 @@
 
     false ->
         Docs = [couch_doc:from_json_obj(JsonObj) || JsonObj <- DocsArray],
-        ok = couch_db:update_docs(Db, Docs, Options, false),
+        ok = couch_db:update_docs(Db, Docs, Options, replicated_changes),
         send_json(Req, 201, {[
             {ok, true}
         ]})
@@ -158,12 +161,12 @@
 
 db_req(#httpd{method='POST',path_parts=[_,<<"_purge">>]}=Req, Db) ->
     {IdsRevs} = couch_httpd:json_body(Req),
-    % validate the json input
-    [{_Id, [_|_]=_Revs} = IdRevs || IdRevs <- IdsRevs],
+    IdsRevs2 = [{Id, couch_doc:parse_revs(Revs)} || {Id, Revs} <- IdsRevs],
     
-    case couch_db:purge_docs(Db, IdsRevs) of
+    case couch_db:purge_docs(Db, IdsRevs2) of
     {ok, PurgeSeq, PurgedIdsRevs} ->
-        send_json(Req, 200, {[{<<"purge_seq">>, PurgeSeq}, {<<"purged">>, {PurgedIdsRevs}}]});
+        PurgedIdsRevs2 = [{Id, couch_doc:to_rev_strs(Revs)} || {Id, Revs} <- PurgedIdsRevs],
+        send_json(Req, 200, {[{<<"purge_seq">>, PurgeSeq}, {<<"purged">>, {PurgedIdsRevs2}}]});
     Error ->
         throw(Error)
     end;
@@ -214,14 +217,14 @@
                 deleted_conflict_revs=DelConflictRevs
             } = DocInfo,
             Json = {
-                [{<<"rev">>, Rev}] ++
+                [{<<"rev">>, couch_doc:to_rev_str(Rev)}] ++
                 case ConflictRevs of
                     []  ->  [];
-                    _   ->  [{<<"conflicts">>, ConflictRevs}]
+                    _   ->  [{<<"conflicts">>, couch_doc:to_rev_strs(ConflictRevs)}]
                 end ++
                 case DelConflictRevs of
                     []  ->  [];
-                    _   ->  [{<<"deleted_conflicts">>, DelConflictRevs}]
+                    _   ->  [{<<"deleted_conflicts">>, couch_doc:to_rev_strs(DelConflictRevs)}]
                 end ++
                 case Deleted of
                     true -> [{<<"deleted">>, true}];
@@ -237,9 +240,11 @@
 
 db_req(#httpd{method='POST',path_parts=[_,<<"_missing_revs">>]}=Req, Db) ->
     {JsonDocIdRevs} = couch_httpd:json_body(Req),
-    {ok, Results} = couch_db:get_missing_revs(Db, JsonDocIdRevs),
+    JsonDocIdRevs2 = [{Id, [couch_doc:parse_rev(RevStr) || RevStr <- RevStrs]} || {Id, RevStrs} <- JsonDocIdRevs],
+    {ok, Results} = couch_db:get_missing_revs(Db, JsonDocIdRevs2),
+    Results2 = [{Id, [couch_doc:to_rev_str(Rev) || Rev <- Revs]} || {Id, Revs} <- Results],
     send_json(Req, {[
-        {missing_revs, {Results}}
+        {missing_revs, {Results2}}
     ]});
 
 db_req(#httpd{path_parts=[_,<<"_missing_revs">>]}=Req, _Db) ->
@@ -318,7 +323,7 @@
         AdapterFun = fun(#full_doc_info{id=Id}=FullDocInfo, Offset, Acc) ->
             case couch_doc:to_doc_info(FullDocInfo) of
             #doc_info{deleted=false, rev=Rev} ->
-                FoldlFun({{Id, Id}, {[{rev, Rev}]}}, Offset, Acc);
+                FoldlFun({{Id, Id}, {[{rev, couch_doc:to_rev_str(Rev)}]}}, Offset, Acc);
             #doc_info{deleted=true} ->
                 {ok, Acc}
             end
@@ -342,9 +347,9 @@
                 DocInfo = (catch couch_db:get_doc_info(Db, Key)),
                 Doc = case DocInfo of
                 {ok, #doc_info{id=Id, rev=Rev, deleted=false}} = DocInfo ->
-                    {{Id, Id}, {[{rev, Rev}]}};
+                    {{Id, Id}, {[{rev, couch_doc:to_rev_str(Rev)}]}};
                 {ok, #doc_info{id=Id, rev=Rev, deleted=true}} = DocInfo ->
-                    {{Id, Id}, {[{rev, Rev}, {deleted, true}]}};
+                    {{Id, Id}, {[{rev, couch_doc:to_rev_str(Rev)}, {deleted, true}]}};
                 not_found ->
                     {{Key, error}, not_found};
                 _ ->
@@ -364,20 +369,12 @@
 
 
 
-
-
 db_doc_req(#httpd{method='DELETE'}=Req, Db, DocId) ->
-    case extract_header_rev(Req, couch_httpd:qs_value(Req, "rev")) of
-    missing_rev ->
-        couch_httpd:send_error(Req, 409, <<"missing_rev">>,
-            <<"Document rev/etag must be specified to delete">>);
-    RevToDelete ->
-        {ok, NewRev} = couch_db:delete_doc(Db, DocId, [RevToDelete]),
-        send_json(Req, 200, {[
-            {ok, true},
-            {id, DocId},
-            {rev, NewRev}
-            ]})
+    case couch_httpd:qs_value(Req, "rev") of
+    undefined ->
+        update_doc(Req, Db, DocId, {[{<<"_deleted">>,true}]});
+    Rev ->
+        update_doc(Req, Db, DocId, {[{<<"_rev">>, ?l2b(Rev)},{<<"_deleted">>,true}]})
     end;
 
 db_doc_req(#httpd{method='GET'}=Req, Db, DocId) ->
@@ -423,7 +420,7 @@
 
 db_doc_req(#httpd{method='POST'}=Req, Db, DocId) ->
     Form = couch_httpd:parse_form(Req),
-    Rev = list_to_binary(proplists:get_value("_rev", Form)),
+    Rev = couch_doc:parse_rev(proplists:get_value("_rev", Form)),
     Doc = case couch_db:open_doc_revs(Db, DocId, [Rev], []) of
         {ok, [{ok, Doc0}]}  -> Doc0#doc{revs=[Rev]};
         {ok, [Error]}       -> throw(Error)
@@ -447,55 +444,30 @@
     ]});
 
 db_doc_req(#httpd{method='PUT'}=Req, Db, DocId) ->
-    Json = couch_httpd:json_body(Req),
-    Doc = couch_doc:from_json_obj(Json),
-    ExplicitRev =
-    case Doc#doc.revs of
-        [Rev0|_] -> Rev0;
-        [] -> undefined
-    end,
-    case couch_httpd:header_value(Req, "X-Couch-Full-Commit", "false") of
-    "true" ->
-        Options = [full_commit];
-    _ ->
-        Options = []
-    end,
-    case extract_header_rev(Req, ExplicitRev) of
-    missing_rev ->
-        Revs = [];
-    Rev ->
-        Revs = [Rev]
-    end,
-    {ok, NewRev} = couch_db:update_doc(Db, Doc#doc{id=DocId, revs=Revs}, Options),
-    send_json(Req, 201, [{"Etag", <<"\"", NewRev/binary, "\"">>}], {[
-        {ok, true},
-        {id, DocId},
-        {rev, NewRev}
-    ]});
+    update_doc(Req, Db, DocId, couch_httpd:json_body(Req));
 
 db_doc_req(#httpd{method='COPY'}=Req, Db, SourceDocId) ->
     SourceRev =
     case extract_header_rev(Req, couch_httpd:qs_value(Req, "rev")) of
-        missing_rev -> [];
+        missing_rev -> nil;
         Rev -> Rev
     end,
 
-    {TargetDocId, TargetRev} = parse_copy_destination_header(Req),
+    {TargetDocId, TargetRevs} = parse_copy_destination_header(Req),
 
     % open revision Rev or Current  
     Doc = couch_doc_open(Db, SourceDocId, SourceRev, []),
-
     % save new doc
-    {ok, NewTargetRev} = couch_db:update_doc(Db, Doc#doc{id=TargetDocId, revs=TargetRev}, []),
+    {ok, NewTargetRev} = couch_db:update_doc(Db, Doc#doc{id=TargetDocId, revs=TargetRevs}, []),
 
-    send_json(Req, 201, [{"Etag", "\"" ++ binary_to_list(NewTargetRev) ++ "\""}], {[
+    send_json(Req, 201, [{"Etag", "\"" ++ ?b2l(couch_doc:to_rev_str(NewTargetRev)) ++ "\""}], {[
         {ok, true},
         {id, TargetDocId},
-        {rev, NewTargetRev}
+        {rev, couch_doc:to_rev_str(NewTargetRev)}
     ]});
 
 db_doc_req(#httpd{method='MOVE'}=Req, Db, SourceDocId) ->
-    SourceRev =
+    SourceRev = {SourceRevPos, SourceRevId} =
     case extract_header_rev(Req, couch_httpd:qs_value(Req, "rev")) of
     missing_rev -> 
         throw({bad_request, "MOVE requires a specified rev parameter"
@@ -503,21 +475,21 @@
     Rev -> Rev
     end,
 
-    {TargetDocId, TargetRev} = parse_copy_destination_header(Req),
+    {TargetDocId, TargetRevs} = parse_copy_destination_header(Req),
     % open revision Rev or Current
     Doc = couch_doc_open(Db, SourceDocId, SourceRev, []),
 
     % save new doc & delete old doc in one operation
     Docs = [
-        Doc#doc{id=TargetDocId, revs=TargetRev},
-        #doc{id=SourceDocId, revs=[SourceRev], deleted=true}
+        Doc#doc{id=TargetDocId, revs=TargetRevs},
+        #doc{id=SourceDocId, revs={SourceRevPos, [SourceRevId]}, deleted=true}
         ],
 
-    {ok, ResultRevs} = couch_db:update_docs(Db, Docs, []),
+    {ok, ResultRevs, _} = couch_db:update_docs(Db, Docs, []),
 
     DocResults = lists:zipwith(
         fun(FDoc, NewRev) ->
-            {[{id, FDoc#doc.id}, {rev, NewRev}]}
+            {[{id, FDoc#doc.id}, {rev, couch_doc:to_rev_str(NewRev)}]}
         end,
         Docs, ResultRevs),
     send_json(Req, 201, {[
@@ -528,13 +500,42 @@
 db_doc_req(Req, _Db, _DocId) ->
     send_method_not_allowed(Req, "DELETE,GET,HEAD,POST,PUT,COPY,MOVE").
 
+
+update_doc(Req, Db, DocId, Json) ->
+    #doc{deleted=Deleted} = Doc = couch_doc:from_json_obj(Json),
+    ExplicitDocRev =
+    case Doc#doc.revs of
+        {Start,[RevId|_]} -> {Start, RevId};
+        _ -> undefined
+    end,
+    case extract_header_rev(Req, ExplicitDocRev) of
+    missing_rev ->
+        Revs = {0, []};
+    {Pos, Rev} ->
+        Revs = {Pos, [Rev]}
+    end,
+    
+    case couch_httpd:header_value(Req, "X-Couch-Full-Commit", "false") of
+    "true" ->
+        Options = [full_commit];
+    _ ->
+        Options = []
+    end,
+    {ok, NewRev} = couch_db:update_doc(Db, Doc#doc{id=DocId, revs=Revs}, Options),
+    NewRevStr = couch_doc:to_rev_str(NewRev),
+    send_json(Req, if Deleted -> 200; true -> 201 end,
+        [{"Etag", <<"\"", NewRevStr/binary, "\"">>}], {[
+            {ok, true},
+            {id, DocId},
+            {rev, NewRevStr}]}).
+
 % Useful for debugging
 % couch_doc_open(Db, DocId) ->
 %   couch_doc_open(Db, DocId, [], []).
 
 couch_doc_open(Db, DocId, Rev, Options) ->
     case Rev of
-    "" -> % open most recent rev
+    nil -> % open most recent rev
         case couch_db:open_doc(Db, DocId, Options) of
         {ok, Doc} ->
             Doc;
@@ -555,13 +556,13 @@
 db_attachment_req(#httpd{method='GET'}=Req, Db, DocId, FileNameParts) ->
     FileName = list_to_binary(mochiweb_util:join(lists:map(fun binary_to_list/1, FileNameParts),"/")),
     case couch_db:open_doc(Db, DocId, []) of
-    {ok, #doc{attachments=Attachments, revs=[LastRev|_OldRevs]}} ->
+    {ok, #doc{attachments=Attachments}=Doc} ->
         case proplists:get_value(FileName, Attachments) of
         undefined ->
             throw({not_found, "Document is missing attachment"});
         {Type, Bin} ->
             {ok, Resp} = start_chunked_response(Req, 200, [
-                {"ETag", binary_to_list(LastRev)},
+                {"ETag", couch_httpd:doc_etag(Doc)},
                 {"Cache-Control", "must-revalidate"},
                 {"Content-Type", binary_to_list(Type)}%,
                 % My understanding of http://www.faqs.org/rfcs/rfc2616.html
@@ -607,7 +608,7 @@
             #doc{id=DocId};
         Rev ->
             case couch_db:open_doc_revs(Db, DocId, [Rev], []) of
-            {ok, [{ok, Doc0}]}  -> Doc0#doc{revs=[Rev]};
+            {ok, [{ok, Doc0}]}  -> Doc0;
             {ok, [Error]}       -> throw(Error)
             end
     end,
@@ -620,7 +621,7 @@
     send_json(Req, case Method of 'DELETE' -> 200; _ -> 201 end, {[
         {ok, true},
         {id, DocId},
-        {rev, UpdatedRev}
+        {rev, couch_doc:to_rev_str(UpdatedRev)}
     ]});
 
 db_attachment_req(Req, _Db, _DocId, _FileNameParts) ->
@@ -649,26 +650,26 @@
             Options = [deleted_conflicts | Args#doc_query_args.options],
             Args#doc_query_args{options=Options};
         {"rev", Rev} ->
-            Args#doc_query_args{rev=list_to_binary(Rev)};
+            Args#doc_query_args{rev=couch_doc:parse_rev(Rev)};
         {"open_revs", "all"} ->
             Args#doc_query_args{open_revs=all};
         {"open_revs", RevsJsonStr} ->
             JsonArray = ?JSON_DECODE(RevsJsonStr),
-            Args#doc_query_args{open_revs=JsonArray};
+            Args#doc_query_args{open_revs=[couch_doc:parse_rev(Rev) || Rev <- JsonArray]};
         _Else -> % unknown key value pair, ignore.
             Args
         end
     end, #doc_query_args{}, couch_httpd:qs(Req)).
 
 
-
-extract_header_rev(Req, ExplicitRev) when is_list(ExplicitRev)->
-    extract_header_rev(Req, list_to_binary(ExplicitRev));
+extract_header_rev(Req, ExplicitRev) when is_binary(ExplicitRev) or is_list(ExplicitRev)->
+    extract_header_rev(Req, couch_doc:parse_rev(ExplicitRev));
 extract_header_rev(Req, ExplicitRev) ->
     Etag = case couch_httpd:header_value(Req, "If-Match") of
         undefined -> undefined;
-        Value -> list_to_binary(string:strip(Value, both, $"))
+        Value -> couch_doc:parse_rev(string:strip(Value, both, $"))
     end,
+
     case {ExplicitRev, Etag} of
     {undefined, undefined} -> missing_rev;
     {_, undefined} -> ExplicitRev;
@@ -683,10 +684,11 @@
     Destination = couch_httpd:header_value(Req, "Destination"),
     case regexp:match(Destination, "\\?") of
     nomatch -> 
-        {list_to_binary(Destination), []};
+        {list_to_binary(Destination), {0, []}};
     {match, _, _} ->
         {ok, [DocId, RevQueryOptions]} = regexp:split(Destination, "\\?"),
         {ok, [_RevQueryKey, Rev]} = regexp:split(RevQueryOptions, "="),
-        {list_to_binary(DocId), [list_to_binary(Rev)]}
+        {Pos, RevId} = couch_doc:parse_rev(Rev),
+        {list_to_binary(DocId), {Pos, [RevId]}}
     end.
 

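Throughout couch_httpd_db.erl the handlers now exchange rev strings at the HTTP boundary and {Pos, RevId} tuples internally: query-string and If-Match revs go through couch_doc:parse_rev/1, _purge bodies through couch_doc:parse_revs/1, and responses back out through couch_doc:to_rev_str/1. Neither parser is shown in this commit; a minimal sketch, assuming the same "Pos-RevId" encoding as above:

    %% Minimal sketch only; the real couch_doc:parse_rev/1 and
    %% couch_doc:parse_revs/1 are not part of this diff.
    -module(parse_rev_sketch).
    -export([parse_rev/1, parse_revs/1]).

    parse_rev(Rev) when is_binary(Rev) ->
        parse_rev(binary_to_list(Rev));
    parse_rev(Rev) when is_list(Rev) ->
        %% "1-967a00dff5e02add4181" -> {1, <<"967a00dff5e02add4181">>}
        {PosStr, [$- | RevId]} = lists:splitwith(fun(C) -> C =/= $- end, Rev),
        {list_to_integer(PosStr), list_to_binary(RevId)}.

    parse_revs(Revs) ->
        [parse_rev(Rev) || Rev <- Revs].

With that shape, the new #doc_query_args defaults read naturally: rev=nil means "no revision supplied, open the latest", and open_revs=[] means "no explicit revision list requested".
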
Modified: couchdb/branches/rep_security/src/couchdb/couch_httpd_misc_handlers.erl
URL: http://svn.apache.org/viewvc/couchdb/branches/rep_security/src/couchdb/couch_httpd_misc_handlers.erl?rev=741844&r1=741842&r2=741844&view=diff
==============================================================================
--- couchdb/branches/rep_security/src/couchdb/couch_httpd_misc_handlers.erl (original)
+++ couchdb/branches/rep_security/src/couchdb/couch_httpd_misc_handlers.erl Sat Feb  7 05:23:02 2009
@@ -84,23 +84,20 @@
     Source = proplists:get_value(<<"source">>, Props),
     Target = proplists:get_value(<<"target">>, Props),
     
-    {SrcOpts} = proplists:get_value(<<"source_options">>, Props, {[]}),
-    {SrcHeadersBinary} = proplists:get_value(<<"headers">>, SrcOpts, {[]}),
-    SrcHeaders = [{?b2l(K),(V)} || {K,V} <- SrcHeadersBinary],
+    {Options} = proplists:get_value(<<"options">>, Props, {[]}),
+    {SrcHeadersJson} = proplists:get_value(<<"source_headers">>, Options, {[]}),
+    SrcHeaders = [{?b2l(K),(V)} || {K,V} <- SrcHeadersJson],
     
-    {TgtOpts} = proplists:get_value(<<"target_options">>, Props, {[]}),
-    {TgtHeadersBinary} = proplists:get_value(<<"headers">>, TgtOpts, {[]}),
-    TgtHeaders = [{?b2l(K),(V)} || {K,V} <- TgtHeadersBinary],
+    {TgtHeadersJson} = proplists:get_value(<<"target_headers">>, Options, {[]}),
+    TgtHeaders = [{?b2l(K),(V)} || {K,V} <- TgtHeadersJson],
     
-    {Options} = proplists:get_value(<<"options">>, Props, {[]}),
-    Options2 = [{source_options,
-                    [{headers, SrcHeaders},
-                    {user_ctx, UserCtx}]},
-                {target_options,
-                    [{headers, TgtHeaders},
-                    {user_ctx, UserCtx}]}
-                | Options],
-    {ok, {JsonResults}} = couch_rep:replicate(Source, Target, Options2),
+    {ok, {JsonResults}} = couch_rep:replicate(Source, Target,
+            [{source_options,   % Only one of following is used by api
+                [{headers, SrcHeaders}, % Headers used when src DB is URI
+                {user_ctx, UserCtx}]},  % Ctx used when src DB is local.
+            {target_options,
+                [{headers, TgtHeaders},
+                {user_ctx, UserCtx}]}]),
     send_json(Req, {[{ok, true} | JsonResults]});
 handle_replicate_req(Req) ->
     send_method_not_allowed(Req, "POST").

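The _replicate handler now reads per-endpoint headers from a single "options" object ("source_headers"/"target_headers") instead of the old top-level "source_options"/"target_options", matching the updated couch.js helper that posts {source, target, options}. A hypothetical decoded request body as the handler would see it after ?JSON_DECODE (the endpoint URL and header value below are made up for illustration):

    %% Hypothetical EJSON term for a POST /_replicate body; the source URL
    %% and Authorization value are placeholders, not real settings.
    Body = {[
        {<<"source">>, <<"http://example.org/source_db/">>},
        {<<"target">>, <<"target_db">>},
        {<<"options">>, {[
            {<<"source_headers">>, {[{<<"Authorization">>, <<"Basic dXNlcjpwYXNz">>}]}},
            {<<"target_headers">>, {[]}}
        ]}}
    ]}.

From a term like this, the handler extracts SrcHeaders and TgtHeaders and forwards them, alongside the caller's user_ctx, as source_options/target_options to couch_rep:replicate/3; as the inline comments note, the headers apply when that end of the replication is a URL and the user_ctx when it is a local database name.
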
Modified: couchdb/branches/rep_security/src/couchdb/couch_httpd_show.erl
URL: http://svn.apache.org/viewvc/couchdb/branches/rep_security/src/couchdb/couch_httpd_show.erl?rev=741844&r1=741842&r2=741844&view=diff
==============================================================================
--- couchdb/branches/rep_security/src/couchdb/couch_httpd_show.erl (original)
+++ couchdb/branches/rep_security/src/couchdb/couch_httpd_show.erl Sat Feb  7 05:23:02 2009
@@ -27,10 +27,10 @@
         path_parts=[_, _, DesignName, ShowName, Docid]
     }=Req, Db) ->
     DesignId = <<"_design/", DesignName/binary>>,
-    #doc{body={Props}} = couch_httpd_db:couch_doc_open(Db, DesignId, [], []),
+    #doc{body={Props}} = couch_httpd_db:couch_doc_open(Db, DesignId, nil, []),
     Lang = proplists:get_value(<<"language">>, Props, <<"javascript">>),
     ShowSrc = get_nested_json_value({Props}, [<<"shows">>, ShowName]),
-    Doc = couch_httpd_db:couch_doc_open(Db, Docid, [], []),
+    Doc = couch_httpd_db:couch_doc_open(Db, Docid, nil, []),
     send_doc_show_response(Lang, ShowSrc, Doc, Req, Db);
 
 handle_doc_show_req(#httpd{
@@ -38,7 +38,7 @@
         path_parts=[_, _, DesignName, ShowName]
     }=Req, Db) ->
     DesignId = <<"_design/", DesignName/binary>>,
-    #doc{body={Props}} = couch_httpd_db:couch_doc_open(Db, DesignId, [], []),
+    #doc{body={Props}} = couch_httpd_db:couch_doc_open(Db, DesignId, nil, []),
     Lang = proplists:get_value(<<"language">>, Props, <<"javascript">>),
     ShowSrc = get_nested_json_value({Props}, [<<"shows">>, ShowName]),
     send_doc_show_response(Lang, ShowSrc, nil, Req, Db);
@@ -51,7 +51,7 @@
 
 handle_view_list_req(#httpd{method='GET',path_parts=[_, _, DesignName, ListName, ViewName]}=Req, Db) ->
     DesignId = <<"_design/", DesignName/binary>>,
-    #doc{body={Props}} = couch_httpd_db:couch_doc_open(Db, DesignId, [], []),
+    #doc{body={Props}} = couch_httpd_db:couch_doc_open(Db, DesignId, nil, []),
     Lang = proplists:get_value(<<"language">>, Props, <<"javascript">>),
     ListSrc = get_nested_json_value({Props}, [<<"lists">>, ListName]),
     send_view_list_response(Lang, ListSrc, ViewName, DesignId, Req, Db);
@@ -181,12 +181,12 @@
         couch_httpd_external:send_external_response(Req, JsonResp)
     end);
 
-send_doc_show_response(Lang, ShowSrc, #doc{revs=[DocRev|_]}=Doc, #httpd{mochi_req=MReq}=Req, Db) ->
+send_doc_show_response(Lang, ShowSrc, #doc{revs=Revs}=Doc, #httpd{mochi_req=MReq}=Req, Db) ->
     % calculate the etag
     Headers = MReq:get(headers),
     Hlist = mochiweb_headers:to_list(Headers),
     Accept = proplists:get_value('Accept', Hlist),
-    CurrentEtag = couch_httpd:make_etag({Lang, ShowSrc, DocRev, Accept}),
+    CurrentEtag = couch_httpd:make_etag({Lang, ShowSrc, Revs, Accept}),
     % We know our etag now    
     couch_httpd:etag_respond(Req, CurrentEtag, fun() -> 
         ExternalResp = couch_query_servers:render_doc_show(Lang, ShowSrc, 

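In couch_httpd_show.erl, the per-document show response now hashes the document's entire revs tuple into the etag rather than just the head rev binary, and the [] sentinel for "open the current revision" becomes nil to match the new couch_doc_open/4. A sketch of the term that now feeds couch_httpd:make_etag/1 follows; how make_etag/1 renders the md5 is not shown in this commit, so the hex formatting is an assumption and the values are made up:

    %% Illustrative only: shows the shape of the term now hashed for the
    %% show-response etag; the hex rendering is an assumption.
    -module(show_etag_sketch).
    -export([example_etag/0]).

    example_etag() ->
        Lang = <<"javascript">>,
        ShowSrc = <<"function(doc, req){ return {body: doc.title}; }">>,
        Revs = {2, [<<"def456">>, <<"abc123">>]},   % whole revs tuple, not just the head rev
        Accept = "text/html",
        <<SigInt:128/integer>> = erlang:md5(term_to_binary({Lang, ShowSrc, Revs, Accept})),
        "\"" ++ integer_to_list(SigInt, 16) ++ "\"".
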
Modified: couchdb/branches/rep_security/src/couchdb/couch_httpd_view.erl
URL: http://svn.apache.org/viewvc/couchdb/branches/rep_security/src/couchdb/couch_httpd_view.erl?rev=741844&r1=741842&r2=741844&view=diff
==============================================================================
--- couchdb/branches/rep_security/src/couchdb/couch_httpd_view.erl (original)
+++ couchdb/branches/rep_security/src/couchdb/couch_httpd_view.erl Sat Feb  7 05:23:02 2009
@@ -478,14 +478,14 @@
         true ->
             Rev = case Value of
             {Props} ->
-                case is_list(Props) of
-                true ->
-                    proplists:get_value(<<"_rev">>, Props, []);
-                _ ->
-                    []
+                case proplists:get_value(<<"_rev">>, Props) of
+                undefined ->
+                    nil;
+                Rev0 ->
+                    couch_doc:parse_rev(Rev0)
                 end;
             _ ->
-                []
+                nil
             end,
             ?LOG_DEBUG("Include Doc: ~p ~p", [DocId, Rev]),
             case (catch couch_httpd_db:couch_doc_open(Db, DocId, Rev, [])) of