couchdb-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From dav...@apache.org
Subject svn commit: r780197 - in /couchdb/trunk: Makefile.am t/ t/001-load.t t/010-file-basics.t t/011-file-headers.t t/020-btree-basics.t t/021-btree-reductions.t t/030-doc-from-json.t t/031-doc-to-json.t
Date Sat, 30 May 2009 08:02:01 GMT
Author: davisp
Date: Sat May 30 08:02:00 2009
New Revision: 780197

URL: http://svn.apache.org/viewvc?rev=780197&view=rev
Log:
Adding Erlang unit tests.

To run these tests:

    $ git clone git://github.com/ngerakines/etap.git
    $ cd etap
    $ sudo make install
    $ cd /path/to/couchdb
    $ ./bootstrap && ./configure && make check

So far I've worked through most of couch_file.erl, couch_btree.erl, and couch_doc.erl. Tomorrow
I'll be adding coverage reporting so that we can see what code we're actually testing.


Added:
    couchdb/trunk/t/
    couchdb/trunk/t/001-load.t
    couchdb/trunk/t/010-file-basics.t
    couchdb/trunk/t/011-file-headers.t
    couchdb/trunk/t/020-btree-basics.t
    couchdb/trunk/t/021-btree-reductions.t
    couchdb/trunk/t/030-doc-from-json.t
    couchdb/trunk/t/031-doc-to-json.t
Modified:
    couchdb/trunk/Makefile.am

Modified: couchdb/trunk/Makefile.am
URL: http://svn.apache.org/viewvc/couchdb/trunk/Makefile.am?rev=780197&r1=780196&r2=780197&view=diff
==============================================================================
--- couchdb/trunk/Makefile.am (original)
+++ couchdb/trunk/Makefile.am Sat May 30 08:02:00 2009
@@ -36,6 +36,9 @@
 THANKS.gz: $(top_srcdir)/THANKS
 	-gzip -9 < $< > $@
 
+check: all
+	prove t/*.t
+
 dev: all
 	@echo "This command is intended for developers to use;"
 	@echo "it creates development ini files as well as a"

Added: couchdb/trunk/t/001-load.t
URL: http://svn.apache.org/viewvc/couchdb/trunk/t/001-load.t?rev=780197&view=auto
==============================================================================
--- couchdb/trunk/t/001-load.t (added)
+++ couchdb/trunk/t/001-load.t Sat May 30 08:02:00 2009
@@ -0,0 +1,58 @@
+#!/usr/bin/env escript
+%% -*- erlang -*-
+%%! -pa src/couchdb -sasl errlog_type error -boot start_sasl -noshell
+
+% Test that we can load each module.
+
+main(_) ->
+    etap:plan(39),
+    Modules = [
+        couch_batch_save,
+        couch_batch_save_sup,
+        couch_btree,
+        couch_config,
+        couch_config_writer,
+        couch_db,
+        couch_db_update_notifier,
+        couch_db_update_notifier_sup,
+        couch_db_updater,
+        couch_doc,
+        couch_event_sup,
+        couch_external_manager,
+        couch_external_server,
+        couch_file,
+        couch_httpd,
+        couch_httpd_db,
+        couch_httpd_external,
+        couch_httpd_misc_handlers,
+        couch_httpd_show,
+        couch_httpd_stats_handlers,
+        couch_httpd_view,
+        couch_key_tree,
+        couch_log,
+        couch_os_process,
+        couch_query_servers,
+        couch_ref_counter,
+        couch_rep,
+        couch_rep_sup,
+        couch_server,
+        couch_server_sup,
+        couch_stats_aggregator,
+        couch_stats_collector,
+        couch_stream,
+        couch_task_status,
+        couch_util,
+        couch_view,
+        couch_view_compactor,
+        couch_view_group,
+        couch_view_updater
+    ],
+
+    lists:foreach(
+        fun(Module) ->
+            etap_can:loaded_ok(
+                Module,
+                lists:concat(["Loaded: ", Module])
+            )
+        end, Modules),
+    etap:end_tests().

Added: couchdb/trunk/t/010-file-basics.t
URL: http://svn.apache.org/viewvc/couchdb/trunk/t/010-file-basics.t?rev=780197&view=auto
==============================================================================
--- couchdb/trunk/t/010-file-basics.t (added)
+++ couchdb/trunk/t/010-file-basics.t Sat May 30 08:02:00 2009
@@ -0,0 +1,83 @@
+#!/usr/bin/env escript
+%% -*- erlang -*-
+%%! -pa ./src/couchdb -sasl errlog_type error -boot start_sasl -noshell
+
+-define(FILE_NAME, "./t/temp.010").
+
+main(_) ->
+    etap:plan(16),
+    case (catch test()) of
+        ok ->
+            etap:end_tests();
+        Other ->
+            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
+            etap:bail()
+    end,
+    ok.
+    
+test() ->
+    etap:is({error, enoent}, couch_file:open("not a real file"),
+        "Opening a non-existant file should return an enoent error."),
+
+    etap:fun_is(
+        fun({ok, _}) -> true; (_) -> false end,
+        couch_file:open(?FILE_NAME ++ ".1", [create, invalid_option]),
+        "Invalid flags to open are ignored."
+    ),
+
+    {ok, Fd} = couch_file:open(?FILE_NAME ++ ".0", [create, overwrite]),
+    etap:ok(is_pid(Fd),
+        "Returned file descriptor is a Pid"),
+    
+    etap:is({ok, 0}, couch_file:bytes(Fd),
+        "Newly created files have 0 bytes."),
+
+    etap:is({ok, 0}, couch_file:append_term(Fd, foo),
+        "Appending a term returns the previous end of file position."),
+
+    {ok, Size} = couch_file:bytes(Fd),
+    etap:is_greater(Size, 0,
+        "Writing a term increased the file size."),
+    
+    etap:is({ok, Size}, couch_file:append_binary(Fd, <<"fancy!">>),
+        "Appending a binary returns the current file size."),
+    
+    etap:is({ok, foo}, couch_file:pread_term(Fd, 0),
+        "Reading the first term returns what we wrote: foo"),
+    
+    etap:is({ok, <<"fancy!">>}, couch_file:pread_binary(Fd, Size),
+        "Reading back the binary returns what we wrote: <<\"fancy\">>."),
+        
+    etap:is({ok, <<131, 100, 0, 3, 102, 111, 111>>},
+        couch_file:pread_binary(Fd, 0),
+        "Reading a binary at a term position returns the term as binary."
+    ),
+
+    {ok, BinPos} = couch_file:append_binary(Fd, <<131,100,0,3,102,111,111>>),
+    etap:is({ok, foo}, couch_file:pread_term(Fd, BinPos),
+        "Reading a term from a written binary term representation succeeds."),
+
+    % append_binary == append_iolist?
+    % Possible bug in pread_iolist or iolist() -> append_binary
+    {ok, IOLPos} = couch_file:append_binary(Fd, ["foo", $m, <<"bam">>]),
+    etap:is({ok, [<<"foombam">>]}, couch_file:pread_iolist(Fd, IOLPos),
+        "Reading an results in a binary form of the written iolist()"),
+
+    % XXX: How does one test fsync?
+    etap:is(ok, couch_file:sync(Fd),
+        "Syncing does not cause an error."),
+
+    etap:is(ok, couch_file:truncate(Fd, Size),
+        "Truncating a file succeeds."),
+
+    %etap:is(eof, (catch couch_file:pread_binary(Fd, Size)),
+    %    "Reading data that was truncated fails.")
+    etap:skip(fun() -> ok end,
+        "No idea how to test reading beyond EOF"),
+    
+    etap:is({ok, foo}, couch_file:pread_term(Fd, 0),
+        "Truncating does not affect data located before the truncation mark."),
+
+    etap:is(ok, couch_file:close(Fd),
+        "Files close properly."),
+    ok.
\ No newline at end of file

Added: couchdb/trunk/t/011-file-headers.t
URL: http://svn.apache.org/viewvc/couchdb/trunk/t/011-file-headers.t?rev=780197&view=auto
==============================================================================
--- couchdb/trunk/t/011-file-headers.t (added)
+++ couchdb/trunk/t/011-file-headers.t Sat May 30 08:02:00 2009
@@ -0,0 +1,133 @@
+#!/usr/bin/env escript
+%% -*- erlang -*-
+%%! -pa ./src/couchdb -sasl errlog_type error -boot start_sasl -noshell
+
+-define(FILE_NAME, "./t/temp.011").
+-define(SIZE_BLOCK, 4096). % Need to keep this in sync with couch_file.erl
+
+main(_) ->
+    {S1, S2, S3} = now(),
+    random:seed(S1, S2, S3),
+
+    etap:plan(17),
+    case (catch test()) of
+        ok ->
+            etap:end_tests();
+        Other ->
+            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
+            etap:bail()
+    end,
+    ok.
+    
+test() ->
+    {ok, Fd} = couch_file:open(?FILE_NAME, [create,overwrite]),
+    
+    etap:is({ok, 0}, couch_file:bytes(Fd),
+        "File should be initialized to contain zero bytes."),
+    
+    etap:is(ok, couch_file:write_header(Fd, {<<"some_data">>, 32}),
+        "Writing a header succeeds."),
+    
+    {ok, Size1} = couch_file:bytes(Fd),
+    etap:is_greater(Size1, 0,
+        "Writing a header allocates space in the file."),
+    
+    etap:is({ok, {<<"some_data">>, 32}}, couch_file:read_header(Fd),
+        "Reading the header returns what we wrote."),
+    
+    etap:is(ok, couch_file:write_header(Fd, [foo, <<"more">>]),
+        "Writing a second header succeeds."),
+    
+    {ok, Size2} = couch_file:bytes(Fd),
+    etap:is_greater(Size2, Size1,
+        "Writing a second header allocates more space."),
+    
+    etap:is({ok, [foo, <<"more">>]}, couch_file:read_header(Fd),
+        "Reading the second header does not return the first header."),
+    
+    % Delete the second header.
+    ok = couch_file:truncate(Fd, Size1),
+    
+    etap:is({ok, {<<"some_data">>, 32}}, couch_file:read_header(Fd),
+        "Reading the header after a truncation returns a previous header."),
+    
+    couch_file:write_header(Fd, [foo, <<"more">>]),
+    etap:is({ok, Size2}, couch_file:bytes(Fd),
+        "Rewriting the same second header returns the same second size."),
+
+    ok = couch_file:close(Fd),
+
+    % Now for the fun stuff. Try corrupting the second header and see
+    % if we recover properly.
+    
+    % Destroy the 0x1 byte that marks a header
+    check_header_recovery(fun(CouchFd, RawFd, Expect, HeaderPos) ->
+        etap:isnt(Expect, couch_file:read_header(CouchFd),
+            "Should return a different header before corruption."),
+        file:pwrite(RawFd, HeaderPos, <<0>>),
+        etap:is(Expect, couch_file:read_header(CouchFd),
+            "Corrupting the byte marker should read the previous header.")     
+    end),
+    
+    % Corrupt the size.
+    check_header_recovery(fun(CouchFd, RawFd, Expect, HeaderPos) ->
+        etap:isnt(Expect, couch_file:read_header(CouchFd),
+            "Should return a different header before corruption."),
+        % +1 for 0x1 byte marker
+        file:pwrite(RawFd, HeaderPos+1, <<10/integer>>),
+        etap:is(Expect, couch_file:read_header(CouchFd),
+            "Corrupting the size should read the previous header.")
+    end),
+    
+    % Corrupt the MD5 signature
+    check_header_recovery(fun(CouchFd, RawFd, Expect, HeaderPos) ->
+        etap:isnt(Expect, couch_file:read_header(CouchFd),
+            "Should return a different header before corruption."),
+        % +5 = +1 for 0x1 byte and +4 for term size.
+        file:pwrite(RawFd, HeaderPos+5, <<"F01034F88D320B22">>),
+        etap:is(Expect, couch_file:read_header(CouchFd),
+            "Corrupting the MD5 signature should read the previous header.")
+    end),
+    
+    % Corrupt the data
+    check_header_recovery(fun(CouchFd, RawFd, Expect, HeaderPos) ->
+        etap:isnt(Expect, couch_file:read_header(CouchFd),
+            "Should return a different header before corruption."),
+        % +21 = +1 for 0x1 byte, +4 for term size and +16 for MD5 sig
+        file:pwrite(RawFd, HeaderPos+21, <<"some data goes here!">>),
+        etap:is(Expect, couch_file:read_header(CouchFd),
+            "Corrupting the header data should read the previous header.")
+    end),
+    
+    ok.
+
+check_header_recovery(CheckFun) ->
+    {ok, Fd} = couch_file:open(?FILE_NAME, [create,overwrite]),
+    {ok, RawFd} = file:open(?FILE_NAME, [read, write, raw, binary]),
+
+    {ok, _} = write_random_data(Fd),
+    ExpectHeader = {some_atom, <<"a binary">>, 756},
+    ok = couch_file:write_header(Fd, ExpectHeader),
+
+    {ok, HeaderPos} = write_random_data(Fd),
+    ok = couch_file:write_header(Fd, {2342, <<"corruption! greed!">>}),
+    
+    CheckFun(Fd, RawFd, {ok, ExpectHeader}, HeaderPos),
+    
+    ok = file:close(RawFd),
+    ok = couch_file:close(Fd),
+    ok.
+
+write_random_data(Fd) ->
+    write_random_data(Fd, 100 + random:uniform(1000)).
+
+write_random_data(Fd, 0) ->
+    {ok, Bytes} = couch_file:bytes(Fd),
+    {ok, (1 + Bytes div ?SIZE_BLOCK) * ?SIZE_BLOCK};
+write_random_data(Fd, N) ->
+    Choices = [foo, bar, <<"bizzingle">>, "bank", ["rough", stuff]],
+    Term = lists:nth(random:uniform(4) + 1, Choices),
+    {ok, _} = couch_file:append_term(Fd, Term),
+    write_random_data(Fd, N-1).
+
+    
\ No newline at end of file

Added: couchdb/trunk/t/020-btree-basics.t
URL: http://svn.apache.org/viewvc/couchdb/trunk/t/020-btree-basics.t?rev=780197&view=auto
==============================================================================
--- couchdb/trunk/t/020-btree-basics.t (added)
+++ couchdb/trunk/t/020-btree-basics.t Sat May 30 08:02:00 2009
@@ -0,0 +1,191 @@
+#!/usr/bin/env escript
+%% -*- erlang -*-
+%%! -pa ./src/couchdb -sasl errlog_type error -boot start_sasl -noshell
+
+-define(FILE_NAME, "./t/temp.020").
+
+-record(btree, {fd, root, extract_kv, assemble_kv, less, reduce}).
+
+main(_) ->
+    etap:plan(48),
+    case (catch test()) of
+        ok ->
+            etap:end_tests();
+        Other ->
+            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
+            etap:bail()
+    end,
+    ok.
+
+%% @todo Determine if this number should be greater to see if the btree was
+%% broken into multiple nodes. AKA "How do we appropriately detect if multiple
+%% nodes were created."
+test()->
+    Sorted = [{Seq, random:uniform()} || Seq <- lists:seq(1, 1000)],
+    etap:ok(test_kvs(Sorted), "Testing sorted keys"),
+    etap:ok(test_kvs(lists:reverse(Sorted)), "Testing reversed sorted keys"),
+    etap:ok(test_kvs(shuffle(Sorted)), "Testing shuffled keys."),
+    ok.
+
+test_kvs(KeyValues) ->    
+    ReduceFun = fun
+        (reduce, KVs) ->
+            length(KVs);
+        (rereduce, Reds) ->
+            lists:sum(Reds)
+    end,
+
+    Keys = [K || {K, _} <- KeyValues],
+
+    {ok, Fd} = couch_file:open(?FILE_NAME, [create,overwrite]),
+    {ok, Btree} = couch_btree:open(nil, Fd),
+    etap:ok(is_record(Btree, btree), "Created btree is really a btree record"),
+    etap:is(Btree#btree.fd, Fd, "Btree#btree.fd is set correctly."),
+    etap:is(Btree#btree.root, nil, "Btree#btree.root is set correctly."),
+
+    Btree1 = couch_btree:set_options(Btree, [{reduce, ReduceFun}]),
+    etap:is(Btree1#btree.reduce, ReduceFun, "Reduce function was set"),
+    EmptyRes = couch_btree:foldl(Btree1, fun(_, X) -> {ok, X+1} end, 0),
+    etap:is(EmptyRes, {ok, 0}, "Folding over an empty btree"),
+
+    {ok, Btree2} = couch_btree:add_remove(Btree1, KeyValues, []),
+    etap:ok(test_btree(Btree2, KeyValues),
+        "Adding all keys at once returns a complete btree."),
+    
+    etap:fun_is(
+        fun
+            ({ok, {kp_node, _}}) -> true;
+            (_) -> false
+        end,
+        couch_file:pread_term(Fd, element(1, Btree2#btree.root)),
+        "Btree root pointer is a kp_node."
+    ),
+
+    {ok, Btree3} = couch_btree:add_remove(Btree2, [], Keys),
+    etap:ok(test_btree(Btree3, []),
+        "Removing all keys at once returns an empty btree."),
+
+    Btree4 = lists:foldl(fun(KV, BtAcc) ->
+        {ok, BtAcc2} = couch_btree:add_remove(BtAcc, [KV], []),
+        BtAcc2
+    end, Btree3, KeyValues),
+    etap:ok(test_btree(Btree4, KeyValues),
+        "Adding all keys one at a time returns a complete btree."),
+
+    Btree5 = lists:foldl(fun({K, _}, BtAcc) ->
+        {ok, BtAcc2} = couch_btree:add_remove(BtAcc, [], [K]),
+        BtAcc2
+    end, Btree4, KeyValues),
+    etap:ok(test_btree(Btree5, []),
+        "Removing all keys one at a time returns an empty btree."),
+
+    KeyValuesRev = lists:reverse(KeyValues),
+    Btree6 = lists:foldl(fun(KV, BtAcc) ->
+        {ok, BtAcc2} = couch_btree:add_remove(BtAcc, [KV], []),
+        BtAcc2
+    end, Btree5, KeyValuesRev),
+    etap:ok(test_btree(Btree6, KeyValues),
+        "Adding all keys in reverse order returns a complete btree."),
+
+    {_, Rem2Keys0, Rem2Keys1} = lists:foldl(fun(X, {Count, Left, Right}) ->
+        case Count rem 2 == 0 of
+            true-> {Count+1, [X | Left], Right};
+            false -> {Count+1, Left, [X | Right]}
+        end
+    end, {0, [], []}, KeyValues),
+
+    etap:ok(test_add_remove(Btree6, Rem2Keys0, Rem2Keys1),
+        "Add/Remove every other key."),
+    
+    etap:ok(test_add_remove(Btree6, Rem2Keys1, Rem2Keys0),
+        "Add/Remove opposite every other key."),
+
+    {ok, Btree7} = couch_btree:add_remove(Btree6, [], [K||{K,_}<-Rem2Keys1]),
+    {ok, Btree8} = couch_btree:add_remove(Btree7, [], [K||{K,_}<-Rem2Keys0]),
+    etap:ok(test_btree(Btree8, []),
+        "Removing both halves of every other key returns an empty btree."),
+
+    %% Third chunk (close out)
+    etap:is(couch_file:close(Fd), ok, "closing out"),
+    true.
+
+test_btree(Btree, KeyValues) ->
+    ok = test_key_access(Btree, KeyValues),
+    ok = test_lookup_access(Btree, KeyValues),
+    ok = test_final_reductions(Btree, KeyValues),
+    true.
+
+test_add_remove(Btree, OutKeyValues, RemainingKeyValues) ->
+    Btree2 = lists:foldl(fun({K, _}, BtAcc) ->
+        {ok, BtAcc2} = couch_btree:add_remove(BtAcc, [], [K]),
+        BtAcc2
+    end, Btree, OutKeyValues),
+    true = test_btree(Btree2, RemainingKeyValues),
+
+    Btree3 = lists:foldl(fun(KV, BtAcc) ->
+        {ok, BtAcc2} = couch_btree:add_remove(BtAcc, [KV], []),
+        BtAcc2
+    end, Btree2, OutKeyValues),
+    true = test_btree(Btree3, OutKeyValues ++ RemainingKeyValues).
+
+test_key_access(Btree, List) ->
+    FoldFun = fun(Element, {[HAcc|TAcc], Count}) ->
+        case Element == HAcc of
+            true -> {ok, {TAcc, Count + 1}};
+            _ -> {ok, {TAcc, Count + 1}}
+        end
+    end,
+    Length = length(List),
+    Sorted = lists:sort(List),
+    {ok, {[], Length}} = couch_btree:foldl(Btree, FoldFun, {Sorted, 0}),
+    {ok, {[], Length}} = couch_btree:foldr(Btree, FoldFun, {Sorted, 0}),
+    ok.
+
+test_lookup_access(Btree, KeyValues) ->
+    FoldFun = fun({Key, Value}, {Key, Value}) -> {stop, true} end,
+    lists:foreach(fun({Key, Value}) ->
+        [{ok, {Key, Value}}] = couch_btree:lookup(Btree, [Key]),
+        {ok, true} = couch_btree:foldl(Btree, Key, FoldFun, {Key, Value})
+    end, KeyValues).
+
+test_final_reductions(Btree, KeyValues) ->
+    KVLen = length(KeyValues),
+    FoldLFun = fun(_X, LeadingReds, Acc) ->
+        CountToStart = KVLen div 3 + Acc,
+        CountToStart = couch_btree:final_reduce(Btree, LeadingReds),
+        {ok, Acc+1}
+    end,
+    FoldRFun = fun(_X, LeadingReds, Acc) ->
+        CountToEnd = KVLen - KVLen div 3 + Acc,
+        CountToEnd = couch_btree:final_reduce(Btree, LeadingReds),
+        {ok, Acc+1}
+    end,
+    {LStartKey, _} = case KVLen of
+        0 -> {nil, nil};
+        _ -> lists:nth(KVLen div 3 + 1, lists:sort(KeyValues))
+    end,
+    {RStartKey, _} = case KVLen of
+        0 -> {nil, nil};
+        _ -> lists:nth(KVLen div 3, lists:sort(KeyValues))
+    end,
+    {ok, FoldLRed} = couch_btree:foldl(Btree, LStartKey, FoldLFun, 0),
+    {ok, FoldRRed} = couch_btree:foldr(Btree, RStartKey, FoldRFun, 0),
+    KVLen = FoldLRed + FoldRRed,
+    ok.
+
+shuffle(List) ->
+   randomize(round(math:log(length(List)) + 0.5), List).
+
+randomize(1, List) ->
+   randomize(List);
+randomize(T, List) ->
+    lists:foldl(fun(_E, Acc) ->
+        randomize(Acc)
+    end, randomize(List), lists:seq(1, (T - 1))).
+
+randomize(List) ->
+    D = lists:map(fun(A) ->
+        {random:uniform(), A}
+    end, List),
+    {_, D1} = lists:unzip(lists:keysort(1, D)), 
+    D1.

Added: couchdb/trunk/t/021-btree-reductions.t
URL: http://svn.apache.org/viewvc/couchdb/trunk/t/021-btree-reductions.t?rev=780197&view=auto
==============================================================================
--- couchdb/trunk/t/021-btree-reductions.t (added)
+++ couchdb/trunk/t/021-btree-reductions.t Sat May 30 08:02:00 2009
@@ -0,0 +1,128 @@
+#!/usr/bin/env escript
+%% -*- erlang -*-
+%%! -pa ./src/couchdb -sasl errlog_type error -boot start_sasl -noshell
+
+-define(FILE_NAME, "./t/temp.021").
+-define(ROWS, 1000).
+
+main(_) ->
+    etap:plan(8),
+    case (catch test()) of
+        ok ->
+            etap:end_tests();
+        Other ->
+            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
+            etap:bail()
+    end,
+    ok.
+
+test()->
+    ReduceFun = fun
+        (reduce, KVs) -> length(KVs);
+        (rereduce, Reds) -> lists:sum(Reds)
+    end,
+    
+    {ok, Fd} = couch_file:open(?FILE_NAME, [create,overwrite]),
+    {ok, Btree} = couch_btree:open(nil, Fd, [{reduce, ReduceFun}]),
+    
+    % Create a list, of {"even", Value} or {"odd", Value} pairs.
+    {_, EvenOddKVs} = lists:foldl(fun(Idx, {Key, Acc}) ->
+        case Key of
+            "even" -> {"odd", [{{Key, Idx}, 1} | Acc]};
+            _ -> {"even", [{{Key, Idx}, 1} | Acc]}
+        end
+    end, {"odd", []}, lists:seq(1, ?ROWS)),
+
+    {ok, Btree2} = couch_btree:add_remove(Btree, EvenOddKVs, []),
+
+    GroupFun = fun({K1, _}, {K2, _}) -> K1 == K2 end,
+    FoldFun = fun(GroupedKey, Unreduced, Acc) ->
+        {ok, [{GroupedKey, couch_btree:final_reduce(Btree2, Unreduced)} | Acc]}
+    end,
+
+    {SK1, EK1} = {{"even", -1}, {"even", foo}},
+    {SK2, EK2} = {{"odd", -1}, {"odd", foo}},
+
+    etap:fun_is(
+        fun
+            ({ok, [{{"odd", _}, ?ROWS div 2}, {{"even", _}, ?ROWS div 2}]}) ->
+                true;
+            (_) ->
+                false
+        end,    
+        couch_btree:fold_reduce(Btree2, nil, nil, GroupFun, FoldFun, []),
+        "Reduction works with no specified direction, startkey, or endkey."
+    ),
+
+    etap:fun_is(
+        fun
+            ({ok, [{{"odd", _}, ?ROWS div 2}, {{"even", _}, ?ROWS div 2}]}) ->
+                true;
+            (_) ->
+                false
+        end,
+        couch_btree:fold_reduce(Btree2, fwd, nil, nil, GroupFun, FoldFun, []),
+        "Reducing forward works with no startkey or endkey."
+    ),
+
+    etap:fun_is(
+        fun
+            ({ok, [{{"even", _}, ?ROWS div 2}, {{"odd", _}, ?ROWS div 2}]}) ->
+                true;
+            (_) ->
+                false
+        end,
+        couch_btree:fold_reduce(Btree2, rev, nil, nil, GroupFun, FoldFun, []),
+        "Reducing backwards works with no startkey or endkey."
+    ),
+    
+    etap:fun_is(
+        fun
+            ({ok, [{{"odd", _}, ?ROWS div 2}, {{"even", _}, ?ROWS div 2}]}) ->
+                true;
+            (_) ->
+                false
+        end,
+        couch_btree:fold_reduce(Btree2, fwd, SK1, EK2, GroupFun, FoldFun, []),
+        "Reducing works over the entire range with startkey and endkey set."
+    ),
+    
+    etap:fun_is(
+        fun
+            ({ok, [{{"even", _}, ?ROWS div 2}]}) -> true;
+            (_) -> false
+        end,
+        couch_btree:fold_reduce(Btree2, fwd, SK1, EK1, GroupFun, FoldFun, []),
+        "Reducing foward over first half works with a startkey and endkey."
+    ),
+
+    etap:fun_is(
+        fun
+            ({ok, [{{"odd", _}, ?ROWS div 2}]}) -> true;
+            (_) -> false
+        end,
+        couch_btree:fold_reduce(Btree2, fwd, SK2, EK2, GroupFun, FoldFun, []),
+        "Reducing foward over second half works with second startkey and endkey"
+    ),
+
+    etap:fun_is(
+        fun
+            ({ok, [{{"odd", _}, ?ROWS div 2}]}) -> true;
+            (_) -> false
+        end,
+        couch_btree:fold_reduce(Btree2, rev, EK2, SK2, GroupFun, FoldFun, []),
+        "Reducing in reverse works after swapping the startkey and endkey."
+    ),
+    
+    etap:fun_is(
+        fun
+            ({ok, [{{"even", _}, ?ROWS div 2}, {{"odd", _}, ?ROWS div 2}]}) ->
+                true;
+            (_) ->
+                false
+        end,
+        couch_btree:fold_reduce(Btree2, rev, EK2, SK1, GroupFun, FoldFun, []),
+        "Reducing in reverse results in reversed accumulator."
+    ),
+
+    couch_file:close(Fd).

Added: couchdb/trunk/t/030-doc-from-json.t
URL: http://svn.apache.org/viewvc/couchdb/trunk/t/030-doc-from-json.t?rev=780197&view=auto
==============================================================================
--- couchdb/trunk/t/030-doc-from-json.t (added)
+++ couchdb/trunk/t/030-doc-from-json.t Sat May 30 08:02:00 2009
@@ -0,0 +1,208 @@
+#!/usr/bin/env escript
+%% -*- erlang -*-
+%%! -pa ./src/couchdb -pa ./src/mochiweb -sasl errlog_type false -noshell
+
+
+%% XXX: Figure out how to -include("couch_db.hrl")
+-record(doc, {id= <<"">>, revs={0, []}, body={[]},
+            attachments=[], deleted=false, meta=[]}).
+
+main(_) ->
+    etap:plan(26),
+    case (catch test()) of
+        ok ->
+            etap:end_tests();
+        Other ->
+            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
+            etap:bail()
+    end,
+    ok.
+    
+test() ->
+    ok = test_from_json_success(),
+    ok = test_from_json_errors(),
+    ok.
+
+test_from_json_success() ->
+    Cases = [
+        {
+            {[]},
+            #doc{},
+            "Return an empty document for an empty JSON object."
+        },
+        {
+            {[{<<"_id">>, <<"zing!">>}]},
+            #doc{id= <<"zing!">>},
+            "Parses document ids."
+        },
+        {
+            {[{<<"_id">>, <<"_design/foo">>}]},
+            #doc{id= <<"_design/foo">>},
+            "_design/document ids."
+        },
+        {
+            {[{<<"_id">>, <<"_local/bam">>}]},
+            #doc{id= <<"_local/bam">>},
+            "_local/document ids."
+        },
+        {
+            {[{<<"_rev">>, <<"4-230234">>}]},
+            #doc{revs={4, [<<"230234">>]}},
+            "_rev stored in revs."        
+        },
+        {
+            {[{<<"soap">>, 35}]},
+            #doc{body={[{<<"soap">>, 35}]}},
+            "Non underscore prefixed fields stored in body."
+        },
+        {
+            {[{<<"_attachments">>, {[
+                {<<"my_attachment.fu">>, {[
+                    {<<"stub">>, true},
+                    {<<"content_type">>, <<"application/awesome">>},
+                    {<<"length">>, 45}
+                ]}},
+                {<<"noahs_private_key.gpg">>, {[
+                    {<<"data">>, <<"SSBoYXZlIGEgcGV0IGZpc2gh">>},
+                    {<<"content_type">>, <<"application/pgp-signature">>}
+                ]}}
+            ]}}]},
+            #doc{attachments=[
+                {<<"my_attachment.fu">>,
+                    {stub, <<"application/awesome">>, 45}},
+                {<<"noahs_private_key.gpg">>,
+                    {<<"application/pgp-signature">>, <<"I have a pet fish!">>}}
+            ]},
+            "Attachments are parsed correctly."
+        },
+        {
+            {[{<<"_deleted">>, true}]},
+            #doc{deleted=true},
+            "_deleted controls the deleted field."
+        },
+        {
+            {[{<<"_deleted">>, false}]},
+            #doc{},
+            "{\"_deleted\": false} is ok."
+        },
+        {
+            {[
+                {<<"_rev">>, <<"6-something">>},
+                {<<"_revisions">>, {[
+                    {<<"start">>, 4},
+                    {<<"ids">>, [<<"foo1">>, <<"phi3">>, <<"omega">>]}
+                ]}}
+            ]},
+            #doc{revs={4, [<<"foo1">>, <<"phi3">>, <<"omega">>]}},
+            "_revisions attribute are preferred to _rev."
+        },
+        {
+            {[{<<"_revs_info">>, dropping}]},
+            #doc{},
+            "Drops _revs_info."
+        },
+        {
+            {[{<<"_local_seq">>, dropping}]},
+            #doc{},
+            "Drops _local_seq."        
+        },
+        {
+            {[{<<"_conflicts">>, dropping}]},
+            #doc{},
+            "Drops _conflicts."
+        },
+        {
+            {[{<<"_deleted_conflicts">>, dropping}]},
+            #doc{},
+            "Drops _deleted_conflicts."
+        }
+    ],
+    
+    lists:foreach(fun({EJson, Expect, Mesg}) ->
+        etap:is(couch_doc:from_json_obj(EJson), Expect, Mesg)
+    end, Cases),
+    ok.
+
+test_from_json_errors() ->
+    Cases = [
+        {
+            [],
+            {bad_request, "Document must be a JSON object"},
+            "arrays are invalid"
+        },
+        {
+            4,
+            {bad_request, "Document must be a JSON object"},
+            "integers are invalid"
+        },
+        {
+            true,
+            {bad_request, "Document must be a JSON object"},
+            "literals are invalid"
+        },
+        {
+            {[{<<"_id">>, {[{<<"foo">>, 5}]}}]},
+            {bad_request, <<"Document id must be a string">>},
+            "Document id must be a string."
+        },
+        {
+            {[{<<"_id">>, <<"_random">>}]},
+            {bad_request,
+                <<"Only reserved document ids may start with underscore.">>},
+            "Disallow arbitrary underscore prefixed docids."
+        },
+        {
+            {[{<<"_rev">>, 5}]},
+            {bad_request, <<"Invalid rev format">>},
+            "_rev must be a string"
+        },
+        {
+            {[{<<"_rev">>, "foobar"}]},
+            {bad_request, <<"Invalid rev format">>},
+            "_rev must be %d-%s"
+        },
+        {
+            {[{<<"_rev">>, "foo-bar"}]},
+            "Error if _rev's integer expection is broken."
+        },
+        {
+            {[{<<"_revisions">>, {[{<<"start">>, true}]}}]},
+            {doc_validation, "_revisions.start isn't an integer."},
+            "_revisions.start must be an integer."
+        },
+        {
+            {[{<<"_revisions">>, {[
+                {<<"start">>, 0},
+                {<<"ids">>, 5}
+            ]}}]},
+            {doc_validation, "_revisions.ids isn't a array."},
+            "_revions.ids must be a list."
+        },
+        {
+            {[{<<"_revisions">>, {[
+                {<<"start">>, 0},
+                {<<"ids">>, [5]}
+            ]}}]},
+            {doc_validation, "RevId isn't a string"},
+            "Revision ids must be strings."
+        },
+        {
+            {[{<<"_something">>, 5}]},
+            {doc_validation, <<"Bad special document member: _something">>},
+            "Underscore prefix fields are reserved."
+        }
+    ],
+    
+    lists:foreach(fun
+        ({EJson, Expect, Mesg}) ->
+            Error = (catch couch_doc:from_json_obj(EJson)),
+            etap:is(Error, Expect, Mesg);
+        ({EJson, Mesg}) ->
+            try
+                couch_doc:from_json_obj(EJson),
+                etap:ok(false, "Conversion failed to raise an exception.")
+            catch
+                _:_ -> etap:ok(true, Mesg)
+            end
+    end, Cases),
+    ok.
\ No newline at end of file

Added: couchdb/trunk/t/031-doc-to-json.t
URL: http://svn.apache.org/viewvc/couchdb/trunk/t/031-doc-to-json.t?rev=780197&view=auto
==============================================================================
--- couchdb/trunk/t/031-doc-to-json.t (added)
+++ couchdb/trunk/t/031-doc-to-json.t Sat May 30 08:02:00 2009
@@ -0,0 +1,155 @@
+#!/usr/bin/env escript
+%% -*- erlang -*-
+%%! -pa ./src/couchdb -pa ./src/mochiweb -sasl errlog_type false -noshell
+
+
+%% XXX: Figure out how to -include("couch_db.hrl")
+-record(doc, {id= <<"">>, revs={0, []}, body={[]},
+            attachments=[], deleted=false, meta=[]}).
+
+main(_) ->
+    etap:plan(unknown),
+    case (catch test()) of
+        ok ->
+            etap:end_tests();
+        Other ->
+            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
+            etap:bail()
+    end,
+    ok.
+
+test() ->
+    ok = test_to_json_success(),
+    ok = test_to_json_errors(),
+    ok.
+    
+test_to_json_success() ->
+    Cases = [
+        {
+            #doc{},
+            {[{<<"_id">>, <<"">>}]},
+            "Empty docs are {\"_id\": \"\"}"
+        },
+        {
+            #doc{id= <<"foo">>},
+            {[{<<"_id">>, <<"foo">>}]},
+            "_id is added."
+        },
+        {
+            #doc{revs={5, ["foo"]}},
+            {[{<<"_id">>, <<>>}, {<<"_rev">>, <<"5-foo">>}]},
+            "_rev is added."
+        },
+        {
+            [revs],
+            #doc{revs={5, [<<"first">>, <<"second">>]}},
+            {[
+                {<<"_id">>, <<>>},
+                {<<"_rev">>, <<"5-first">>},
+                {<<"_revisions">>, {[
+                    {<<"start">>, 5},
+                    {<<"ids">>, [<<"first">>, <<"second">>]}
+                ]}}
+            ]},
+            "_revisions include with revs option"
+        },
+        {
+            #doc{body={[{<<"foo">>, <<"bar">>}]}},
+            {[{<<"_id">>, <<>>}, {<<"foo">>, <<"bar">>}]},
+            "Arbitrary fields are added."
+        },
+        {
+            #doc{deleted=true, body={[{<<"foo">>, <<"bar">>}]}},
+            {[{<<"_id">>, <<>>}, {<<"_deleted">>, true}]},
+            "Deleted docs drop body members."
+        },
+        {
+            #doc{meta=[
+                {revs_info, 4, [{<<"fin">>, deleted}, {<<"zim">>,
missing}]}
+            ]},
+            {[
+                {<<"_id">>, <<>>},
+                {<<"_revs_info">>, [
+                    {[{<<"rev">>, <<"4-fin">>}, {<<"status">>,
<<"deleted">>}]},
+                    {[{<<"rev">>, <<"3-zim">>}, {<<"status">>,
<<"missing">>}]}
+                ]}
+            ]},
+            "_revs_info field is added correctly."
+        },
+        {
+            #doc{meta=[{local_seq, 5}]},
+            {[{<<"_id">>, <<>>}, {<<"_local_seq">>, 5}]},
+            "_local_seq is added as an integer."
+        },
+        {
+            #doc{meta=[{conflicts, [{3, <<"yep">>}, {1, <<"snow">>}]}]},
+            {[
+                {<<"_id">>, <<>>},
+                {<<"_conflicts">>, [<<"3-yep">>, <<"1-snow">>]}
+            ]},
+            "_conflicts is added as an array of strings."
+        },
+        {
+            #doc{meta=[{deleted_conflicts, [{10923, <<"big_cowboy_hat">>}]}]},
+            {[
+                {<<"_id">>, <<>>}, 
+                {<<"_deleted_conflicts">>, [<<"10923-big_cowboy_hat">>]}
+            ]},
+            "_deleted_conflicsts is added as an array of strings."
+        },
+        {
+            #doc{attachments=[
+                {<<"big.xml">>, {<<"xml/sucks">>, {fun() -> ok
end, 400}}},
+                {<<"fast.json">>, {<<"json/ftw">>, <<"{\"so\":
\"there!\"}">>}}
+            ]},
+            {[
+                {<<"_id">>, <<>>},
+                {<<"_attachments">>, {[
+                    {<<"big.xml">>, {[
+                        {<<"stub">>, true},
+                        {<<"content_type">>, <<"xml/sucks">>},
+                        {<<"length">>, 400}
+                    ]}},
+                    {<<"fast.json">>, {[
+                        {<<"stub">>, true},
+                        {<<"content_type">>, <<"json/ftw">>},
+                        {<<"length">>, 16}
+                    ]}}
+                ]}}
+            ]},
+            "Attachments attached as stubs only include a length."
+        },
+        {
+            [attachments],
+            #doc{attachments=[
+                {<<"stuff.txt">>,
+                    {<<"text/plain">>, {fun() -> <<"diet pepsi">>
end, 10}}},
+                {<<"food.now">>, {<<"application/food">>, <<"sammich">>}}
+            ]},
+            {[
+                {<<"_id">>, <<>>},
+                {<<"_attachments">>, {[
+                    {<<"stuff.txt">>, {[
+                        {<<"content_type">>, <<"text/plain">>},
+                        {<<"data">>, <<"ZGlldCBwZXBzaQ==">>}
+                    ]}},
+                    {<<"food.now">>, {[
+                        {<<"content_type">>, <<"application/food">>},
+                        {<<"data">>, <<"c2FtbWljaA==">>}
+                    ]}}
+                ]}}
+            ]},
+            "Attachments included inline with attachments option."
+        }
+    ],
+
+    lists:foreach(fun
+        ({Doc, EJson, Mesg}) ->
+            etap:is(couch_doc:to_json_obj(Doc, []), EJson, Mesg);
+        ({Options, Doc, EJson, Mesg}) ->
+            etap:is(couch_doc:to_json_obj(Doc, Options), EJson, Mesg)
+    end, Cases),
+    ok.
+
+test_to_json_errors() ->
+    ok.
\ No newline at end of file



Mime
View raw message