avro-commits mailing list archives

From: cutt...@apache.org
Subject: svn commit: r794110 - in /hadoop/avro/trunk: ./ src/c/ src/c/json/fail/ src/c/json/pass/
Date: Tue, 14 Jul 2009 22:59:24 GMT
Author: cutting
Date: Tue Jul 14 22:59:22 2009
New Revision: 794110

URL: http://svn.apache.org/viewvc?rev=794110&view=rev
Log:
AVRO-60.  Fix C JSON parser to correctly process escapes and multi-byte characters.  Add tests.
 Contributed by Matt Massie.

Added:
    hadoop/avro/trunk/src/c/json/fail/bad_value
    hadoop/avro/trunk/src/c/json/fail/colon_instead_of_comma
    hadoop/avro/trunk/src/c/json/fail/comma_after_close
    hadoop/avro/trunk/src/c/json/fail/comma_instead_of_colon
    hadoop/avro/trunk/src/c/json/fail/double_colon
    hadoop/avro/trunk/src/c/json/fail/extra_close
    hadoop/avro/trunk/src/c/json/fail/extra_data_after_object
    hadoop/avro/trunk/src/c/json/fail/illegal_expression
    hadoop/avro/trunk/src/c/json/fail/illegal_invocation
    hadoop/avro/trunk/src/c/json/fail/invalid_number
    hadoop/avro/trunk/src/c/json/fail/invalid_number2
    hadoop/avro/trunk/src/c/json/fail/invalid_number3
    hadoop/avro/trunk/src/c/json/fail/invalid_number4
    hadoop/avro/trunk/src/c/json/fail/json_text_not_array_or_object
    hadoop/avro/trunk/src/c/json/fail/line_break_in_string
    hadoop/avro/trunk/src/c/json/fail/line_break_in_string2
    hadoop/avro/trunk/src/c/json/fail/missing_colon
    hadoop/avro/trunk/src/c/json/fail/naked_array_value
    hadoop/avro/trunk/src/c/json/fail/object_array_mismatch
    hadoop/avro/trunk/src/c/json/fail/single_quote
    hadoop/avro/trunk/src/c/json/fail/string_bad_u1_value
    hadoop/avro/trunk/src/c/json/fail/string_bad_u2_value
    hadoop/avro/trunk/src/c/json/fail/string_bad_u3_value
    hadoop/avro/trunk/src/c/json/fail/string_bad_u4_value
    hadoop/avro/trunk/src/c/json/fail/string_illegal_escape
    hadoop/avro/trunk/src/c/json/fail/tab_char_in_string
    hadoop/avro/trunk/src/c/json/fail/tab_character_in_string
    hadoop/avro/trunk/src/c/json/fail/unquoted_object_key
    hadoop/avro/trunk/src/c/json/pass/deep
    hadoop/avro/trunk/src/c/json/pass/json_org_example1
    hadoop/avro/trunk/src/c/json/pass/json_org_example2
    hadoop/avro/trunk/src/c/json/pass/json_org_example3
    hadoop/avro/trunk/src/c/json/pass/json_org_test1
    hadoop/avro/trunk/src/c/json/pass/object_with_duplicate_keys
    hadoop/avro/trunk/src/c/json/pass/rfc_example
    hadoop/avro/trunk/src/c/json/pass/rfc_example2
    hadoop/avro/trunk/src/c/json/pass/string_u_value
Removed:
    hadoop/avro/trunk/src/c/json/fail/array_with_start_coma
    hadoop/avro/trunk/src/c/json/fail/object_with_start_coma
Modified:
    hadoop/avro/trunk/CHANGES.txt
    hadoop/avro/trunk/src/c/Makefile.am
    hadoop/avro/trunk/src/c/avro.c
    hadoop/avro/trunk/src/c/avro.h
    hadoop/avro/trunk/src/c/json.c
    hadoop/avro/trunk/src/c/json.h
    hadoop/avro/trunk/src/c/json/pass/object_with_multiple_members
    hadoop/avro/trunk/src/c/json_schema.y
    hadoop/avro/trunk/src/c/json_tokenizer.c
    hadoop/avro/trunk/src/c/json_tokenizer.h
    hadoop/avro/trunk/src/c/test_avro_bytes.c
    hadoop/avro/trunk/src/c/test_avro_float_double.c
    hadoop/avro/trunk/src/c/test_avro_raw.c
    hadoop/avro/trunk/src/c/test_avro_string.c
    hadoop/avro/trunk/src/c/test_avro_zigzag.c
    hadoop/avro/trunk/src/c/test_json_parser.c

Modified: hadoop/avro/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/CHANGES.txt?rev=794110&r1=794109&r2=794110&view=diff
==============================================================================
--- hadoop/avro/trunk/CHANGES.txt (original)
+++ hadoop/avro/trunk/CHANGES.txt Tue Jul 14 22:59:22 2009
@@ -11,6 +11,9 @@
     AVRO-71.  C++: make deserializer more generic.  (Scott Banachowski
     via cutting)
 
+    AVRO-60. Fix C JSON parser to correctly handle escapes and
+    multi-byte characters.  Add tests.  (Matt Massie via cutting)
+
   OPTIMIZATIONS
 
   BUG FIXES

Modified: hadoop/avro/trunk/src/c/Makefile.am
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/c/Makefile.am?rev=794110&r1=794109&r2=794110&view=diff
==============================================================================
--- hadoop/avro/trunk/src/c/Makefile.am (original)
+++ hadoop/avro/trunk/src/c/Makefile.am Tue Jul 14 22:59:22 2009
@@ -1,7 +1,7 @@
 AM_CFLAGS=$(APR_CFLAGS) $(APR_INCLUDES) $(APU_INCLUDES) -Wall -pedantic
 C_DOCS_OUTPUT ?= "docs/dox"
 
-EXTRA_DIST=json_schema.y
+EXTRA_DIST=json_schema.y lemon.c lempar.c
 
 include_HEADERS = avro.h
 
@@ -49,17 +49,19 @@
 # Run indent on all files. 
 # NOTE: Don't indent avro.h (messes up docs) or lemon.c,lempar.c (messes up parser)
 pretty:
+	pushd $(top_srcdir); \
 	for file in *.[c,h]; \
 	do \
 	if [ $$file = "avro.h" -o $$file = "lemon.c" -o $$file = "lempar.c" ]; then \
 		continue;\
 	fi;\
 	indent $$file; \
-	done
+	done;\
+	popd;
 
 docs:
 	@(cat docs/doxygen.conf; echo "OUTPUT_DIRECTORY=$(C_DOCS_OUTPUT)")| doxygen -
 
 .PHONY: docs 
 
-CLEANFILES=*~ trace.txt json_schema.out
+CLEANFILES=$(top_srcdir)/*~ trace.txt json_schema.out lemon

Modified: hadoop/avro/trunk/src/c/avro.c
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/c/avro.c?rev=794110&r1=794109&r2=794110&view=diff
==============================================================================
--- hadoop/avro/trunk/src/c/avro.c (original)
+++ hadoop/avro/trunk/src/c/avro.c Tue Jul 14 22:59:22 2009
@@ -17,6 +17,8 @@
 under the License.
 */
 
+#include <stdlib.h>
+#include <locale.h>
 #include "avro.h"
 
 avro_status_t
@@ -53,3 +55,18 @@
     }
   return AVRO_OK;
 }
+
+avro_status_t
+avro_initialize (void)
+{
+  apr_initialize ();
+  atexit (apr_terminate);
+
+  /* Set the locale to UTF-8 */
+  if (!setlocale (LC_CTYPE, "en_US.UTF-8"))
+    {
+      return AVRO_FAILURE;
+    }
+
+  return AVRO_OK;
+}
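
A minimal caller sketch, not taken from this commit: the new avro_initialize() wraps apr_initialize()/atexit(apr_terminate) and switches LC_CTYPE to a UTF-8 locale, which JSON_parse() below relies on for mbstowcs(). The sample JSON text and the error handling are illustrative only.

    #include <stdio.h>
    #include <string.h>
    #include <apr_pools.h>
    #include "avro.h"
    #include "json.h"

    int main (void)
    {
      apr_pool_t *pool;
      JSON_value *value;
      char text[] = "{ \"key\" : \"Sch\\u00f6ne Gr\\u00fc\\u00dfe\" }";

      /* Must run before JSON_parse() so mbstowcs() sees a UTF-8 LC_CTYPE;
         fails if the en_US.UTF-8 locale is not installed. */
      if (avro_initialize () != AVRO_OK)
        {
          fprintf (stderr, "no UTF-8 locale available\n");
          return 1;
        }

      apr_pool_create (&pool, NULL);
      value = JSON_parse (pool, text, (int) strlen (text));
      fprintf (stderr, value ? "parsed\n" : "parse failed\n");
      apr_pool_destroy (&pool);
      return 0;
    }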

Modified: hadoop/avro/trunk/src/c/avro.h
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/c/avro.h?rev=794110&r1=794109&r2=794110&view=diff
==============================================================================
--- hadoop/avro/trunk/src/c/avro.h (original)
+++ hadoop/avro/trunk/src/c/avro.h Tue Jul 14 22:59:22 2009
@@ -109,6 +109,11 @@
 #define AVRO_PUTBYTES(avro, addr, len)     \
 (*(avro)->a_ops->a_putbytes)(avro, addr, len)
 
+/** Initialize the AVRO library 
+@return The Avro status
+*/
+avro_status_t avro_initialize(void);
+
 /** Create a memory-backed Avro handle 
 @param avro Pointer to handle that will be initialized
 @param pool Pool used for allocating dynamic data structures.

Modified: hadoop/avro/trunk/src/c/json.c
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/c/json.c?rev=794110&r1=794109&r2=794110&view=diff
==============================================================================
--- hadoop/avro/trunk/src/c/json.c (original)
+++ hadoop/avro/trunk/src/c/json.c Tue Jul 14 22:59:22 2009
@@ -43,7 +43,7 @@
       fprintf (file, "???");
       return;
     case JSON_STRING:
-      fprintf (file, "\"%s\"", value->string_value);
+      fprintf (file, "\"%ls\"", value->string_value);
       return;
     case JSON_NUMBER:
       fprintf (file, "%E", value->number_value);
@@ -76,7 +76,7 @@
     case JSON_OBJECT:
       {
 	apr_hash_index_t *hi;
-	char *key;
+	wchar_t *key;
 	apr_ssize_t len;
 	JSON_value *member_value;
 
@@ -91,7 +91,7 @@
 	      }
 	    apr_hash_this (hi, (void *) &key, &len, (void *) &member_value);
 	    ws_depth (file, *depth);
-	    fprintf (file, "\"%s\" :", key);
+	    fprintf (file, "\"%ls\" :", key);
 	    JSON_print_private (file, member_value, depth);
 	  }
 	fprintf (file, "\n");
@@ -131,10 +131,11 @@
 }
 
 static JSON_value *
-JSON_parse_inner (void *jsonp, apr_pool_t * pool, char *text, int text_len)
+JSON_parse_inner (void *jsonp, apr_pool_t * pool, wchar_t * mb_text,
+		  size_t mb_len)
 {
-  int len;
-  char *cur, *text_end;
+  int i, len;
+  wchar_t *cur, *text_end;
   JSON_value *value = NULL;
   JSON_ctx ctx;
 
@@ -144,7 +145,7 @@
   ctx.result = NULL;
 
   /* Loop through the input */
-  for (cur = text, text_end = text + text_len; cur < text_end; cur += len)
+  for (cur = mb_text, text_end = mb_text + mb_len; cur < text_end; cur += len)
     {
       int tokenType;
       double number;
@@ -170,12 +171,60 @@
 	  break;
 
 	case TK_STRING:
-	  value = JSON_value_new (pool, JSON_STRING);
-	  value->string_value = apr_palloc (pool, len + 1);
-	  /* Take off the quotes */
-	  memcpy (value->string_value, cur + 1, len - 1);
-	  /* TODO: e.g. substitute \" for " */
-	  value->string_value[len - 2] = '\0';
+	  {
+	    wchar_t *p, *q;
+
+	    value = JSON_value_new (pool, JSON_STRING);
+	    /* This allocates the maximum we need */
+	    value->string_value =
+	      (wchar_t *) apr_palloc (pool, (len + 1) * sizeof (wchar_t));
+
+	    for (p = cur + 1, q = value->string_value; p < cur + len - 1; p++)
+	      {
+		if (*p == '\\')
+		  {
+		    p++;
+		    switch (*p)
+		      {
+		      case '"':
+		      case '\\':
+		      case '/':
+			*(q++) = *p;
+			break;
+		      case 'b':
+			*(q++) = '\b';
+			break;
+		      case 'f':
+			*(q++) = '\f';
+			break;
+		      case 'n':
+			*(q++) = '\n';
+			break;
+		      case 'r':
+			*(q++) = '\r';
+			break;
+		      case 't':
+			*(q++) = '\t';
+			break;
+		      case 'u':
+			{
+			  wchar_t hex[] = { 0, 0, 0, 0, 0 };
+			  for (i = 0; i < 4; i++)
+			    {
+			      hex[i] = *(++p);
+			    }
+			  *(q++) = wcstol (hex, NULL, 16);
+			}
+			break;
+		      }
+		  }
+		else
+		  {
+		    *(q++) = *p;
+		  }
+	      }
+	    *(q++) = '\0';
+	  }
 	  break;
 
 	case TK_NUMBER:
@@ -208,14 +257,37 @@
 JSON_value *
 JSON_parse (apr_pool_t * pool, char *text, int text_len)
 {
-  JSON_value *value;
+  JSON_value *value = NULL;
+  size_t mb_len;
+
   /* Too bad I can't use the pool here... */
   void *jsonp = JSONParserAlloc (malloc);
   if (jsonp == NULL)
     {
       return NULL;
     }
-  value = JSON_parse_inner (jsonp, pool, text, text_len);
+
+  mb_len = mbstowcs (NULL, text, 0);
+  if (mb_len > 0)
+    {
+      apr_status_t status;
+      apr_pool_t *subpool;
+      status = apr_pool_create (&subpool, pool);
+      if (status == APR_SUCCESS)
+	{
+	  wchar_t *mb_text =
+	    (wchar_t *) apr_palloc (subpool, sizeof (wchar_t) * mb_len);
+	  if (mb_text)
+	    {
+	      if (mbstowcs (mb_text, text, mb_len) == mb_len)
+		{
+		  value = JSON_parse_inner (jsonp, pool, mb_text, mb_len);
+		}
+	    }
+	  apr_pool_destroy (subpool);
+	}
+    }
+
   JSONParserFree (jsonp, free);
   return value;
 }
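
The rewritten json.c leans on two standard conversions: mbstowcs() turns the multi-byte UTF-8 input into a wchar_t buffer (probing the length first with a NULL destination, as JSON_parse() does above), and wcstol() with base 16 turns the four digits after "\u" into a single wide character. A self-contained sketch of both, not taken from the commit; the sample bytes and the locale name are assumptions.

    #include <locale.h>
    #include <stdlib.h>
    #include <wchar.h>

    int main (void)
    {
      const char *utf8 = "Gr\xc3\xbc\xc3\x9f" "e";   /* "Grüße" as UTF-8 bytes */
      wchar_t wide[16];
      wchar_t hex[] = L"00FC";                       /* the four digits after "\u" */
      size_t n;

      /* What avro_initialize() does; the locale name may differ per system. */
      setlocale (LC_CTYPE, "en_US.UTF-8");

      n = mbstowcs (NULL, utf8, 0);   /* probe: wide length, excluding L'\0' */
      if (n != (size_t) -1 && n < 16)
        {
          mbstowcs (wide, utf8, n + 1);
          wprintf (L"%ls is %zu wide characters\n", wide, n);
        }

      wprintf (L"\\u%ls -> U+%04lX\n", hex, (unsigned long) wcstol (hex, NULL, 16));
      return 0;
    }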

Modified: hadoop/avro/trunk/src/c/json.h
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/c/json.h?rev=794110&r1=794109&r2=794110&view=diff
==============================================================================
--- hadoop/avro/trunk/src/c/json.h (original)
+++ hadoop/avro/trunk/src/c/json.h Tue Jul 14 22:59:22 2009
@@ -19,6 +19,7 @@
 #ifndef JSON_H
 #define JSON_H
 
+#include <wchar.h>
 #include <apr.h>
 #include <apr_pools.h>
 #include <apr_tables.h>
@@ -43,7 +44,7 @@
   {
     apr_hash_t *object;
     apr_array_header_t *array;
-    char *z;
+    wchar_t *z;
     double number;
     int boolean;
   } value_u;

Added: hadoop/avro/trunk/src/c/json/fail/bad_value
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/c/json/fail/bad_value?rev=794110&view=auto
==============================================================================
--- hadoop/avro/trunk/src/c/json/fail/bad_value (added)
+++ hadoop/avro/trunk/src/c/json/fail/bad_value Tue Jul 14 22:59:22 2009
@@ -0,0 +1 @@
+["Bad value", truth]
\ No newline at end of file

Added: hadoop/avro/trunk/src/c/json/fail/colon_instead_of_comma
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/c/json/fail/colon_instead_of_comma?rev=794110&view=auto
==============================================================================
--- hadoop/avro/trunk/src/c/json/fail/colon_instead_of_comma (added)
+++ hadoop/avro/trunk/src/c/json/fail/colon_instead_of_comma Tue Jul 14 22:59:22 2009
@@ -0,0 +1 @@
+["Colon instead of comma": false]
\ No newline at end of file

Added: hadoop/avro/trunk/src/c/json/fail/comma_after_close
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/c/json/fail/comma_after_close?rev=794110&view=auto
==============================================================================
--- hadoop/avro/trunk/src/c/json/fail/comma_after_close (added)
+++ hadoop/avro/trunk/src/c/json/fail/comma_after_close Tue Jul 14 22:59:22 2009
@@ -0,0 +1 @@
+["Comma after the close"],
\ No newline at end of file

Added: hadoop/avro/trunk/src/c/json/fail/comma_instead_of_colon
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/c/json/fail/comma_instead_of_colon?rev=794110&view=auto
==============================================================================
--- hadoop/avro/trunk/src/c/json/fail/comma_instead_of_colon (added)
+++ hadoop/avro/trunk/src/c/json/fail/comma_instead_of_colon Tue Jul 14 22:59:22 2009
@@ -0,0 +1 @@
+{"Comma instead of colon", null}
\ No newline at end of file

Added: hadoop/avro/trunk/src/c/json/fail/double_colon
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/c/json/fail/double_colon?rev=794110&view=auto
==============================================================================
--- hadoop/avro/trunk/src/c/json/fail/double_colon (added)
+++ hadoop/avro/trunk/src/c/json/fail/double_colon Tue Jul 14 22:59:22 2009
@@ -0,0 +1 @@
+{"Double colon":: null}
\ No newline at end of file

Added: hadoop/avro/trunk/src/c/json/fail/extra_close
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/c/json/fail/extra_close?rev=794110&view=auto
==============================================================================
--- hadoop/avro/trunk/src/c/json/fail/extra_close (added)
+++ hadoop/avro/trunk/src/c/json/fail/extra_close Tue Jul 14 22:59:22 2009
@@ -0,0 +1 @@
+["Extra close"]]
\ No newline at end of file

Added: hadoop/avro/trunk/src/c/json/fail/extra_data_after_object
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/c/json/fail/extra_data_after_object?rev=794110&view=auto
==============================================================================
--- hadoop/avro/trunk/src/c/json/fail/extra_data_after_object (added)
+++ hadoop/avro/trunk/src/c/json/fail/extra_data_after_object Tue Jul 14 22:59:22 2009
@@ -0,0 +1 @@
+{"Extra value after object close": true} "This isn't valid JSON"

Added: hadoop/avro/trunk/src/c/json/fail/illegal_expression
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/c/json/fail/illegal_expression?rev=794110&view=auto
==============================================================================
--- hadoop/avro/trunk/src/c/json/fail/illegal_expression (added)
+++ hadoop/avro/trunk/src/c/json/fail/illegal_expression Tue Jul 14 22:59:22 2009
@@ -0,0 +1 @@
+{"Illegal": 1 + 2}

Added: hadoop/avro/trunk/src/c/json/fail/illegal_invocation
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/c/json/fail/illegal_invocation?rev=794110&view=auto
==============================================================================
--- hadoop/avro/trunk/src/c/json/fail/illegal_invocation (added)
+++ hadoop/avro/trunk/src/c/json/fail/illegal_invocation Tue Jul 14 22:59:22 2009
@@ -0,0 +1 @@
+{"Illegal invocation": alert()}

Added: hadoop/avro/trunk/src/c/json/fail/invalid_number
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/c/json/fail/invalid_number?rev=794110&view=auto
==============================================================================
--- hadoop/avro/trunk/src/c/json/fail/invalid_number (added)
+++ hadoop/avro/trunk/src/c/json/fail/invalid_number Tue Jul 14 22:59:22 2009
@@ -0,0 +1 @@
+[0e]
\ No newline at end of file

Added: hadoop/avro/trunk/src/c/json/fail/invalid_number2
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/c/json/fail/invalid_number2?rev=794110&view=auto
==============================================================================
--- hadoop/avro/trunk/src/c/json/fail/invalid_number2 (added)
+++ hadoop/avro/trunk/src/c/json/fail/invalid_number2 Tue Jul 14 22:59:22 2009
@@ -0,0 +1 @@
+[0e+]
\ No newline at end of file

Added: hadoop/avro/trunk/src/c/json/fail/invalid_number3
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/c/json/fail/invalid_number3?rev=794110&view=auto
==============================================================================
--- hadoop/avro/trunk/src/c/json/fail/invalid_number3 (added)
+++ hadoop/avro/trunk/src/c/json/fail/invalid_number3 Tue Jul 14 22:59:22 2009
@@ -0,0 +1 @@
+{"Comma instead if closing brace": true,
\ No newline at end of file

Added: hadoop/avro/trunk/src/c/json/fail/invalid_number4
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/c/json/fail/invalid_number4?rev=794110&view=auto
==============================================================================
--- hadoop/avro/trunk/src/c/json/fail/invalid_number4 (added)
+++ hadoop/avro/trunk/src/c/json/fail/invalid_number4 Tue Jul 14 22:59:22 2009
@@ -0,0 +1 @@
+[0e+-1]
\ No newline at end of file

Added: hadoop/avro/trunk/src/c/json/fail/json_text_not_array_or_object
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/c/json/fail/json_text_not_array_or_object?rev=794110&view=auto
==============================================================================
--- hadoop/avro/trunk/src/c/json/fail/json_text_not_array_or_object (added)
+++ hadoop/avro/trunk/src/c/json/fail/json_text_not_array_or_object Tue Jul 14 22:59:22 2009
@@ -0,0 +1 @@
+"JSON text should be an array or object"

Added: hadoop/avro/trunk/src/c/json/fail/line_break_in_string
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/c/json/fail/line_break_in_string?rev=794110&view=auto
==============================================================================
--- hadoop/avro/trunk/src/c/json/fail/line_break_in_string (added)
+++ hadoop/avro/trunk/src/c/json/fail/line_break_in_string Tue Jul 14 22:59:22 2009
@@ -0,0 +1,2 @@
+["line
+break"]
\ No newline at end of file

Added: hadoop/avro/trunk/src/c/json/fail/line_break_in_string2
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/c/json/fail/line_break_in_string2?rev=794110&view=auto
==============================================================================
--- hadoop/avro/trunk/src/c/json/fail/line_break_in_string2 (added)
+++ hadoop/avro/trunk/src/c/json/fail/line_break_in_string2 Tue Jul 14 22:59:22 2009
@@ -0,0 +1,2 @@
+["line\
+break"]
\ No newline at end of file

Added: hadoop/avro/trunk/src/c/json/fail/missing_colon
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/c/json/fail/missing_colon?rev=794110&view=auto
==============================================================================
--- hadoop/avro/trunk/src/c/json/fail/missing_colon (added)
+++ hadoop/avro/trunk/src/c/json/fail/missing_colon Tue Jul 14 22:59:22 2009
@@ -0,0 +1 @@
+{"Missing colon" null}
\ No newline at end of file

Added: hadoop/avro/trunk/src/c/json/fail/naked_array_value
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/c/json/fail/naked_array_value?rev=794110&view=auto
==============================================================================
--- hadoop/avro/trunk/src/c/json/fail/naked_array_value (added)
+++ hadoop/avro/trunk/src/c/json/fail/naked_array_value Tue Jul 14 22:59:22 2009
@@ -0,0 +1 @@
+[\naked]

Added: hadoop/avro/trunk/src/c/json/fail/object_array_mismatch
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/c/json/fail/object_array_mismatch?rev=794110&view=auto
==============================================================================
--- hadoop/avro/trunk/src/c/json/fail/object_array_mismatch (added)
+++ hadoop/avro/trunk/src/c/json/fail/object_array_mismatch Tue Jul 14 22:59:22 2009
@@ -0,0 +1 @@
+["mismatch"}
\ No newline at end of file

Added: hadoop/avro/trunk/src/c/json/fail/single_quote
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/c/json/fail/single_quote?rev=794110&view=auto
==============================================================================
--- hadoop/avro/trunk/src/c/json/fail/single_quote (added)
+++ hadoop/avro/trunk/src/c/json/fail/single_quote Tue Jul 14 22:59:22 2009
@@ -0,0 +1 @@
+['single quote']
\ No newline at end of file

Added: hadoop/avro/trunk/src/c/json/fail/string_bad_u1_value
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/c/json/fail/string_bad_u1_value?rev=794110&view=auto
==============================================================================
--- hadoop/avro/trunk/src/c/json/fail/string_bad_u1_value (added)
+++ hadoop/avro/trunk/src/c/json/fail/string_bad_u1_value Tue Jul 14 22:59:22 2009
@@ -0,0 +1,2 @@
+{ "good" : "\uDEAD",
+  "bad" : "\u!BAD" }

Added: hadoop/avro/trunk/src/c/json/fail/string_bad_u2_value
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/c/json/fail/string_bad_u2_value?rev=794110&view=auto
==============================================================================
--- hadoop/avro/trunk/src/c/json/fail/string_bad_u2_value (added)
+++ hadoop/avro/trunk/src/c/json/fail/string_bad_u2_value Tue Jul 14 22:59:22 2009
@@ -0,0 +1,2 @@
+{ "good" : "\uDEAD",
+  "bad" : "\uB!AD" }

Added: hadoop/avro/trunk/src/c/json/fail/string_bad_u3_value
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/c/json/fail/string_bad_u3_value?rev=794110&view=auto
==============================================================================
--- hadoop/avro/trunk/src/c/json/fail/string_bad_u3_value (added)
+++ hadoop/avro/trunk/src/c/json/fail/string_bad_u3_value Tue Jul 14 22:59:22 2009
@@ -0,0 +1,2 @@
+{ "good" : "\uDEAD",
+  "bad" : "\uBA!D" }

Added: hadoop/avro/trunk/src/c/json/fail/string_bad_u4_value
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/c/json/fail/string_bad_u4_value?rev=794110&view=auto
==============================================================================
--- hadoop/avro/trunk/src/c/json/fail/string_bad_u4_value (added)
+++ hadoop/avro/trunk/src/c/json/fail/string_bad_u4_value Tue Jul 14 22:59:22 2009
@@ -0,0 +1,2 @@
+{ "good" : "\uDEAD",
+  "bad" : "\uBAD!" }

Added: hadoop/avro/trunk/src/c/json/fail/string_illegal_escape
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/c/json/fail/string_illegal_escape?rev=794110&view=auto
==============================================================================
--- hadoop/avro/trunk/src/c/json/fail/string_illegal_escape (added)
+++ hadoop/avro/trunk/src/c/json/fail/string_illegal_escape Tue Jul 14 22:59:22 2009
@@ -0,0 +1 @@
+{ "bad" : "\z" }

Added: hadoop/avro/trunk/src/c/json/fail/tab_char_in_string
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/c/json/fail/tab_char_in_string?rev=794110&view=auto
==============================================================================
--- hadoop/avro/trunk/src/c/json/fail/tab_char_in_string (added)
+++ hadoop/avro/trunk/src/c/json/fail/tab_char_in_string Tue Jul 14 22:59:22 2009
@@ -0,0 +1 @@
+["tab\   character\   in\  string\  "]
\ No newline at end of file

Added: hadoop/avro/trunk/src/c/json/fail/tab_character_in_string
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/c/json/fail/tab_character_in_string?rev=794110&view=auto
==============================================================================
--- hadoop/avro/trunk/src/c/json/fail/tab_character_in_string (added)
+++ hadoop/avro/trunk/src/c/json/fail/tab_character_in_string Tue Jul 14 22:59:22 2009
@@ -0,0 +1 @@
+["	tab	character	in	string	"]
\ No newline at end of file

Added: hadoop/avro/trunk/src/c/json/fail/unquoted_object_key
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/c/json/fail/unquoted_object_key?rev=794110&view=auto
==============================================================================
--- hadoop/avro/trunk/src/c/json/fail/unquoted_object_key (added)
+++ hadoop/avro/trunk/src/c/json/fail/unquoted_object_key Tue Jul 14 22:59:22 2009
@@ -0,0 +1 @@
+{unquoted_key: "<- Keys should be quoted"}

Added: hadoop/avro/trunk/src/c/json/pass/deep
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/c/json/pass/deep?rev=794110&view=auto
==============================================================================
--- hadoop/avro/trunk/src/c/json/pass/deep (added)
+++ hadoop/avro/trunk/src/c/json/pass/deep Tue Jul 14 22:59:22 2009
@@ -0,0 +1 @@
+[[[[[[[[[[[[[[[[[[[[[[[[[[[[[["Think deep thoughts"]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]

Added: hadoop/avro/trunk/src/c/json/pass/json_org_example1
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/c/json/pass/json_org_example1?rev=794110&view=auto
==============================================================================
--- hadoop/avro/trunk/src/c/json/pass/json_org_example1 (added)
+++ hadoop/avro/trunk/src/c/json/pass/json_org_example1 Tue Jul 14 22:59:22 2009
@@ -0,0 +1,88 @@
+{"web-app": {
+  "servlet": [   
+    {
+      "servlet-name": "cofaxCDS",
+      "servlet-class": "org.cofax.cds.CDSServlet",
+      "init-param": {
+        "configGlossary:installationAt": "Philadelphia, PA",
+        "configGlossary:adminEmail": "ksm@pobox.com",
+        "configGlossary:poweredBy": "Cofax",
+        "configGlossary:poweredByIcon": "/images/cofax.gif",
+        "configGlossary:staticPath": "/content/static",
+        "templateProcessorClass": "org.cofax.WysiwygTemplate",
+        "templateLoaderClass": "org.cofax.FilesTemplateLoader",
+        "templatePath": "templates",
+        "templateOverridePath": "",
+        "defaultListTemplate": "listTemplate.htm",
+        "defaultFileTemplate": "articleTemplate.htm",
+        "useJSP": false,
+        "jspListTemplate": "listTemplate.jsp",
+        "jspFileTemplate": "articleTemplate.jsp",
+        "cachePackageTagsTrack": 200,
+        "cachePackageTagsStore": 200,
+        "cachePackageTagsRefresh": 60,
+        "cacheTemplatesTrack": 100,
+        "cacheTemplatesStore": 50,
+        "cacheTemplatesRefresh": 15,
+        "cachePagesTrack": 200,
+        "cachePagesStore": 100,
+        "cachePagesRefresh": 10,
+        "cachePagesDirtyRead": 10,
+        "searchEngineListTemplate": "forSearchEnginesList.htm",
+        "searchEngineFileTemplate": "forSearchEngines.htm",
+        "searchEngineRobotsDb": "WEB-INF/robots.db",
+        "useDataStore": true,
+        "dataStoreClass": "org.cofax.SqlDataStore",
+        "redirectionClass": "org.cofax.SqlRedirection",
+        "dataStoreName": "cofax",
+        "dataStoreDriver": "com.microsoft.jdbc.sqlserver.SQLServerDriver",
+        "dataStoreUrl": "jdbc:microsoft:sqlserver://LOCALHOST:1433;DatabaseName=goon",
+        "dataStoreUser": "sa",
+        "dataStorePassword": "dataStoreTestQuery",
+        "dataStoreTestQuery": "SET NOCOUNT ON;select test='test';",
+        "dataStoreLogFile": "/usr/local/tomcat/logs/datastore.log",
+        "dataStoreInitConns": 10,
+        "dataStoreMaxConns": 100,
+        "dataStoreConnUsageLimit": 100,
+        "dataStoreLogLevel": "debug",
+        "maxUrlLength": 500}},
+    {
+      "servlet-name": "cofaxEmail",
+      "servlet-class": "org.cofax.cds.EmailServlet",
+      "init-param": {
+      "mailHost": "mail1",
+      "mailHostOverride": "mail2"}},
+    {
+      "servlet-name": "cofaxAdmin",
+      "servlet-class": "org.cofax.cds.AdminServlet"},
+ 
+    {
+      "servlet-name": "fileServlet",
+      "servlet-class": "org.cofax.cds.FileServlet"},
+    {
+      "servlet-name": "cofaxTools",
+      "servlet-class": "org.cofax.cms.CofaxToolsServlet",
+      "init-param": {
+        "templatePath": "toolstemplates/",
+        "log": 1,
+        "logLocation": "/usr/local/tomcat/logs/CofaxTools.log",
+        "logMaxSize": "",
+        "dataLog": 1,
+        "dataLogLocation": "/usr/local/tomcat/logs/dataLog.log",
+        "dataLogMaxSize": "",
+        "removePageCache": "/content/admin/remove?cache=pages&id=",
+        "removeTemplateCache": "/content/admin/remove?cache=templates&id=",
+        "fileTransferFolder": "/usr/local/tomcat/webapps/content/fileTransferFolder",
+        "lookInContext": 1,
+        "adminGroupID": 4,
+        "betaServer": true}}],
+  "servlet-mapping": {
+    "cofaxCDS": "/",
+    "cofaxEmail": "/cofaxutil/aemail/*",
+    "cofaxAdmin": "/admin/*",
+    "fileServlet": "/static/*",
+    "cofaxTools": "/tools/*"},
+ 
+  "taglib": {
+    "taglib-uri": "cofax.tld",
+    "taglib-location": "/WEB-INF/tlds/cofax.tld"}}}

Added: hadoop/avro/trunk/src/c/json/pass/json_org_example2
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/c/json/pass/json_org_example2?rev=794110&view=auto
==============================================================================
--- hadoop/avro/trunk/src/c/json/pass/json_org_example2 (added)
+++ hadoop/avro/trunk/src/c/json/pass/json_org_example2 Tue Jul 14 22:59:22 2009
@@ -0,0 +1,26 @@
+{"widget": {
+    "debug": "on",
+    "window": {
+        "title": "Sample Konfabulator Widget",
+        "name": "main_window",
+        "width": 500,
+        "height": 500
+    },
+    "image": { 
+        "src": "Images/Sun.png",
+        "name": "sun1",
+        "hOffset": 250,
+        "vOffset": 250,
+        "alignment": "center"
+    },
+    "text": {
+        "data": "Click Here",
+        "size": 36,
+        "style": "bold",
+        "name": "text1",
+        "hOffset": 250,
+        "vOffset": 100,
+        "alignment": "center",
+        "onMouseUp": "sun1.opacity = (sun1.opacity / 100) * 90;"
+    }
+}}    

Added: hadoop/avro/trunk/src/c/json/pass/json_org_example3
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/c/json/pass/json_org_example3?rev=794110&view=auto
==============================================================================
--- hadoop/avro/trunk/src/c/json/pass/json_org_example3 (added)
+++ hadoop/avro/trunk/src/c/json/pass/json_org_example3 Tue Jul 14 22:59:22 2009
@@ -0,0 +1,23 @@
+{
+    "glossary": {
+        "title": "example glossary",
+		"GlossDiv": {
+            "title": "S",
+			"GlossList": {
+                "GlossEntry": {
+                    "ID": "SGML",
+					"SortAs": "SGML",
+					"GlossTerm": "Standard Generalized Markup Language",
+					"Acronym": "SGML",
+					"Abbrev": "ISO 8879:1986",
+					"GlossDef": {
+                        "para": "A meta-markup language, used to create markup languages such as DocBook.",
+						"GlossSeeAlso": ["GML", "XML"]
+                    },
+					"GlossSee": "markup"
+                }
+            }
+        }
+    }
+}
+

Added: hadoop/avro/trunk/src/c/json/pass/json_org_test1
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/c/json/pass/json_org_test1?rev=794110&view=auto
==============================================================================
--- hadoop/avro/trunk/src/c/json/pass/json_org_test1 (added)
+++ hadoop/avro/trunk/src/c/json/pass/json_org_test1 Tue Jul 14 22:59:22 2009
@@ -0,0 +1,58 @@
+[
+    "JSON Test Pattern pass1",
+    {"object with 1 member":["array with 1 element"]},
+    {},
+    [],
+    -42,
+    true,
+    false,
+    null,
+    {
+        "integer": 1234567890,
+        "real": -9876.543210,
+        "e": 0.123456789e-12,
+        "E": 1.234567890E+34,
+        "":  23456789012E66,
+        "zero": 0,
+        "one": 1,
+        "space": " ",
+        "quote": "\"",
+        "backslash": "\\",
+        "controls": "\b\f\n\r\t",
+        "slash": "/ & \/",
+        "alpha": "abcdefghijklmnopqrstuvwyz",
+        "ALPHA": "ABCDEFGHIJKLMNOPQRSTUVWYZ",
+        "digit": "0123456789",
+        "0123456789": "digit",
+        "special": "`1~!@#$%^&*()_+-={':[,]}|;.</>?",
+        "hex": "\u0123\u4567\u89AB\uCDEF\uabcd\uef4A",
+        "true": true,
+        "false": false,
+        "null": null,
+        "array":[  ],
+        "object":{  },
+        "address": "50 St. James Street",
+        "url": "http://www.JSON.org/",
+        "comment": "// /* <!-- --",
+        "# -- --> */": " ",
+        " s p a c e d " :[1,2 , 3
+
+,
+
+4 , 5        ,          6           ,7        ],"compact":[1,2,3,4,5,6,7],
+        "jsontext": "{\"object with 1 member\":[\"array with 1 element\"]}",
+        "quotes": "&#34; \u0022 %22 0x22 034 &#x22;",
+        "\/\\\"\uCAFE\uBABE\uAB98\uFCDE\ubcda\uef4A\b\f\n\r\t`1~!@#$%^&*()_+-=[]{}|;:',./<>?"
+: "A key can be any string"
+    },
+    0.5 ,98.6
+,
+99.44
+,
+
+1066,
+1e1,
+0.1e1,
+1e-1,
+1e00,2e+00,2e-00
+,"rosebud"]

Added: hadoop/avro/trunk/src/c/json/pass/object_with_duplicate_keys
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/c/json/pass/object_with_duplicate_keys?rev=794110&view=auto
==============================================================================
--- hadoop/avro/trunk/src/c/json/pass/object_with_duplicate_keys (added)
+++ hadoop/avro/trunk/src/c/json/pass/object_with_duplicate_keys Tue Jul 14 22:59:22 2009
@@ -0,0 +1,3 @@
+{ "Dup" : "<-- first value",
+  "Dup" : "<-- second value", 
+  "Dup" : "success"}

Modified: hadoop/avro/trunk/src/c/json/pass/object_with_multiple_members
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/c/json/pass/object_with_multiple_members?rev=794110&r1=794109&r2=794110&view=diff
==============================================================================
--- hadoop/avro/trunk/src/c/json/pass/object_with_multiple_members (original)
+++ hadoop/avro/trunk/src/c/json/pass/object_with_multiple_members Tue Jul 14 22:59:22 2009
@@ -1,3 +1,4 @@
 { "one" : "value",
-"two" : "value",
-"three" : "value" }
+"Schöne" : "value ™ ∆",
+"three" : "Schöne Grüße",
+"four" : "This is line one.\n\tThis is line two.\nThis is line three.\n\t\u0041\u0042\u0043"
}

Added: hadoop/avro/trunk/src/c/json/pass/rfc_example
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/c/json/pass/rfc_example?rev=794110&view=auto
==============================================================================
--- hadoop/avro/trunk/src/c/json/pass/rfc_example (added)
+++ hadoop/avro/trunk/src/c/json/pass/rfc_example Tue Jul 14 22:59:22 2009
@@ -0,0 +1,22 @@
+[
+      {
+         "precision": "zip",
+         "Latitude":  37.7668,
+         "Longitude": -122.3959,
+         "Address":   "",
+         "City":      "SAN FRANCISCO",
+         "State":     "CA",
+         "Zip":       "94107",
+         "Country":   "US"
+      },
+      {
+         "precision": "zip",
+         "Latitude":  37.371991,
+         "Longitude": -122.026020,
+         "Address":   "",
+         "City":      "SUNNYVALE",
+         "State":     "CA",
+         "Zip":       "94085",
+         "Country":   "US"
+      }
+]

Added: hadoop/avro/trunk/src/c/json/pass/rfc_example2
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/c/json/pass/rfc_example2?rev=794110&view=auto
==============================================================================
--- hadoop/avro/trunk/src/c/json/pass/rfc_example2 (added)
+++ hadoop/avro/trunk/src/c/json/pass/rfc_example2 Tue Jul 14 22:59:22 2009
@@ -0,0 +1,13 @@
+{
+      "Image": {
+          "Width":  800,
+          "Height": 600,
+          "Title":  "View from 15th Floor",
+          "Thumbnail": {
+              "Url":    "http://www.example.com/image/481989943",
+              "Height": 125,
+              "Width":  "100"
+          },
+          "IDs": [116, 943, 234, 38793]
+        }
+}

Added: hadoop/avro/trunk/src/c/json/pass/string_u_value
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/c/json/pass/string_u_value?rev=794110&view=auto
==============================================================================
--- hadoop/avro/trunk/src/c/json/pass/string_u_value (added)
+++ hadoop/avro/trunk/src/c/json/pass/string_u_value Tue Jul 14 22:59:22 2009
@@ -0,0 +1 @@
+{ "good" : "\u0041\u0042\u0043-DEF" }

Modified: hadoop/avro/trunk/src/c/json_schema.y
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/c/json_schema.y?rev=794110&r1=794109&r2=794110&view=diff
==============================================================================
--- hadoop/avro/trunk/src/c/json_schema.y (original)
+++ hadoop/avro/trunk/src/c/json_schema.y Tue Jul 14 22:59:22 2009
@@ -26,6 +26,7 @@
 %include {
 #include <stdio.h>
 #include <assert.h>
+#include <wchar.h>
 #include "json.h"
 #include "json_tokenizer.h"
 
@@ -107,7 +108,7 @@
 members(A) ::= member_list(B) STRING(C) COLON value(D).
 {
      A = B;
-     apr_hash_set(B, C->string_value, APR_HASH_KEY_STRING, D);
+     apr_hash_set(B, C->string_value, wcslen(C->string_value) * sizeof(wchar_t), D);
 }
 members(A) ::= member_list(B).
 {
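
With wide-character keys, the grammar now passes a byte count (wcslen() * sizeof(wchar_t)) to apr_hash_set() instead of APR_HASH_KEY_STRING, so any lookup has to mirror that convention. A sketch, not from the commit; `members` stands for whichever apr_hash_t holds the parsed object's members.

    #include <wchar.h>
    #include <apr_hash.h>
    #include "json.h"

    /* Look up a member stored by the rule above; the key length must be
       computed exactly as in the apr_hash_set() call. */
    static JSON_value *
    member_get (apr_hash_t *members, const wchar_t *key)
    {
      return (JSON_value *) apr_hash_get (members, key,
                                          wcslen (key) * sizeof (wchar_t));
    }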

Modified: hadoop/avro/trunk/src/c/json_tokenizer.c
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/c/json_tokenizer.c?rev=794110&r1=794109&r2=794110&view=diff
==============================================================================
--- hadoop/avro/trunk/src/c/json_tokenizer.c (original)
+++ hadoop/avro/trunk/src/c/json_tokenizer.c Tue Jul 14 22:59:22 2009
@@ -20,41 +20,44 @@
 #include <stdio.h>
 #include <stdlib.h>
 #include <string.h>
-#include <ctype.h>
+#include <wchar.h>
+#include <wctype.h>
 
 #include "json_tokenizer.h"
 
 static struct keyword
 {
-  char *z;
+  wchar_t *z;
   int len;
   int tokenType;
 } keywords[] =
 {
   {
-  "true", 4, TK_TRUE},
+  L"true", 4, TK_TRUE},
   {
-  "false", 5, TK_FALSE},
+  L"false", 5, TK_FALSE},
   {
-  "null", 4, TK_NULL}
+  L"null", 4, TK_NULL}
 };
 
 #define NUM_KEYWORDS (sizeof(keywords)/sizeof(keywords[0]))
 
 int
-json_get_token (const char *z, const unsigned len, int *tokenType,
+json_get_token (const wchar_t * z, const size_t len, int *tokenType,
 		double *number)
 {
-  char *p;
+  wchar_t *p;
   int i;
   if (!z || !tokenType || len == 0 || !number)
     {
       return -1;
     }
 
-  if (isspace (z[0]))
+  *tokenType = TK_ILLEGAL;
+
+  if (iswspace (z[0]))
     {
-      for (i = 1; isspace (z[i]); i++)
+      for (i = 1; iswspace (z[i]); i++)
 	{
 	}
       *tokenType = TK_SPACE;
@@ -65,17 +68,73 @@
     {
     case '"':
       {
-	/* Find the end quote */
+	/* NOTE: See RFC 4627 Section 2.5 */
 	for (i = 1; i < len; i++)
 	  {
-	    /* TODO: escape characters? */
-	    if (z[i] == '"' && z[i - 1] != '\\')
+	    /* Check if we're at the end of the string */
+	    if (z[i] == '"')
 	      {
 		*tokenType = TK_STRING;
 		return i + 1;
 	      }
+	    /* Check for characters that are allowed unescaped */
+	    else if (z[i] == 0x20 || z[i] == 0x21 ||
+		     (z[i] >= 0x23 && z[i] <= 0x5B) ||
+		     (z[i] >= 0x5D && z[i] <= 0x10FFFF))
+	      {
+		continue;
+	      }
+	    /* Check for allowed escaped characters */
+	    else if (z[i] == '\\')
+	      {
+		if (++i >= len)
+		  {
+		    return -1;
+		  }
+
+		switch (z[i])
+		  {
+		  case '"':
+		  case '\\':
+		  case '/':
+		  case 'b':
+		  case 'f':
+		  case 'n':
+		  case 'r':
+		  case 't':
+		    break;
+		  case 'u':
+		    {
+		      int offset;
+		      i += 4;
+		      if (i >= len)
+			{
+			  return -1;
+			}
+
+		      /* Check the four characters following \u are valid hex */
+		      for (offset = 3; offset >= 0; offset--)
+			{
+			  if (!iswxdigit (z[i - offset]))
+			    {
+			      /* Illegal non-hex character after \u */
+			      return -1;
+			    }
+			}
+		      break;
+		    }
+
+		  default:
+		    /* Illegal escape value */
+		    return -1;
+		  }
+	      }
+	    else
+	      {
+		/* Illegal code */
+		return -1;
+	      }
 	  }
-	/* TODO: think about this... */
 	break;
       }
     case ':':
@@ -109,18 +168,20 @@
 	return 1;
       }
     }
-  /* check for keywords */
+
+  /* Check for keywords */
   for (i = 0; i < NUM_KEYWORDS; i++)
     {
       struct keyword *kw = keywords + i;
-      if (strncmp ((char *) z, kw->z, kw->len) == 0)
+      if (wcsncmp (z, kw->z, kw->len) == 0)
 	{
 	  *tokenType = kw->tokenType;
 	  return kw->len;
 	}
     }
+
   /* Check for number */
-  *number = strtod (z, &p);
+  *number = wcstod (z, &p);
   if (p != z)
     {
       *tokenType = TK_NUMBER;
@@ -128,6 +189,6 @@
     }
 
   /* ???? */
-  *tokenType = 0;
-  return 1;
+  *tokenType = TK_ILLEGAL;
+  return -1;
 }
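
The tokenizer's contract changes as well: json_get_token() now takes wide-character input, returns the token length on success, and returns -1 (with *tokenType left at TK_ILLEGAL) for malformed input such as a bad escape. A small driver sketch, not from the commit, assuming it is compiled inside src/c where json_schema.h has already been generated by lemon.

    #include <stdio.h>
    #include <wchar.h>
    #include "json_tokenizer.h"

    static void
    show_token (const wchar_t *z)
    {
      int type = 0, len;
      double num = 0.0;

      len = json_get_token (z, wcslen (z), &type, &num);
      wprintf (L"%-16ls -> length %d, tokenType %d\n", z, len, type);
    }

    int main (void)
    {
      show_token (L"\"\\u0041BC\"");  /* valid string: length runs through the closing quote */
      show_token (L"\"\\z\"");        /* illegal escape: -1, tokenType stays TK_ILLEGAL */
      show_token (L"-9876.5");        /* number, parsed with wcstod() */
      return 0;
    }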

Modified: hadoop/avro/trunk/src/c/json_tokenizer.h
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/c/json_tokenizer.h?rev=794110&r1=794109&r2=794110&view=diff
==============================================================================
--- hadoop/avro/trunk/src/c/json_tokenizer.h (original)
+++ hadoop/avro/trunk/src/c/json_tokenizer.h Tue Jul 14 22:59:22 2009
@@ -20,12 +20,14 @@
 #ifndef JSON_TOKENIZER_H
 #define JSON_TOKENIZER_H
 
+#include <wchar.h>
 #include "json_schema.h"
 
 /* Tokens which are not part of the schema */
 enum json_tokens
 {
-  TK_SPACE = 42424242
+  TK_SPACE = 42424242,
+  TK_ILLEGAL
 };
 
 struct Token
@@ -36,7 +38,7 @@
 };
 typedef struct Token Token;
 
-int json_get_token (const char *z, const unsigned len, int *tokenType,
+int json_get_token (const wchar_t * z, const size_t len, int *tokenType,
 		    double *number);
 
 #endif

Modified: hadoop/avro/trunk/src/c/test_avro_bytes.c
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/c/test_avro_bytes.c?rev=794110&r1=794109&r2=794110&view=diff
==============================================================================
--- hadoop/avro/trunk/src/c/test_avro_bytes.c (original)
+++ hadoop/avro/trunk/src/c/test_avro_bytes.c Tue Jul 14 22:59:22 2009
@@ -39,8 +39,7 @@
   int i, j;
   int64_t len_in, len_out;
 
-  apr_initialize ();
-  atexit (apr_terminate);
+  avro_initialize ();
 
   srand (time (NULL));
 

Modified: hadoop/avro/trunk/src/c/test_avro_float_double.c
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/c/test_avro_float_double.c?rev=794110&r1=794109&r2=794110&view=diff
==============================================================================
--- hadoop/avro/trunk/src/c/test_avro_float_double.c (original)
+++ hadoop/avro/trunk/src/c/test_avro_float_double.c Tue Jul 14 22:59:22 2009
@@ -37,8 +37,7 @@
   double d_in, d_out;
   int i;
 
-  apr_initialize ();
-  atexit (apr_terminate);
+  avro_initialize ();
 
   srand (time (NULL));
 

Modified: hadoop/avro/trunk/src/c/test_avro_raw.c
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/c/test_avro_raw.c?rev=794110&r1=794109&r2=794110&view=diff
==============================================================================
--- hadoop/avro/trunk/src/c/test_avro_raw.c (original)
+++ hadoop/avro/trunk/src/c/test_avro_raw.c Tue Jul 14 22:59:22 2009
@@ -37,8 +37,7 @@
   int64_t i64_in, i64_out;
   int i;
 
-  apr_initialize ();
-  atexit (apr_terminate);
+  avro_initialize ();
 
   srand (time (NULL));
 

Modified: hadoop/avro/trunk/src/c/test_avro_string.c
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/c/test_avro_string.c?rev=794110&r1=794109&r2=794110&view=diff
==============================================================================
--- hadoop/avro/trunk/src/c/test_avro_string.c (original)
+++ hadoop/avro/trunk/src/c/test_avro_string.c Tue Jul 14 22:59:22 2009
@@ -41,8 +41,7 @@
     "Test"
   };
 
-  apr_initialize ();
-  atexit (apr_terminate);
+  avro_initialize ();
 
   for (i = 0; i < sizeof (test_strings) / sizeof (test_strings[0]); i++)
     {

Modified: hadoop/avro/trunk/src/c/test_avro_zigzag.c
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/c/test_avro_zigzag.c?rev=794110&r1=794109&r2=794110&view=diff
==============================================================================
--- hadoop/avro/trunk/src/c/test_avro_zigzag.c (original)
+++ hadoop/avro/trunk/src/c/test_avro_zigzag.c Tue Jul 14 22:59:22 2009
@@ -55,8 +55,7 @@
   int64_t value_in, value_out;
   int i, j;
 
-  apr_initialize ();
-  atexit (apr_terminate);
+  avro_initialize ();
 
   apr_pool_create (&pool, NULL);
 

Modified: hadoop/avro/trunk/src/c/test_json_parser.c
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/c/test_json_parser.c?rev=794110&r1=794109&r2=794110&view=diff
==============================================================================
--- hadoop/avro/trunk/src/c/test_json_parser.c (original)
+++ hadoop/avro/trunk/src/c/test_json_parser.c Tue Jul 14 22:59:22 2009
@@ -26,6 +26,7 @@
 #include <apr.h>
 #include <apr_pools.h>
 
+#include "avro.h"
 #include "json.h"
 
 #define TRACING 1
@@ -52,8 +53,8 @@
   FILE *file;
   DIR *dir;
   struct dirent *dent;
-  int i, fd, processed;
-  char buf[1024];
+  int i, processed;
+  char buf[4096];
   apr_pool_t *pool;
   JSON_value *value;
   char path[256];
@@ -63,8 +64,7 @@
       srcdir = ".";
     }
 
-  apr_initialize ();
-  atexit (apr_terminate);
+  avro_initialize ();
 
   apr_pool_create (&pool, NULL);
 
@@ -101,6 +101,7 @@
 		  fprintf (stderr, "Can't open file");
 		  return EXIT_FAILURE;
 		}
+
 	      processed = 0;
 	      while (!feof (file))
 		{
@@ -113,11 +114,21 @@
 	      fclose (file);
 
 	      value = JSON_parse (pool, buf, processed);
-	      JSONParserTrace (trace, buf);
-	      if (!value && !td->shouldFail)
+	      if (!value)
 		{
-		  return EXIT_FAILURE;
+		  if (!td->shouldFail)
+		    {
+		      return EXIT_FAILURE;
+		    }
+		}
+	      else
+		{
+		  if (td->shouldFail)
+		    {
+		      return EXIT_FAILURE;
+		    }
 		}
+	      /* JSONParserTrace (trace, buf); */
 	      /* JSON_print (stderr, value); */
 	    }
 	  dent = readdir (dir);


