common/jsonapi: support libpq as a client
author Peter Eisentraut <peter@eisentraut.org>
Wed, 11 Sep 2024 06:28:35 +0000 (08:28 +0200)
committer Peter Eisentraut <peter@eisentraut.org>
Wed, 11 Sep 2024 07:01:07 +0000 (09:01 +0200)
Based on a patch by Michael Paquier.

For libpq, use PQExpBuffer instead of StringInfo. This requires us to
track allocation failures so that we can return JSON_OUT_OF_MEMORY as
needed rather than exit()ing.

Author: Jacob Champion <jacob.champion@enterprisedb.com>
Co-authored-by: Michael Paquier <michael@paquier.xyz>
Co-authored-by: Daniel Gustafsson <daniel@yesql.se>
Reviewed-by: Peter Eisentraut <peter@eisentraut.org>
Discussion: https://www.postgresql.org/message-id/flat/d1b467a78e0e36ed85a09adf979d04cf124a9d4b[email protected]

src/common/Makefile
src/common/jsonapi.c
src/common/meson.build
src/include/common/jsonapi.h
src/test/modules/test_json_parser/Makefile
src/test/modules/test_json_parser/meson.build
src/test/modules/test_json_parser/t/001_test_json_parser_incremental.pl
src/test/modules/test_json_parser/t/002_inline.pl
src/test/modules/test_json_parser/t/003_test_semantic.pl

index d4f4b573c08ee69651d22641e6e3b0bd8a865f85..1e2b91c83c4c440f100daf6bc99e18d5a26beb0e 100644 (file)
@@ -103,14 +103,20 @@ endif
 # a matter of policy, because it is not appropriate for general purpose
 # libraries such as libpq to report errors directly.  fe_memutils.c is
 # excluded because libpq must not exit() on allocation failure.
+#
+# The excluded files for _shlib builds are pulled into their own static
+# library, for the benefit of test programs that need not follow the
+# shlib rules.
 OBJS_FRONTEND_SHLIB = \
        $(OBJS_COMMON) \
        restricted_token.o \
        sprompt.o
-OBJS_FRONTEND = \
-       $(OBJS_FRONTEND_SHLIB) \
+OBJS_EXCLUDED_SHLIB = \
        fe_memutils.o \
        logging.o
+OBJS_FRONTEND = \
+       $(OBJS_FRONTEND_SHLIB) \
+       $(OBJS_EXCLUDED_SHLIB)
 
 # foo.o, foo_shlib.o, and foo_srv.o are all built from foo.c
 OBJS_SHLIB = $(OBJS_FRONTEND_SHLIB:%.o=%_shlib.o)
@@ -121,7 +127,7 @@ TOOLSDIR = $(top_srcdir)/src/tools
 GEN_KEYWORDLIST = $(PERL) -I $(TOOLSDIR) $(TOOLSDIR)/gen_keywordlist.pl
 GEN_KEYWORDLIST_DEPS = $(TOOLSDIR)/gen_keywordlist.pl $(TOOLSDIR)/PerfectHash.pm
 
-all: libpgcommon.a libpgcommon_shlib.a libpgcommon_srv.a
+all: libpgcommon.a libpgcommon_shlib.a libpgcommon_srv.a libpgcommon_excluded_shlib.a
 
 # libpgcommon is needed by some contrib
 install: all installdirs
@@ -154,6 +160,11 @@ libpgcommon_shlib.a: $(OBJS_SHLIB)
        rm -f $@
        $(AR) $(AROPT) $@ $^
 
+# The JSON API normally exits on out-of-memory; disable that behavior for shared
+# library builds. This requires libpq's pqexpbuffer.h.
+jsonapi_shlib.o: override CPPFLAGS += -DJSONAPI_USE_PQEXPBUFFER
+jsonapi_shlib.o: override CPPFLAGS += -I$(libpq_srcdir)
+
 # Because this uses its own compilation rule, it doesn't use the
 # dependency tracking logic from Makefile.global.  To make sure that
 # dependency tracking works anyway for the *_shlib.o files, depend on
@@ -163,6 +174,10 @@ libpgcommon_shlib.a: $(OBJS_SHLIB)
 %_shlib.o: %.c %.o
        $(CC) $(CFLAGS) $(CFLAGS_SL) $(CPPFLAGS) -c $< -o $@
 
+libpgcommon_excluded_shlib.a: $(OBJS_EXCLUDED_SHLIB)
+       rm -f $@
+       $(AR) $(AROPT) $@ $^
+
 #
 # Server versions of object files
 #
@@ -196,6 +211,6 @@ RYU_OBJS = $(RYU_FILES) $(RYU_FILES:%.o=%_shlib.o) $(RYU_FILES:%.o=%_srv.o)
 $(RYU_OBJS): CFLAGS += $(PERMIT_DECLARATION_AFTER_STATEMENT)
 
 clean distclean:
-       rm -f libpgcommon.a libpgcommon_shlib.a libpgcommon_srv.a
+       rm -f libpgcommon.a libpgcommon_shlib.a libpgcommon_srv.a libpgcommon_excluded_shlib.a
        rm -f $(OBJS_FRONTEND) $(OBJS_SHLIB) $(OBJS_SRV)
        rm -f kwlist_d.h
index 2ffcaaa6fd1d09878c2e63cbf6967aeb43f03bf2..6892a4be4e05f2731e1759b84d9049d4c07c10e0 100644 (file)
 #include "mb/pg_wchar.h"
 #include "port/pg_lfind.h"
 
-#ifndef FRONTEND
+#ifdef JSONAPI_USE_PQEXPBUFFER
+#include "pqexpbuffer.h"
+#else
+#include "lib/stringinfo.h"
 #include "miscadmin.h"
 #endif
 
+/*
+ * By default, we will use palloc/pfree along with StringInfo.  In libpq,
+ * use malloc and PQExpBuffer, and return JSON_OUT_OF_MEMORY on out-of-memory.
+ */
+#ifdef JSONAPI_USE_PQEXPBUFFER
+
+#define STRDUP(s) strdup(s)
+#define ALLOC(size) malloc(size)
+#define ALLOC0(size) calloc(1, size)
+#define REALLOC realloc
+#define FREE(s) free(s)
+
+#define jsonapi_appendStringInfo                       appendPQExpBuffer
+#define jsonapi_appendBinaryStringInfo         appendBinaryPQExpBuffer
+#define jsonapi_appendStringInfoChar           appendPQExpBufferChar
+/* XXX should we add a macro version to PQExpBuffer? */
+#define jsonapi_appendStringInfoCharMacro      appendPQExpBufferChar
+#define jsonapi_makeStringInfo                         createPQExpBuffer
+#define jsonapi_initStringInfo                         initPQExpBuffer
+#define jsonapi_resetStringInfo                                resetPQExpBuffer
+#define jsonapi_termStringInfo                         termPQExpBuffer
+#define jsonapi_destroyStringInfo                      destroyPQExpBuffer
+
+#else                                                  /* !JSONAPI_USE_PQEXPBUFFER */
+
+#define STRDUP(s) pstrdup(s)
+#define ALLOC(size) palloc(size)
+#define ALLOC0(size) palloc0(size)
+#define REALLOC repalloc
+
+#ifdef FRONTEND
+#define FREE pfree
+#else
+/*
+ * Backend pfree() doesn't handle NULL pointers like the frontend's does; smooth
+ * that over to reduce mental gymnastics. Avoid multiple evaluation of the macro
+ * argument to avoid future hair-pulling.
+ */
+#define FREE(s) do {   \
+       void *__v = (s);        \
+       if (__v)                        \
+               pfree(__v);             \
+} while (0)
+#endif
+
+#define jsonapi_appendStringInfo                       appendStringInfo
+#define jsonapi_appendBinaryStringInfo         appendBinaryStringInfo
+#define jsonapi_appendStringInfoChar           appendStringInfoChar
+#define jsonapi_appendStringInfoCharMacro      appendStringInfoCharMacro
+#define jsonapi_makeStringInfo                         makeStringInfo
+#define jsonapi_initStringInfo                         initStringInfo
+#define jsonapi_resetStringInfo                                resetStringInfo
+#define jsonapi_termStringInfo(s)                      pfree((s)->data)
+#define jsonapi_destroyStringInfo                      destroyStringInfo
+
+#endif                                                 /* JSONAPI_USE_PQEXPBUFFER */
+
 /*
  * The context of the parser is maintained by the recursive descent
  * mechanism, but is passed explicitly to the error reporting routine
@@ -103,7 +163,7 @@ struct JsonIncrementalState
 {
        bool            is_last_chunk;
        bool            partial_completed;
-       StringInfoData partial_token;
+       jsonapi_StrValType partial_token;
 };
 
 /*
@@ -219,6 +279,7 @@ static JsonParseErrorType parse_object(JsonLexContext *lex, const JsonSemAction
 static JsonParseErrorType parse_array_element(JsonLexContext *lex, const JsonSemAction *sem);
 static JsonParseErrorType parse_array(JsonLexContext *lex, const JsonSemAction *sem);
 static JsonParseErrorType report_parse_error(JsonParseContext ctx, JsonLexContext *lex);
+static bool allocate_incremental_state(JsonLexContext *lex);
 
 /* the null action object used for pure validation */
 const JsonSemAction nullSemAction =
@@ -227,6 +288,10 @@ const JsonSemAction nullSemAction =
        NULL, NULL, NULL, NULL, NULL
 };
 
+/* sentinels used for out-of-memory conditions */
+static JsonLexContext failed_oom;
+static JsonIncrementalState failed_inc_oom;
+
 /* Parser support routines */
 
 /*
@@ -273,15 +338,11 @@ IsValidJsonNumber(const char *str, size_t len)
 {
        bool            numeric_error;
        size_t          total_len;
-       JsonLexContext dummy_lex;
+       JsonLexContext dummy_lex = {0};
 
        if (len <= 0)
                return false;
 
-       dummy_lex.incremental = false;
-       dummy_lex.inc_state = NULL;
-       dummy_lex.pstack = NULL;
-
        /*
         * json_lex_number expects a leading  '-' to have been eaten already.
         *
@@ -321,6 +382,9 @@ IsValidJsonNumber(const char *str, size_t len)
  * responsible for freeing the returned struct, either by calling
  * freeJsonLexContext() or (in backend environment) via memory context
  * cleanup.
+ *
+ * In shlib code, any out-of-memory failures will be deferred to time
+ * of use; this function is guaranteed to return a valid JsonLexContext.
  */
 JsonLexContext *
 makeJsonLexContextCstringLen(JsonLexContext *lex, const char *json,
@@ -328,7 +392,9 @@ makeJsonLexContextCstringLen(JsonLexContext *lex, const char *json,
 {
        if (lex == NULL)
        {
-               lex = palloc0(sizeof(JsonLexContext));
+               lex = ALLOC0(sizeof(JsonLexContext));
+               if (!lex)
+                       return &failed_oom;
                lex->flags |= JSONLEX_FREE_STRUCT;
        }
        else
@@ -339,15 +405,73 @@ makeJsonLexContextCstringLen(JsonLexContext *lex, const char *json,
        lex->line_number = 1;
        lex->input_length = len;
        lex->input_encoding = encoding;
+       lex->need_escapes = need_escapes;
        if (need_escapes)
        {
-               lex->strval = makeStringInfo();
+               /*
+                * This call can fail in shlib code. We defer error handling to time
+                * of use (json_lex_string()) since we might not need to parse any
+                * strings anyway.
+                */
+               lex->strval = jsonapi_makeStringInfo();
                lex->flags |= JSONLEX_FREE_STRVAL;
        }
 
        return lex;
 }
 
+/*
+ * Allocates the internal bookkeeping structures for incremental parsing. This
+ * can only fail in-band with shlib code.
+ */
+#define JS_STACK_CHUNK_SIZE 64
+#define JS_MAX_PROD_LEN 10             /* more than we need */
+#define JSON_TD_MAX_STACK 6400 /* hard coded for now - this is a REALLY high
+                                                                * number */
+static bool
+allocate_incremental_state(JsonLexContext *lex)
+{
+       void       *pstack,
+                          *prediction,
+                          *fnames,
+                          *fnull;
+
+       lex->inc_state = ALLOC0(sizeof(JsonIncrementalState));
+       pstack = ALLOC(sizeof(JsonParserStack));
+       prediction = ALLOC(JS_STACK_CHUNK_SIZE * JS_MAX_PROD_LEN);
+       fnames = ALLOC(JS_STACK_CHUNK_SIZE * sizeof(char *));
+       fnull = ALLOC(JS_STACK_CHUNK_SIZE * sizeof(bool));
+
+#ifdef JSONAPI_USE_PQEXPBUFFER
+       if (!lex->inc_state
+               || !pstack
+               || !prediction
+               || !fnames
+               || !fnull)
+       {
+               FREE(lex->inc_state);
+               FREE(pstack);
+               FREE(prediction);
+               FREE(fnames);
+               FREE(fnull);
+
+               lex->inc_state = &failed_inc_oom;
+               return false;
+       }
+#endif
+
+       jsonapi_initStringInfo(&(lex->inc_state->partial_token));
+       lex->pstack = pstack;
+       lex->pstack->stack_size = JS_STACK_CHUNK_SIZE;
+       lex->pstack->prediction = prediction;
+       lex->pstack->pred_index = 0;
+       lex->pstack->fnames = fnames;
+       lex->pstack->fnull = fnull;
+
+       lex->incremental = true;
+       return true;
+}
+
 
 /*
  * makeJsonLexContextIncremental
@@ -357,19 +481,20 @@ makeJsonLexContextCstringLen(JsonLexContext *lex, const char *json,
  * we don't need the input, that will be handed in bit by bit to the
  * parse routine. We also need an accumulator for partial tokens in case
  * the boundary between chunks happens to fall in the middle of a token.
+ *
+ * In shlib code, any out-of-memory failures will be deferred to time of use;
+ * this function is guaranteed to return a valid JsonLexContext.
  */
-#define JS_STACK_CHUNK_SIZE 64
-#define JS_MAX_PROD_LEN 10             /* more than we need */
-#define JSON_TD_MAX_STACK 6400 /* hard coded for now - this is a REALLY high
-                                                                * number */
-
 JsonLexContext *
 makeJsonLexContextIncremental(JsonLexContext *lex, int encoding,
                                                          bool need_escapes)
 {
        if (lex == NULL)
        {
-               lex = palloc0(sizeof(JsonLexContext));
+               lex = ALLOC0(sizeof(JsonLexContext));
+               if (!lex)
+                       return &failed_oom;
+
                lex->flags |= JSONLEX_FREE_STRUCT;
        }
        else
@@ -377,42 +502,65 @@ makeJsonLexContextIncremental(JsonLexContext *lex, int encoding,
 
        lex->line_number = 1;
        lex->input_encoding = encoding;
-       lex->incremental = true;
-       lex->inc_state = palloc0(sizeof(JsonIncrementalState));
-       initStringInfo(&(lex->inc_state->partial_token));
-       lex->pstack = palloc(sizeof(JsonParserStack));
-       lex->pstack->stack_size = JS_STACK_CHUNK_SIZE;
-       lex->pstack->prediction = palloc(JS_STACK_CHUNK_SIZE * JS_MAX_PROD_LEN);
-       lex->pstack->pred_index = 0;
-       lex->pstack->fnames = palloc(JS_STACK_CHUNK_SIZE * sizeof(char *));
-       lex->pstack->fnull = palloc(JS_STACK_CHUNK_SIZE * sizeof(bool));
+
+       if (!allocate_incremental_state(lex))
+       {
+               if (lex->flags & JSONLEX_FREE_STRUCT)
+               {
+                       FREE(lex);
+                       return &failed_oom;
+               }
+
+               /* lex->inc_state tracks the OOM failure; we can return here. */
+               return lex;
+       }
+
+       lex->need_escapes = need_escapes;
        if (need_escapes)
        {
-               lex->strval = makeStringInfo();
+               /*
+                * This call can fail in shlib code. We defer error handling to time
+                * of use (json_lex_string()) since we might not need to parse any
+                * strings anyway.
+                */
+               lex->strval = jsonapi_makeStringInfo();
                lex->flags |= JSONLEX_FREE_STRVAL;
        }
+
        return lex;
 }
 
-static inline void
+static inline bool
 inc_lex_level(JsonLexContext *lex)
 {
-       lex->lex_level += 1;
-
-       if (lex->incremental && lex->lex_level >= lex->pstack->stack_size)
+       if (lex->incremental && (lex->lex_level + 1) >= lex->pstack->stack_size)
        {
-               lex->pstack->stack_size += JS_STACK_CHUNK_SIZE;
-               lex->pstack->prediction =
-                       repalloc(lex->pstack->prediction,
-                                        lex->pstack->stack_size * JS_MAX_PROD_LEN);
-               if (lex->pstack->fnames)
-                       lex->pstack->fnames =
-                               repalloc(lex->pstack->fnames,
-                                                lex->pstack->stack_size * sizeof(char *));
-               if (lex->pstack->fnull)
-                       lex->pstack->fnull =
-                               repalloc(lex->pstack->fnull, lex->pstack->stack_size * sizeof(bool));
+               size_t          new_stack_size;
+               char       *new_prediction;
+               char      **new_fnames;
+               bool       *new_fnull;
+
+               new_stack_size = lex->pstack->stack_size + JS_STACK_CHUNK_SIZE;
+
+               new_prediction = REALLOC(lex->pstack->prediction,
+                                                                new_stack_size * JS_MAX_PROD_LEN);
+               new_fnames = REALLOC(lex->pstack->fnames,
+                                                        new_stack_size * sizeof(char *));
+               new_fnull = REALLOC(lex->pstack->fnull, new_stack_size * sizeof(bool));
+
+#ifdef JSONAPI_USE_PQEXPBUFFER
+               if (!new_prediction || !new_fnames || !new_fnull)
+                       return false;
+#endif
+
+               lex->pstack->stack_size = new_stack_size;
+               lex->pstack->prediction = new_prediction;
+               lex->pstack->fnames = new_fnames;
+               lex->pstack->fnull = new_fnull;
        }
+
+       lex->lex_level += 1;
+       return true;
 }
 
 static inline void
@@ -482,24 +630,31 @@ get_fnull(JsonLexContext *lex)
 void
 freeJsonLexContext(JsonLexContext *lex)
 {
+       static const JsonLexContext empty = {0};
+
+       if (!lex || lex == &failed_oom)
+               return;
+
        if (lex->flags & JSONLEX_FREE_STRVAL)
-               destroyStringInfo(lex->strval);
+               jsonapi_destroyStringInfo(lex->strval);
 
        if (lex->errormsg)
-               destroyStringInfo(lex->errormsg);
+               jsonapi_destroyStringInfo(lex->errormsg);
 
        if (lex->incremental)
        {
-               pfree(lex->inc_state->partial_token.data);
-               pfree(lex->inc_state);
-               pfree(lex->pstack->prediction);
-               pfree(lex->pstack->fnames);
-               pfree(lex->pstack->fnull);
-               pfree(lex->pstack);
+               jsonapi_termStringInfo(&lex->inc_state->partial_token);
+               FREE(lex->inc_state);
+               FREE(lex->pstack->prediction);
+               FREE(lex->pstack->fnames);
+               FREE(lex->pstack->fnull);
+               FREE(lex->pstack);
        }
 
        if (lex->flags & JSONLEX_FREE_STRUCT)
-               pfree(lex);
+               FREE(lex);
+       else
+               *lex = empty;
 }
 
 /*
@@ -522,22 +677,13 @@ JsonParseErrorType
 pg_parse_json(JsonLexContext *lex, const JsonSemAction *sem)
 {
 #ifdef FORCE_JSON_PSTACK
-
-       lex->incremental = true;
-       lex->inc_state = palloc0(sizeof(JsonIncrementalState));
-
        /*
         * We don't need partial token processing, there is only one chunk. But we
         * still need to init the partial token string so that freeJsonLexContext
-        * works.
+        * works, so perform the full incremental initialization.
         */
-       initStringInfo(&(lex->inc_state->partial_token));
-       lex->pstack = palloc(sizeof(JsonParserStack));
-       lex->pstack->stack_size = JS_STACK_CHUNK_SIZE;
-       lex->pstack->prediction = palloc(JS_STACK_CHUNK_SIZE * JS_MAX_PROD_LEN);
-       lex->pstack->pred_index = 0;
-       lex->pstack->fnames = palloc(JS_STACK_CHUNK_SIZE * sizeof(char *));
-       lex->pstack->fnull = palloc(JS_STACK_CHUNK_SIZE * sizeof(bool));
+       if (!allocate_incremental_state(lex))
+               return JSON_OUT_OF_MEMORY;
 
        return pg_parse_json_incremental(lex, sem, lex->input, lex->input_length, true);
 
@@ -546,6 +692,8 @@ pg_parse_json(JsonLexContext *lex, const JsonSemAction *sem)
        JsonTokenType tok;
        JsonParseErrorType result;
 
+       if (lex == &failed_oom)
+               return JSON_OUT_OF_MEMORY;
        if (lex->incremental)
                return JSON_INVALID_LEXER_TYPE;
 
@@ -591,13 +739,16 @@ json_count_array_elements(JsonLexContext *lex, int *elements)
        int                     count;
        JsonParseErrorType result;
 
+       if (lex == &failed_oom)
+               return JSON_OUT_OF_MEMORY;
+
        /*
         * It's safe to do this with a shallow copy because the lexical routines
         * don't scribble on the input. They do scribble on the other pointers
         * etc, so doing this with a copy makes that safe.
         */
        memcpy(&copylex, lex, sizeof(JsonLexContext));
-       copylex.strval = NULL;          /* not interested in values here */
+       copylex.need_escapes = false;   /* not interested in values here */
        copylex.lex_level++;
 
        count = 0;
@@ -658,7 +809,8 @@ pg_parse_json_incremental(JsonLexContext *lex,
        JsonParseContext ctx = JSON_PARSE_VALUE;
        JsonParserStack *pstack = lex->pstack;
 
-
+       if (lex == &failed_oom || lex->inc_state == &failed_inc_oom)
+               return JSON_OUT_OF_MEMORY;
        if (!lex->incremental)
                return JSON_INVALID_LEXER_TYPE;
 
@@ -737,7 +889,9 @@ pg_parse_json_incremental(JsonLexContext *lex,
                                                        if (result != JSON_SUCCESS)
                                                                return result;
                                                }
-                                               inc_lex_level(lex);
+
+                                               if (!inc_lex_level(lex))
+                                                       return JSON_OUT_OF_MEMORY;
                                        }
                                        break;
                                case JSON_SEM_OEND:
@@ -766,7 +920,9 @@ pg_parse_json_incremental(JsonLexContext *lex,
                                                        if (result != JSON_SUCCESS)
                                                                return result;
                                                }
-                                               inc_lex_level(lex);
+
+                                               if (!inc_lex_level(lex))
+                                                       return JSON_OUT_OF_MEMORY;
                                        }
                                        break;
                                case JSON_SEM_AEND:
@@ -793,9 +949,11 @@ pg_parse_json_incremental(JsonLexContext *lex,
                                                json_ofield_action ostart = sem->object_field_start;
                                                json_ofield_action oend = sem->object_field_end;
 
-                                               if ((ostart != NULL || oend != NULL) && lex->strval != NULL)
+                                               if ((ostart != NULL || oend != NULL) && lex->need_escapes)
                                                {
-                                                       fname = pstrdup(lex->strval->data);
+                                                       fname = STRDUP(lex->strval->data);
+                                                       if (fname == NULL)
+                                                               return JSON_OUT_OF_MEMORY;
                                                }
                                                set_fname(lex, fname);
                                        }
@@ -883,14 +1041,21 @@ pg_parse_json_incremental(JsonLexContext *lex,
                                                         */
                                                        if (tok == JSON_TOKEN_STRING)
                                                        {
-                                                               if (lex->strval != NULL)
-                                                                       pstack->scalar_val = pstrdup(lex->strval->data);
+                                                               if (lex->need_escapes)
+                                                               {
+                                                                       pstack->scalar_val = STRDUP(lex->strval->data);
+                                                                       if (pstack->scalar_val == NULL)
+                                                                               return JSON_OUT_OF_MEMORY;
+                                                               }
                                                        }
                                                        else
                                                        {
                                                                ptrdiff_t       tlen = (lex->token_terminator - lex->token_start);
 
-                                                               pstack->scalar_val = palloc(tlen + 1);
+                                                               pstack->scalar_val = ALLOC(tlen + 1);
+                                                               if (pstack->scalar_val == NULL)
+                                                                       return JSON_OUT_OF_MEMORY;
+
                                                                memcpy(pstack->scalar_val, lex->token_start, tlen);
                                                                pstack->scalar_val[tlen] = '\0';
                                                        }
@@ -1025,14 +1190,21 @@ parse_scalar(JsonLexContext *lex, const JsonSemAction *sem)
        /* extract the de-escaped string value, or the raw lexeme */
        if (lex_peek(lex) == JSON_TOKEN_STRING)
        {
-               if (lex->strval != NULL)
-                       val = pstrdup(lex->strval->data);
+               if (lex->need_escapes)
+               {
+                       val = STRDUP(lex->strval->data);
+                       if (val == NULL)
+                               return JSON_OUT_OF_MEMORY;
+               }
        }
        else
        {
                int                     len = (lex->token_terminator - lex->token_start);
 
-               val = palloc(len + 1);
+               val = ALLOC(len + 1);
+               if (val == NULL)
+                       return JSON_OUT_OF_MEMORY;
+
                memcpy(val, lex->token_start, len);
                val[len] = '\0';
        }
@@ -1066,8 +1238,12 @@ parse_object_field(JsonLexContext *lex, const JsonSemAction *sem)
 
        if (lex_peek(lex) != JSON_TOKEN_STRING)
                return report_parse_error(JSON_PARSE_STRING, lex);
-       if ((ostart != NULL || oend != NULL) && lex->strval != NULL)
-               fname = pstrdup(lex->strval->data);
+       if ((ostart != NULL || oend != NULL) && lex->need_escapes)
+       {
+               fname = STRDUP(lex->strval->data);
+               if (fname == NULL)
+                       return JSON_OUT_OF_MEMORY;
+       }
        result = json_lex(lex);
        if (result != JSON_SUCCESS)
                return result;
@@ -1123,6 +1299,11 @@ parse_object(JsonLexContext *lex, const JsonSemAction *sem)
        JsonParseErrorType result;
 
 #ifndef FRONTEND
+
+       /*
+        * TODO: clients need some way to put a bound on stack growth. Parse level
+        * limits maybe?
+        */
        check_stack_depth();
 #endif
 
@@ -1312,15 +1493,27 @@ json_lex(JsonLexContext *lex)
        const char *const end = lex->input + lex->input_length;
        JsonParseErrorType result;
 
-       if (lex->incremental && lex->inc_state->partial_completed)
+       if (lex == &failed_oom || lex->inc_state == &failed_inc_oom)
+               return JSON_OUT_OF_MEMORY;
+
+       if (lex->incremental)
        {
-               /*
-                * We just lexed a completed partial token on the last call, so reset
-                * everything
-                */
-               resetStringInfo(&(lex->inc_state->partial_token));
-               lex->token_terminator = lex->input;
-               lex->inc_state->partial_completed = false;
+               if (lex->inc_state->partial_completed)
+               {
+                       /*
+                        * We just lexed a completed partial token on the last call, so
+                        * reset everything
+                        */
+                       jsonapi_resetStringInfo(&(lex->inc_state->partial_token));
+                       lex->token_terminator = lex->input;
+                       lex->inc_state->partial_completed = false;
+               }
+
+#ifdef JSONAPI_USE_PQEXPBUFFER
+               /* Make sure our partial token buffer is valid before using it below. */
+               if (PQExpBufferDataBroken(lex->inc_state->partial_token))
+                       return JSON_OUT_OF_MEMORY;
+#endif
        }
 
        s = lex->token_terminator;
@@ -1331,7 +1524,7 @@ json_lex(JsonLexContext *lex)
                 * We have a partial token. Extend it and if completed lex it by a
                 * recursive call
                 */
-               StringInfo      ptok = &(lex->inc_state->partial_token);
+               jsonapi_StrValType *ptok = &(lex->inc_state->partial_token);
                size_t          added = 0;
                bool            tok_done = false;
                JsonLexContext dummy_lex;
@@ -1358,7 +1551,7 @@ json_lex(JsonLexContext *lex)
                        {
                                char            c = lex->input[i];
 
-                               appendStringInfoCharMacro(ptok, c);
+                               jsonapi_appendStringInfoCharMacro(ptok, c);
                                added++;
                                if (c == '"' && escapes % 2 == 0)
                                {
@@ -1403,7 +1596,7 @@ json_lex(JsonLexContext *lex)
                                                case '8':
                                                case '9':
                                                        {
-                                                               appendStringInfoCharMacro(ptok, cc);
+                                                               jsonapi_appendStringInfoCharMacro(ptok, cc);
                                                                added++;
                                                        }
                                                        break;
@@ -1424,7 +1617,7 @@ json_lex(JsonLexContext *lex)
 
                                if (JSON_ALPHANUMERIC_CHAR(cc))
                                {
-                                       appendStringInfoCharMacro(ptok, cc);
+                                       jsonapi_appendStringInfoCharMacro(ptok, cc);
                                        added++;
                                }
                                else
@@ -1467,6 +1660,7 @@ json_lex(JsonLexContext *lex)
                dummy_lex.input_length = ptok->len;
                dummy_lex.input_encoding = lex->input_encoding;
                dummy_lex.incremental = false;
+               dummy_lex.need_escapes = lex->need_escapes;
                dummy_lex.strval = lex->strval;
 
                partial_result = json_lex(&dummy_lex);
@@ -1622,8 +1816,7 @@ json_lex(JsonLexContext *lex)
                                        if (lex->incremental && !lex->inc_state->is_last_chunk &&
                                                p == lex->input + lex->input_length)
                                        {
-                                               appendBinaryStringInfo(
-                                                                                          &(lex->inc_state->partial_token), s, end - s);
+                                               jsonapi_appendBinaryStringInfo(&(lex->inc_state->partial_token), s, end - s);
                                                return JSON_INCOMPLETE;
                                        }
 
@@ -1680,8 +1873,9 @@ json_lex_string(JsonLexContext *lex)
        do { \
                if (lex->incremental && !lex->inc_state->is_last_chunk) \
                { \
-                       appendBinaryStringInfo(&lex->inc_state->partial_token, \
-                                                                  lex->token_start, end - lex->token_start); \
+                       jsonapi_appendBinaryStringInfo(&lex->inc_state->partial_token, \
+                                                                                  lex->token_start, \
+                                                                                  end - lex->token_start); \
                        return JSON_INCOMPLETE; \
                } \
                lex->token_terminator = s; \
@@ -1694,8 +1888,15 @@ json_lex_string(JsonLexContext *lex)
                return code; \
        } while (0)
 
-       if (lex->strval != NULL)
-               resetStringInfo(lex->strval);
+       if (lex->need_escapes)
+       {
+#ifdef JSONAPI_USE_PQEXPBUFFER
+               /* make sure initialization succeeded */
+               if (lex->strval == NULL)
+                       return JSON_OUT_OF_MEMORY;
+#endif
+               jsonapi_resetStringInfo(lex->strval);
+       }
 
        Assert(lex->input_length > 0);
        s = lex->token_start;
@@ -1732,7 +1933,7 @@ json_lex_string(JsonLexContext *lex)
                                        else
                                                FAIL_AT_CHAR_END(JSON_UNICODE_ESCAPE_FORMAT);
                                }
-                               if (lex->strval != NULL)
+                               if (lex->need_escapes)
                                {
                                        /*
                                         * Combine surrogate pairs.
@@ -1789,19 +1990,19 @@ json_lex_string(JsonLexContext *lex)
 
                                                unicode_to_utf8(ch, (unsigned char *) utf8str);
                                                utf8len = pg_utf_mblen((unsigned char *) utf8str);
-                                               appendBinaryStringInfo(lex->strval, utf8str, utf8len);
+                                               jsonapi_appendBinaryStringInfo(lex->strval, utf8str, utf8len);
                                        }
                                        else if (ch <= 0x007f)
                                        {
                                                /* The ASCII range is the same in all encodings */
-                                               appendStringInfoChar(lex->strval, (char) ch);
+                                               jsonapi_appendStringInfoChar(lex->strval, (char) ch);
                                        }
                                        else
                                                FAIL_AT_CHAR_END(JSON_UNICODE_HIGH_ESCAPE);
 #endif                                                 /* FRONTEND */
                                }
                        }
-                       else if (lex->strval != NULL)
+                       else if (lex->need_escapes)
                        {
                                if (hi_surrogate != -1)
                                        FAIL_AT_CHAR_END(JSON_UNICODE_LOW_SURROGATE);
@@ -1811,22 +2012,22 @@ json_lex_string(JsonLexContext *lex)
                                        case '"':
                                        case '\\':
                                        case '/':
-                                               appendStringInfoChar(lex->strval, *s);
+                                               jsonapi_appendStringInfoChar(lex->strval, *s);
                                                break;
                                        case 'b':
-                                               appendStringInfoChar(lex->strval, '\b');
+                                               jsonapi_appendStringInfoChar(lex->strval, '\b');
                                                break;
                                        case 'f':
-                                               appendStringInfoChar(lex->strval, '\f');
+                                               jsonapi_appendStringInfoChar(lex->strval, '\f');
                                                break;
                                        case 'n':
-                                               appendStringInfoChar(lex->strval, '\n');
+                                               jsonapi_appendStringInfoChar(lex->strval, '\n');
                                                break;
                                        case 'r':
-                                               appendStringInfoChar(lex->strval, '\r');
+                                               jsonapi_appendStringInfoChar(lex->strval, '\r');
                                                break;
                                        case 't':
-                                               appendStringInfoChar(lex->strval, '\t');
+                                               jsonapi_appendStringInfoChar(lex->strval, '\t');
                                                break;
                                        default:
 
@@ -1861,7 +2062,7 @@ json_lex_string(JsonLexContext *lex)
 
                        /*
                         * Skip to the first byte that requires special handling, so we
-                        * can batch calls to appendBinaryStringInfo.
+                        * can batch calls to jsonapi_appendBinaryStringInfo.
                         */
                        while (p < end - sizeof(Vector8) &&
                                   !pg_lfind8('\\', (uint8 *) p, sizeof(Vector8)) &&
@@ -1885,8 +2086,8 @@ json_lex_string(JsonLexContext *lex)
                                }
                        }
 
-                       if (lex->strval != NULL)
-                               appendBinaryStringInfo(lex->strval, s, p - s);
+                       if (lex->need_escapes)
+                               jsonapi_appendBinaryStringInfo(lex->strval, s, p - s);
 
                        /*
                         * s will be incremented at the top of the loop, so set it to just
@@ -1902,6 +2103,11 @@ json_lex_string(JsonLexContext *lex)
                return JSON_UNICODE_LOW_SURROGATE;
        }
 
+#ifdef JSONAPI_USE_PQEXPBUFFER
+       if (lex->need_escapes && PQExpBufferBroken(lex->strval))
+               return JSON_OUT_OF_MEMORY;
+#endif
+
        /* Hooray, we found the end of the string! */
        lex->prev_token_terminator = lex->token_terminator;
        lex->token_terminator = s + 1;
@@ -2019,8 +2225,8 @@ json_lex_number(JsonLexContext *lex, const char *s,
        if (lex->incremental && !lex->inc_state->is_last_chunk &&
                len >= lex->input_length)
        {
-               appendBinaryStringInfo(&lex->inc_state->partial_token,
-                                                          lex->token_start, s - lex->token_start);
+               jsonapi_appendBinaryStringInfo(&lex->inc_state->partial_token,
+                                                                          lex->token_start, s - lex->token_start);
                if (num_err != NULL)
                        *num_err = error;
 
@@ -2096,19 +2302,25 @@ report_parse_error(JsonParseContext ctx, JsonLexContext *lex)
 char *
 json_errdetail(JsonParseErrorType error, JsonLexContext *lex)
 {
+       if (error == JSON_OUT_OF_MEMORY || lex == &failed_oom)
+       {
+               /* Short circuit. Allocating anything for this case is unhelpful. */
+               return _("out of memory");
+       }
+
        if (lex->errormsg)
-               resetStringInfo(lex->errormsg);
+               jsonapi_resetStringInfo(lex->errormsg);
        else
-               lex->errormsg = makeStringInfo();
+               lex->errormsg = jsonapi_makeStringInfo();
 
        /*
         * A helper for error messages that should print the current token. The
         * format must contain exactly one %.*s specifier.
         */
 #define json_token_error(lex, format) \
-       appendStringInfo((lex)->errormsg, _(format), \
-                                        (int) ((lex)->token_terminator - (lex)->token_start), \
-                                        (lex)->token_start);
+       jsonapi_appendStringInfo((lex)->errormsg, _(format), \
+                                                        (int) ((lex)->token_terminator - (lex)->token_start), \
+                                                        (lex)->token_start);
 
        switch (error)
        {
@@ -2127,9 +2339,9 @@ json_errdetail(JsonParseErrorType error, JsonLexContext *lex)
                        json_token_error(lex, "Escape sequence \"\\%.*s\" is invalid.");
                        break;
                case JSON_ESCAPING_REQUIRED:
-                       appendStringInfo(lex->errormsg,
-                                                        _("Character with value 0x%02x must be escaped."),
-                                                        (unsigned char) *(lex->token_terminator));
+                       jsonapi_appendStringInfo(lex->errormsg,
+                                                                        _("Character with value 0x%02x must be escaped."),
+                                                                        (unsigned char) *(lex->token_terminator));
                        break;
                case JSON_EXPECTED_END:
                        json_token_error(lex, "Expected end of input, but found \"%.*s\".");
@@ -2160,6 +2372,9 @@ json_errdetail(JsonParseErrorType error, JsonLexContext *lex)
                case JSON_INVALID_TOKEN:
                        json_token_error(lex, "Token \"%.*s\" is invalid.");
                        break;
+               case JSON_OUT_OF_MEMORY:
+                       /* should have been handled above; use the error path */
+                       break;
                case JSON_UNICODE_CODE_POINT_ZERO:
                        return _("\\u0000 cannot be converted to text.");
                case JSON_UNICODE_ESCAPE_FORMAT:
@@ -2191,15 +2406,23 @@ json_errdetail(JsonParseErrorType error, JsonLexContext *lex)
        }
 #undef json_token_error
 
-       /*
-        * We don't use a default: case, so that the compiler will warn about
-        * unhandled enum values.  But this needs to be here anyway to cover the
-        * possibility of an incorrect input.
-        */
-       if (lex->errormsg->len == 0)
-               appendStringInfo(lex->errormsg,
-                                                "unexpected json parse error type: %d",
-                                                (int) error);
+       /* Note that lex->errormsg can be NULL in shlib code. */
+       if (lex->errormsg && lex->errormsg->len == 0)
+       {
+               /*
+                * We don't use a default: case, so that the compiler will warn about
+                * unhandled enum values.  But this needs to be here anyway to cover
+                * the possibility of an incorrect input.
+                */
+               jsonapi_appendStringInfo(lex->errormsg,
+                                                                "unexpected json parse error type: %d",
+                                                                (int) error);
+       }
+
+#ifdef JSONAPI_USE_PQEXPBUFFER
+       if (PQExpBufferBroken(lex->errormsg))
+               return _("out of memory while constructing error description");
+#endif
 
        return lex->errormsg->data;
 }
index d396e11ce99416f0e3d1ada2de5c25daa53100d0..538e0f43d55b12146bb5cbb14b8680ef4c25a59a 100644 (file)
@@ -102,6 +102,10 @@ common_sources_cflags = {
 # a matter of policy, because it is not appropriate for general purpose
 # libraries such as libpq to report errors directly.  fe_memutils.c is
 # excluded because libpq must not exit() on allocation failure.
+#
+# The excluded files for _shlib builds are pulled into their own static
+# library, for the benefit of test programs that need not follow the
+# shlib rules.
 
 common_sources_frontend_shlib = common_sources
 common_sources_frontend_shlib += files(
@@ -109,12 +113,16 @@ common_sources_frontend_shlib += files(
   'sprompt.c',
 )
 
-common_sources_frontend_static = common_sources_frontend_shlib
-common_sources_frontend_static += files(
+common_sources_excluded_shlib = files(
   'fe_memutils.c',
   'logging.c',
 )
 
+common_sources_frontend_static = [
+  common_sources_frontend_shlib,
+  common_sources_excluded_shlib,
+]
+
 # Build pgcommon once for backend, once for use in frontend binaries, and
 # once for use in shared libraries
 #
@@ -142,6 +150,10 @@ pgcommon_variants = {
     'pic': true,
     'sources': common_sources_frontend_shlib,
     'dependencies': [frontend_common_code],
+    # The JSON API normally exits on out-of-memory; disable that behavior for
+    # shared library builds. This requires libpq's pqexpbuffer.h.
+    'c_args': ['-DJSONAPI_USE_PQEXPBUFFER'],
+    'include_directories': include_directories('../interfaces/libpq'),
   },
 }
 
@@ -157,8 +169,11 @@ foreach name, opts : pgcommon_variants
     c_args = opts.get('c_args', []) + common_cflags[cflagname]
     cflag_libs += static_library('libpgcommon@0@_@1@'.format(name, cflagname),
       c_pch: pch_c_h,
-      include_directories: include_directories('.'),
       kwargs: opts + {
+        'include_directories': [
+          include_directories('.'),
+          opts.get('include_directories', []),
+        ],
         'sources': sources,
         'c_args': c_args,
         'build_by_default': false,
@@ -170,8 +185,11 @@ foreach name, opts : pgcommon_variants
   lib = static_library('libpgcommon@0@'.format(name),
       link_with: cflag_libs,
       c_pch: pch_c_h,
-      include_directories: include_directories('.'),
       kwargs: opts + {
+        'include_directories': [
+          include_directories('.'),
+          opts.get('include_directories', []),
+        ],
         'dependencies': opts['dependencies'] + [ssl],
       }
     )
@@ -182,4 +200,13 @@ common_srv = pgcommon['_srv']
 common_shlib = pgcommon['_shlib']
 common_static = pgcommon['']
 
+common_excluded_shlib = static_library('libpgcommon_excluded_shlib',
+  sources: common_sources_excluded_shlib,
+  dependencies: [frontend_common_code],
+  build_by_default: false,
+  kwargs: default_lib_args + {
+    'install': false,
+  },
+)
+
 subdir('unicode')
index a995fdbe0819929b1f03ea9878269c03b781485b..c524ff5be8bfbf525649a1f5b1449302c0e96f4f 100644 (file)
@@ -14,8 +14,6 @@
 #ifndef JSONAPI_H
 #define JSONAPI_H
 
-#include "lib/stringinfo.h"
-
 typedef enum JsonTokenType
 {
        JSON_TOKEN_INVALID,
@@ -51,6 +49,7 @@ typedef enum JsonParseErrorType
        JSON_EXPECTED_OBJECT_NEXT,
        JSON_EXPECTED_STRING,
        JSON_INVALID_TOKEN,
+       JSON_OUT_OF_MEMORY,
        JSON_UNICODE_CODE_POINT_ZERO,
        JSON_UNICODE_ESCAPE_FORMAT,
        JSON_UNICODE_HIGH_ESCAPE,
@@ -64,6 +63,16 @@ typedef enum JsonParseErrorType
 typedef struct JsonParserStack JsonParserStack;
 typedef struct JsonIncrementalState JsonIncrementalState;
 
+/*
+ * Don't depend on the internal type header for strval; if callers need access
+ * then they can include the appropriate header themselves.
+ */
+#ifdef JSONAPI_USE_PQEXPBUFFER
+#define jsonapi_StrValType PQExpBufferData
+#else
+#define jsonapi_StrValType StringInfoData
+#endif
+
 /*
  * All the fields in this structure should be treated as read-only.
  *
@@ -102,8 +111,9 @@ typedef struct JsonLexContext
        const char *line_start;         /* where that line starts within input */
        JsonParserStack *pstack;
        JsonIncrementalState *inc_state;
-       StringInfo      strval;
-       StringInfo      errormsg;
+       bool            need_escapes;
+       struct jsonapi_StrValType *strval;      /* only used if need_escapes == true */
+       struct jsonapi_StrValType *errormsg;
 } JsonLexContext;
 
 typedef JsonParseErrorType (*json_struct_action) (void *state);
index 2dc7175b7c8fe79cca695f20dbd105970bb7502d..af3f19424ed88cdc32536b50db4192b89c8b0352 100644 (file)
@@ -6,7 +6,7 @@ TAP_TESTS = 1
 
 OBJS = test_json_parser_incremental.o test_json_parser_perf.o $(WIN32RES)
 
-EXTRA_CLEAN = test_json_parser_incremental$(X) test_json_parser_perf$(X)
+EXTRA_CLEAN = test_json_parser_incremental$(X) test_json_parser_incremental_shlib$(X) test_json_parser_perf$(X)
 
 ifdef USE_PGXS
 PG_CONFIG = pg_config
@@ -19,13 +19,16 @@ include $(top_builddir)/src/Makefile.global
 include $(top_srcdir)/contrib/contrib-global.mk
 endif
 
-all: test_json_parser_incremental$(X) test_json_parser_perf$(X)
+all: test_json_parser_incremental$(X) test_json_parser_incremental_shlib$(X) test_json_parser_perf$(X)
 
 %.o: $(top_srcdir)/$(subdir)/%.c
 
 test_json_parser_incremental$(X): test_json_parser_incremental.o $(WIN32RES)
        $(CC) $(CFLAGS) $^ $(PG_LIBS_INTERNAL) $(LDFLAGS) $(LDFLAGS_EX) $(PG_LIBS) $(LIBS) -o $@
 
+test_json_parser_incremental_shlib$(X): test_json_parser_incremental.o $(WIN32RES)
+       $(CC) $(CFLAGS) $^ $(LDFLAGS) -lpgcommon_excluded_shlib $(libpq_pgport_shlib) $(filter -lintl, $(LIBS)) -o $@
+
 test_json_parser_perf$(X): test_json_parser_perf.o $(WIN32RES)
        $(CC) $(CFLAGS) $^ $(PG_LIBS_INTERNAL) $(LDFLAGS) $(LDFLAGS_EX) $(PG_LIBS) $(LIBS) -o $@
 
index b224f3e07e2b863fb91a6390173a1c707068b70a..059a8b71bde21503c8ffce2267c84c5a866d2eff 100644 (file)
@@ -19,6 +19,18 @@ test_json_parser_incremental = executable('test_json_parser_incremental',
   },
 )
 
+# A second version of test_json_parser_incremental, this time compiled against
+# the shared-library flavor of jsonapi.
+test_json_parser_incremental_shlib = executable('test_json_parser_incremental_shlib',
+  test_json_parser_incremental_sources,
+  dependencies: [frontend_shlib_code, libpq],
+  c_args: ['-DJSONAPI_SHLIB_ALLOC'],
+  link_with: [common_excluded_shlib],
+  kwargs: default_bin_args + {
+    'install': false,
+  },
+)
+
 test_json_parser_perf_sources = files(
   'test_json_parser_perf.c',
 )
index abf0d7a2375d19cae24cce0bc9225ee41234a608..8cc42e8e292ed91b6536a79e98238c891ebc68ac 100644 (file)
@@ -13,20 +13,25 @@ use FindBin;
 
 my $test_file = "$FindBin::RealBin/../tiny.json";
 
-my $exe = "test_json_parser_incremental";
+my @exes =
+  ("test_json_parser_incremental", "test_json_parser_incremental_shlib");
 
-# Test the  usage error
-my ($stdout, $stderr) = run_command([ $exe, "-c", 10 ]);
-like($stderr, qr/Usage:/, 'error message if not enough arguments');
+foreach my $exe (@exes)
+{
+       note "testing executable $exe";
 
-# Test that we get success for small chunk sizes from 64 down to 1.
+       # Test the usage error
+       my ($stdout, $stderr) = run_command([ $exe, "-c", 10 ]);
+       like($stderr, qr/Usage:/, 'error message if not enough arguments');
 
-for (my $size = 64; $size > 0; $size--)
-{
-       ($stdout, $stderr) = run_command([ $exe, "-c", $size, $test_file ]);
+       # Test that we get success for small chunk sizes from 64 down to 1.
+       for (my $size = 64; $size > 0; $size--)
+       {
+               ($stdout, $stderr) = run_command([ $exe, "-c", $size, $test_file ]);
 
-       like($stdout, qr/SUCCESS/, "chunk size $size: test succeeds");
-       is($stderr, "", "chunk size $size: no error output");
+               like($stdout, qr/SUCCESS/, "chunk size $size: test succeeds");
+               is($stderr, "", "chunk size $size: no error output");
+       }
 }
 
 done_testing();
index 8d62eb44c8cfac08362982a0fc09b6b88e3d079c..5b6c6dc4ae7ea8108a32700fa4dcce44a226c294 100644 (file)
@@ -13,13 +13,13 @@ use Test::More;
 use File::Temp qw(tempfile);
 
 my $dir = PostgreSQL::Test::Utils::tempdir;
+my $exe;
 
 sub test
 {
        local $Test::Builder::Level = $Test::Builder::Level + 1;
 
        my ($name, $json, %params) = @_;
-       my $exe = "test_json_parser_incremental";
        my $chunk = length($json);
 
        # Test the input with chunk sizes from max(input_size, 64) down to 1
@@ -53,86 +53,99 @@ sub test
        }
 }
 
-test("number", "12345");
-test("string", '"hello"');
-test("false", "false");
-test("true", "true");
-test("null", "null");
-test("empty object", "{}");
-test("empty array", "[]");
-test("array with number", "[12345]");
-test("array with numbers", "[12345,67890]");
-test("array with null", "[null]");
-test("array with string", '["hello"]');
-test("array with boolean", '[false]');
-test("single pair", '{"key": "value"}');
-test("heavily nested array", "[" x 3200 . "]" x 3200);
-test("serial escapes", '"\\\\\\\\\\\\\\\\"');
-test("interrupted escapes", '"\\\\\\"\\\\\\\\\\"\\\\"');
-test("whitespace", '     ""     ');
-
-test("unclosed empty object",
-       "{", error => qr/input string ended unexpectedly/);
-test("bad key", "{{", error => qr/Expected string or "}", but found "\{"/);
-test("bad key", "{{}", error => qr/Expected string or "}", but found "\{"/);
-test("numeric key", "{1234: 2}",
-       error => qr/Expected string or "}", but found "1234"/);
-test(
-       "second numeric key",
-       '{"a": "a", 1234: 2}',
-       error => qr/Expected string, but found "1234"/);
-test(
-       "unclosed object with pair",
-       '{"key": "value"',
-       error => qr/input string ended unexpectedly/);
-test("missing key value",
-       '{"key": }', error => qr/Expected JSON value, but found "}"/);
-test(
-       "missing colon",
-       '{"key" 12345}',
-       error => qr/Expected ":", but found "12345"/);
-test(
-       "missing comma",
-       '{"key": 12345 12345}',
-       error => qr/Expected "," or "}", but found "12345"/);
-test("overnested array",
-       "[" x 6401, error => qr/maximum permitted depth is 6400/);
-test("overclosed array",
-       "[]]", error => qr/Expected end of input, but found "]"/);
-test("unexpected token in array",
-       "[ }}} ]", error => qr/Expected array element or "]", but found "}"/);
-test("junk punctuation", "[ ||| ]", error => qr/Token "|" is invalid/);
-test("missing comma in array",
-       "[123 123]", error => qr/Expected "," or "]", but found "123"/);
-test("misspelled boolean", "tru", error => qr/Token "tru" is invalid/);
-test(
-       "misspelled boolean in array",
-       "[tru]",
-       error => qr/Token "tru" is invalid/);
-test("smashed top-level scalar", "12zz",
-       error => qr/Token "12zz" is invalid/);
-test(
-       "smashed scalar in array",
-       "[12zz]",
-       error => qr/Token "12zz" is invalid/);
-test(
-       "unknown escape sequence",
-       '"hello\vworld"',
-       error => qr/Escape sequence "\\v" is invalid/);
-test("unescaped control",
-       "\"hello\tworld\"",
-       error => qr/Character with value 0x09 must be escaped/);
-test(
-       "incorrect escape count",
-       '"\\\\\\\\\\\\\\"',
-       error => qr/Token ""\\\\\\\\\\\\\\"" is invalid/);
-
-# Case with three bytes: double-quote, backslash and <f5>.
-# Both invalid-token and invalid-escape are possible errors, because for
-# smaller chunk sizes the incremental parser skips the string parsing when
-# it cannot find an ending quote.
-test("incomplete UTF-8 sequence",
-       "\"\\\x{F5}",
-       error => qr/(Token|Escape sequence) ""?\\\x{F5}" is invalid/);
+my @exes =
+  ("test_json_parser_incremental", "test_json_parser_incremental_shlib");
+
+foreach (@exes)
+{
+       $exe = $_;
+       note "testing executable $exe";
+
+       test("number", "12345");
+       test("string", '"hello"');
+       test("false", "false");
+       test("true", "true");
+       test("null", "null");
+       test("empty object", "{}");
+       test("empty array", "[]");
+       test("array with number", "[12345]");
+       test("array with numbers", "[12345,67890]");
+       test("array with null", "[null]");
+       test("array with string", '["hello"]');
+       test("array with boolean", '[false]');
+       test("single pair", '{"key": "value"}');
+       test("heavily nested array", "[" x 3200 . "]" x 3200);
+       test("serial escapes", '"\\\\\\\\\\\\\\\\"');
+       test("interrupted escapes", '"\\\\\\"\\\\\\\\\\"\\\\"');
+       test("whitespace", '     ""     ');
+
+       test("unclosed empty object",
+               "{", error => qr/input string ended unexpectedly/);
+       test("bad key", "{{",
+               error => qr/Expected string or "}", but found "\{"/);
+       test("bad key", "{{}",
+               error => qr/Expected string or "}", but found "\{"/);
+       test("numeric key", "{1234: 2}",
+               error => qr/Expected string or "}", but found "1234"/);
+       test(
+               "second numeric key",
+               '{"a": "a", 1234: 2}',
+               error => qr/Expected string, but found "1234"/);
+       test(
+               "unclosed object with pair",
+               '{"key": "value"',
+               error => qr/input string ended unexpectedly/);
+       test("missing key value",
+               '{"key": }', error => qr/Expected JSON value, but found "}"/);
+       test(
+               "missing colon",
+               '{"key" 12345}',
+               error => qr/Expected ":", but found "12345"/);
+       test(
+               "missing comma",
+               '{"key": 12345 12345}',
+               error => qr/Expected "," or "}", but found "12345"/);
+       test("overnested array",
+               "[" x 6401, error => qr/maximum permitted depth is 6400/);
+       test("overclosed array",
+               "[]]", error => qr/Expected end of input, but found "]"/);
+       test("unexpected token in array",
+               "[ }}} ]", error => qr/Expected array element or "]", but found "}"/);
+       test("junk punctuation", "[ ||| ]", error => qr/Token "|" is invalid/);
+       test("missing comma in array",
+               "[123 123]", error => qr/Expected "," or "]", but found "123"/);
+       test("misspelled boolean", "tru", error => qr/Token "tru" is invalid/);
+       test(
+               "misspelled boolean in array",
+               "[tru]",
+               error => qr/Token "tru" is invalid/);
+       test(
+               "smashed top-level scalar",
+               "12zz",
+               error => qr/Token "12zz" is invalid/);
+       test(
+               "smashed scalar in array",
+               "[12zz]",
+               error => qr/Token "12zz" is invalid/);
+       test(
+               "unknown escape sequence",
+               '"hello\vworld"',
+               error => qr/Escape sequence "\\v" is invalid/);
+       test("unescaped control",
+               "\"hello\tworld\"",
+               error => qr/Character with value 0x09 must be escaped/);
+       test(
+               "incorrect escape count",
+               '"\\\\\\\\\\\\\\"',
+               error => qr/Token ""\\\\\\\\\\\\\\"" is invalid/);
+
+       # Case with three bytes: double-quote, backslash and <f5>.
+       # Both invalid-token and invalid-escape are possible errors, because for
+       # smaller chunk sizes the incremental parser skips the string parsing when
+       # it cannot find an ending quote.
+       test("incomplete UTF-8 sequence",
+               "\"\\\x{F5}",
+               error => qr/(Token|Escape sequence) ""?\\\x{F5}" is invalid/);
+}
 
 done_testing();
index b6553bbcddf050782398c9df5743ffbe58c35808..c11480172d3cd17d0ae67b507648709e75a0417d 100644 (file)
@@ -16,24 +16,31 @@ use File::Temp qw(tempfile);
 my $test_file = "$FindBin::RealBin/../tiny.json";
 my $test_out = "$FindBin::RealBin/../tiny.out";
 
-my $exe = "test_json_parser_incremental";
+my @exes =
+  ("test_json_parser_incremental", "test_json_parser_incremental_shlib");
 
-my ($stdout, $stderr) = run_command([ $exe, "-s", $test_file ]);
+foreach my $exe (@exes)
+{
+       note "testing executable $exe";
 
-is($stderr, "", "no error output");
+       my ($stdout, $stderr) = run_command([ $exe, "-s", $test_file ]);
 
-my $dir = PostgreSQL::Test::Utils::tempdir;
-my ($fh, $fname) = tempfile(DIR => $dir);
+       is($stderr, "", "no error output");
 
-print $fh $stdout, "\n";
+       my $dir = PostgreSQL::Test::Utils::tempdir;
+       my ($fh, $fname) = tempfile(DIR => $dir);
 
-close($fh);
+       print $fh $stdout, "\n";
 
-my @diffopts = ("-u");
-push(@diffopts, "--strip-trailing-cr") if $windows_os;
-($stdout, $stderr) = run_command([ "diff", @diffopts, $fname, $test_out ]);
+       close($fh);
 
-is($stdout, "", "no output diff");
-is($stderr, "", "no diff error");
+       my @diffopts = ("-u");
+       push(@diffopts, "--strip-trailing-cr") if $windows_os;
+       ($stdout, $stderr) =
+         run_command([ "diff", @diffopts, $fname, $test_out ]);
+
+       is($stdout, "", "no output diff");
+       is($stderr, "", "no diff error");
+}
 
 done_testing();