/* * SQL function json_array_length(json) -> int */ Datum json_array_length(PG_FUNCTION_ARGS) { text *json = PG_GETARG_TEXT_P(0); AlenState state; JsonLexContext *lex = makeJsonLexContext(json, false); JsonSemAction sem; state = palloc0(sizeof(alenState)); sem = palloc0(sizeof(jsonSemAction)); /* palloc0 does this for us */ #if 0 state->count = 0; #endif state->lex = lex; sem->semstate = (void *) state; sem->object_start = alen_object_start; sem->scalar = alen_scalar; sem->array_element_start = alen_array_element_start; pg_parse_json(lex, sem); PG_RETURN_INT32(state->count); }
/*
 * Convert a JSON string into msgpack format, appending the packed bytes
 * to *buf.
 *
 * The text is run through pg_parse_json(); the sem_* handlers build a
 * container tree rooted at a JSON_TOP_LEVEL node, whose value is then
 * serialized with pack_value() and the tree torn down.
 */
void
json_string_to_msgpack(const char *json_str, msgpack_sbuffer *buf)
{
	JsonLexContext *lex = makeJsonLexContext(cstring_to_text(json_str), true);
	msgpack_packer *packer;
	JsonValue	root_value;
	JsonContainer root;
	PackState	pstate;
	JsonSemAction *actions;

	packer = msgpack_packer_new(buf, msgpack_sbuffer_write);

	/* top-level pseudo-container that will receive the parsed value */
	root_value = palloc(sizeof(JsonValueData));
	root = palloc(sizeof(JsonContainerData));
	root->type = JSON_TOP_LEVEL;
	root->parent = NULL;
	root->via.top_level.value = root_value;

	/* parser state shared by all the semantic actions */
	pstate = palloc(sizeof(PackStateData));
	pstate->lex = lex;
	pstate->current_container = root;
	pstate->pk = packer;
	pstate->buf = buf;

	/* every field is assigned explicitly, so plain palloc is fine here */
	actions = palloc(sizeof(JsonSemAction));
	actions->semstate = (void *) pstate;
	actions->object_start = sem_object_start;
	actions->object_end = sem_object_end;
	actions->array_start = sem_array_start;
	actions->array_end = sem_array_end;
	actions->object_field_start = sem_object_field_start;
	actions->object_field_end = NULL;
	actions->array_element_start = sem_array_element_start;
	actions->array_element_end = NULL;
	actions->scalar = sem_scalar;

	pg_parse_json(lex, actions);

	/* serialize whatever the parse left in the top-level slot */
	pack_value(packer, root_value);

	msgpack_packer_free(packer);
	destroy_container(root);
}
/*
 * json_in: text-format input function for the json type.
 *
 * The string is validated by running the parser with the do-nothing
 * semantic actions; the stored representation is simply the text itself.
 */
Datum
json_in(PG_FUNCTION_ARGS)
{
	char	   *str = PG_GETARG_CSTRING(0);
	text	   *result = cstring_to_text(str);

	/* reject malformed input before storing it */
	pg_parse_json(makeJsonLexContext(result, false), &nullSemAction);

	/* Internal representation is the same as text, for now */
	PG_RETURN_TEXT_P(result);
}
/* * Binary receive. */ Datum json_recv(PG_FUNCTION_ARGS) { StringInfo buf = (StringInfo) PG_GETARG_POINTER(0); text *result; char *str; int nbytes; JsonLexContext *lex; str = pq_getmsgtext(buf, buf->len - buf->cursor, &nbytes); result = palloc(nbytes + VARHDRSZ); SET_VARSIZE(result, nbytes + VARHDRSZ); memcpy(VARDATA(result), str, nbytes); /* Validate it. */ lex = makeJsonLexContext(result, false); pg_parse_json(lex, &nullSemAction); PG_RETURN_TEXT_P(result); }
/* * SQL function json_typeof(json) -> text * * Returns the type of the outermost JSON value as TEXT. Possible types are * "object", "array", "string", "number", "boolean", and "null". * * Performs a single call to json_lex() to get the first token of the supplied * value. This initial token uniquely determines the value's type. As our * input must already have been validated by json_in() or json_recv(), the * initial token should never be JSON_TOKEN_OBJECT_END, JSON_TOKEN_ARRAY_END, * JSON_TOKEN_COLON, JSON_TOKEN_COMMA, or JSON_TOKEN_END. */ Datum json_typeof(PG_FUNCTION_ARGS) { text *json = PG_GETARG_TEXT_P(0); JsonLexContext *lex = makeJsonLexContext(json, false); JsonTokenType tok; char *type; /* Lex exactly one token from the input and check its type. */ json_lex(lex); tok = lex_peek(lex); switch (tok) { case JSON_TOKEN_OBJECT_START: type = "object"; break; case JSON_TOKEN_ARRAY_START: type = "array"; break; case JSON_TOKEN_STRING: type = "string"; break; case JSON_TOKEN_NUMBER: type = "number"; break; case JSON_TOKEN_TRUE: case JSON_TOKEN_FALSE: type = "boolean"; break; case JSON_TOKEN_NULL: type = "null"; break; default: elog(ERROR, "unexpected json token: %d", tok); } PG_RETURN_TEXT_P(cstring_to_text(type)); }
/*
 * Convert a json text value into a freshly palloc'd BSON blob.
 *
 * Parse events drive the jbson_* handlers, which append to a
 * stack-allocated bson object; the finished buffer is copied into
 * palloc'd storage and returned, and all scratch memory is released.
 */
void *
json_to_bson(text *json)
{
	bson		b;
	JsonLexContext *lex = makeJsonLexContext(json, true);
	json_to_bson_state state;
	jsonSemAction sem;
	void	   *result;

	memset(&state, 0, sizeof(json_to_bson_state));
	state.bson = &b;
	state.lex = lex;

	memset(&sem, 0, sizeof(sem));
	sem.semstate = (void *) &state;
	sem.object_start = jbson_object_start;
	sem.object_end = jbson_object_end;
	sem.array_start = jbson_array_start;
	sem.array_end = jbson_array_end;
	sem.array_element_start = jbson_array_element_start;
	sem.object_field_start = jbson_object_field_start;
	sem.scalar = jbson_scalar;

	bson_init(&b);
	pg_parse_json(lex, &sem);

	/* the lexer and its scratch string are no longer needed */
	pfree(lex->strval->data);
	pfree(lex->strval);
	pfree(lex);

	bson_finish(&b);

	/* copy the finished buffer into palloc'd storage for the caller */
	result = palloc(bson_size(&b));
	memcpy(result, b.data, bson_size(&b));
	bson_destroy(&b);

	return result;
}
/*
 * get_json_object_as_hash
 *
 * Decompose a json object into a hash table keyed by field name.
 *
 * Only a flat object is currently accepted (the hash_* handlers enforce
 * this, subject to use_json_as_text).  funcname is the caller's name,
 * passed along for use in error messages.
 */
static HTAB *
get_json_object_as_hash(text *json, char *funcname, bool use_json_as_text)
{
	JsonLexContext *lex = makeJsonLexContext(json, true);
	HASHCTL		ctl;
	HTAB	   *tab;
	JHashState	hstate;
	JsonSemAction actions;

	memset(&ctl, 0, sizeof(ctl));
	ctl.keysize = NAMEDATALEN;
	ctl.entrysize = sizeof(jsonHashEntry);
	ctl.hcxt = CurrentMemoryContext;
	tab = hash_create("json object hashtable",
					  100,
					  &ctl,
					  HASH_ELEM | HASH_CONTEXT);

	hstate = palloc0(sizeof(jhashState));
	actions = palloc0(sizeof(jsonSemAction));

	hstate->function_name = funcname;
	hstate->hash = tab;
	hstate->lex = lex;
	hstate->use_json_as_text = use_json_as_text;

	actions->semstate = (void *) hstate;
	actions->array_start = hash_array_start;
	actions->scalar = hash_scalar;
	actions->object_field_start = hash_object_field_start;
	actions->object_field_end = hash_object_field_end;

	pg_parse_json(lex, actions);

	return tab;
}
/*
 * each_worker
 *
 * Shared guts of json_each() and json_each_text(): materialize a set of
 * (key, value) rows from the top-level fields of a json object.
 *
 * as_text selects the "_text" variant (normalize_results in the state).
 */
static inline Datum
each_worker(PG_FUNCTION_ARGS, bool as_text)
{
	text	   *json = PG_GETARG_TEXT_P(0);
	JsonLexContext *lex = makeJsonLexContext(json, true);
	JsonSemAction sem;
	ReturnSetInfo *rsi;
	MemoryContext old_cxt;
	TupleDesc	tupdesc;
	EachState	state;

	state = palloc0(sizeof(eachState));
	sem = palloc0(sizeof(jsonSemAction));

	rsi = (ReturnSetInfo *) fcinfo->resultinfo;

	if (!rsi || !IsA(rsi, ReturnSetInfo) ||
		(rsi->allowedModes & SFRM_Materialize) == 0 ||
		rsi->expectedDesc == NULL)
		ereport(ERROR,
				(errcode(ERRCODE_FEATURE_NOT_SUPPORTED),
				 errmsg("set-valued function called in context that "
						"cannot accept a set")));

	rsi->returnMode = SFRM_Materialize;

	(void) get_call_result_type(fcinfo, NULL, &tupdesc);

	/* make these in a sufficiently long-lived memory context */
	old_cxt = MemoryContextSwitchTo(rsi->econtext->ecxt_per_query_memory);

	state->ret_tdesc = CreateTupleDescCopy(tupdesc);
	BlessTupleDesc(state->ret_tdesc);

	/*
	 * Request a random-access tuplestore only if the caller asked for one.
	 * The previous coding tested SFRM_Materialize, which is guaranteed set
	 * here (checked above), so random access was always forced on.
	 */
	state->tuple_store =
		tuplestore_begin_heap(rsi->allowedModes & SFRM_Materialize_Random,
							  false, work_mem);

	MemoryContextSwitchTo(old_cxt);

	sem->semstate = (void *) state;
	sem->array_start = each_array_start;
	sem->scalar = each_scalar;
	sem->object_field_start = each_object_field_start;
	sem->object_field_end = each_object_field_end;

	state->normalize_results = as_text;
	state->next_scalar = false;
	state->lex = lex;
	state->tmp_cxt = AllocSetContextCreate(CurrentMemoryContext,
										   "json_each temporary cxt",
										   ALLOCSET_DEFAULT_MINSIZE,
										   ALLOCSET_DEFAULT_INITSIZE,
										   ALLOCSET_DEFAULT_MAXSIZE);

	pg_parse_json(lex, sem);

	rsi->setResult = state->tuple_store;
	rsi->setDesc = state->ret_tdesc;

	PG_RETURN_NULL();
}
/*
 * get_worker
 *
 * common worker for all the json getter functions
 *
 * Exactly one search mode is used per call, chosen by the arguments:
 *   field != NULL  -> look up a single object field (JSON_SEARCH_OBJECT)
 *   tpath != NULL  -> follow a path; tpath/ipath are parallel arrays of
 *                     npath entries (JSON_SEARCH_PATH)
 *   otherwise      -> fetch array element elem_index (JSON_SEARCH_ARRAY)
 *
 * normalize_results selects the "_as_text" behavior.  Returns
 * state->tresult, which is still NULL (from palloc0) if the semantic
 * handlers found no match.
 */
static inline text *
get_worker(text *json,
		   char *field,
		   int elem_index,
		   char **tpath,
		   int *ipath,
		   int npath,
		   bool normalize_results)
{
	GetState	state;
	JsonLexContext *lex = makeJsonLexContext(json, true);
	JsonSemAction sem;

	/* only allowed to use one of these */
	Assert(elem_index < 0 || (tpath == NULL && ipath == NULL && field == NULL));
	Assert(tpath == NULL || field == NULL);

	state = palloc0(sizeof(getState));
	sem = palloc0(sizeof(jsonSemAction));

	state->lex = lex;
	/* is it "_as_text" variant? */
	state->normalize_results = normalize_results;
	if (field != NULL)
	{
		/* single text argument */
		state->search_type = JSON_SEARCH_OBJECT;
		state->search_term = field;
	}
	else if (tpath != NULL)
	{
		/* path array argument */
		state->search_type = JSON_SEARCH_PATH;
		state->path = tpath;
		state->npath = npath;
		state->current_path = palloc(sizeof(char *) * npath);
		state->pathok = palloc0(sizeof(bool) * npath);
		/*
		 * NOTE(review): this writes pathok[0] unconditionally; it assumes
		 * npath >= 1 whenever tpath is non-NULL.  If a caller can pass an
		 * empty path array, this is an out-of-bounds write — confirm
		 * against the callers.
		 */
		state->pathok[0] = true;
		state->array_level_index = palloc(sizeof(int) * npath);
		state->path_level_index = ipath;
	}
	else
	{
		/* single integer argument */
		state->search_type = JSON_SEARCH_ARRAY;
		state->search_index = elem_index;
		/* -1 means "no element seen yet"; bumped by the element handlers */
		state->array_index = -1;
	}

	sem->semstate = (void *) state;

	/*
	 * Not all variants need all the semantic routines. only set the ones
	 * that are actually needed for maximum efficiency.
	 */
	sem->object_start = get_object_start;
	sem->array_start = get_array_start;
	sem->scalar = get_scalar;
	if (field != NULL || tpath != NULL)
	{
		/* object-field callbacks only matter for field and path searches */
		sem->object_field_start = get_object_field_start;
		sem->object_field_end = get_object_field_end;
	}
	if (field == NULL)
	{
		/* array-element callbacks matter for index and path searches */
		sem->array_element_start = get_array_element_start;
		sem->array_element_end = get_array_element_end;
	}

	pg_parse_json(lex, sem);

	return state->tresult;
}
/*
 * SQL function json_object_keys(json) -> setof text
 *
 * Returns the set of top-level field names of a json object using the
 * SRF multi-call protocol: the full key list is collected into
 * state->result on the first call, then handed out one key per call.
 */
Datum
json_object_keys(PG_FUNCTION_ARGS)
{
	FuncCallContext *funcctx;
	OkeysState	state;
	int			i;

	if (SRF_IS_FIRSTCALL())
	{
		text	   *json = PG_GETARG_TEXT_P(0);
		JsonLexContext *lex = makeJsonLexContext(json, true);
		JsonSemAction sem;
		MemoryContext oldcontext;

		funcctx = SRF_FIRSTCALL_INIT();

		/* state must survive across calls, so build it in the SRF context */
		oldcontext = MemoryContextSwitchTo(funcctx->multi_call_memory_ctx);

		state = palloc(sizeof(okeysState));
		sem = palloc0(sizeof(jsonSemAction));

		state->lex = lex;
		/*
		 * Initial capacity of 256 key slots; presumably grown by
		 * okeys_object_field_start when exceeded — TODO confirm there.
		 */
		state->result_size = 256;
		state->result_count = 0;
		state->sent_count = 0;
		state->result = palloc(256 * sizeof(char *));

		sem->semstate = (void *) state;
		sem->array_start = okeys_array_start;
		sem->scalar = okeys_scalar;
		sem->object_field_start = okeys_object_field_start;
		/* remainder are all NULL, courtesy of palloc0 above */

		pg_parse_json(lex, sem);
		/* keys are now in state->result */

		/* the lexer and sem action are only needed during the parse */
		pfree(lex->strval->data);
		pfree(lex->strval);
		pfree(lex);
		pfree(sem);

		MemoryContextSwitchTo(oldcontext);
		funcctx->user_fctx = (void *) state;
	}

	funcctx = SRF_PERCALL_SETUP();
	state = (OkeysState) funcctx->user_fctx;

	/* emit the next key, if any remain */
	if (state->sent_count < state->result_count)
	{
		char	   *nxt = state->result[state->sent_count++];

		SRF_RETURN_NEXT(funcctx, CStringGetTextDatum(nxt));
	}

	/* cleanup to reduce or eliminate memory leaks */
	for (i = 0; i < state->result_count; i++)
		pfree(state->result[i]);
	pfree(state->result);
	pfree(state);

	SRF_RETURN_DONE(funcctx);
}
/* * SQL function json_populate_recordset * * set fields in a set of records from the argument json, * which must be an array of objects. * * similar to json_populate_record, but the tuple-building code * is pushed down into the semantic action handlers so it's done * per object in the array. */ Datum json_populate_recordset(PG_FUNCTION_ARGS) { Oid argtype = get_fn_expr_argtype(fcinfo->flinfo, 0); text *json = PG_GETARG_TEXT_P(1); bool use_json_as_text = PG_GETARG_BOOL(2); ReturnSetInfo *rsi; MemoryContext old_cxt; Oid tupType; int32 tupTypmod; HeapTupleHeader rec; TupleDesc tupdesc; RecordIOData *my_extra; int ncolumns; JsonLexContext *lex; JsonSemAction sem; PopulateRecordsetState state; if (!type_is_rowtype(argtype)) ereport(ERROR, (errcode(ERRCODE_DATATYPE_MISMATCH), errmsg("first argument must be a rowtype"))); rsi = (ReturnSetInfo *) fcinfo->resultinfo; if (!rsi || !IsA(rsi, ReturnSetInfo) || (rsi->allowedModes & SFRM_Materialize) == 0 || rsi->expectedDesc == NULL) ereport(ERROR, (errcode(ERRCODE_FEATURE_NOT_SUPPORTED), errmsg("set-valued function called in context that " "cannot accept a set"))); rsi->returnMode = SFRM_Materialize; /* * get the tupdesc from the result set info - it must be a record type * because we already checked that arg1 is a record type. 
*/ (void) get_call_result_type(fcinfo, NULL, &tupdesc); state = palloc0(sizeof(populateRecordsetState)); sem = palloc0(sizeof(jsonSemAction)); /* make these in a sufficiently long-lived memory context */ old_cxt = MemoryContextSwitchTo(rsi->econtext->ecxt_per_query_memory); state->ret_tdesc = CreateTupleDescCopy(tupdesc); BlessTupleDesc(state->ret_tdesc); state->tuple_store = tuplestore_begin_heap(rsi->allowedModes & SFRM_Materialize, false, work_mem); MemoryContextSwitchTo(old_cxt); /* if the json is null send back an empty set */ if (PG_ARGISNULL(1)) PG_RETURN_NULL(); if (PG_ARGISNULL(0)) rec = NULL; else rec = PG_GETARG_HEAPTUPLEHEADER(0); tupType = tupdesc->tdtypeid; tupTypmod = tupdesc->tdtypmod; ncolumns = tupdesc->natts; lex = makeJsonLexContext(json, true); /* * We arrange to look up the needed I/O info just once per series of * calls, assuming the record type doesn't change underneath us. */ my_extra = (RecordIOData *) fcinfo->flinfo->fn_extra; if (my_extra == NULL || my_extra->ncolumns != ncolumns) { fcinfo->flinfo->fn_extra = MemoryContextAlloc(fcinfo->flinfo->fn_mcxt, sizeof(RecordIOData) - sizeof(ColumnIOData) + ncolumns * sizeof(ColumnIOData)); my_extra = (RecordIOData *) fcinfo->flinfo->fn_extra; my_extra->record_type = InvalidOid; my_extra->record_typmod = 0; } if (my_extra->record_type != tupType || my_extra->record_typmod != tupTypmod) { MemSet(my_extra, 0, sizeof(RecordIOData) - sizeof(ColumnIOData) + ncolumns * sizeof(ColumnIOData)); my_extra->record_type = tupType; my_extra->record_typmod = tupTypmod; my_extra->ncolumns = ncolumns; } sem->semstate = (void *) state; sem->array_start = populate_recordset_array_start; sem->array_element_start = populate_recordset_array_element_start; sem->scalar = populate_recordset_scalar; sem->object_field_start = populate_recordset_object_field_start; sem->object_field_end = populate_recordset_object_field_end; sem->object_start = populate_recordset_object_start; sem->object_end = populate_recordset_object_end; 
state->lex = lex; state->my_extra = my_extra; state->rec = rec; state->use_json_as_text = use_json_as_text; state->fn_mcxt = fcinfo->flinfo->fn_mcxt; pg_parse_json(lex, sem); rsi->setResult = state->tuple_store; rsi->setDesc = state->ret_tdesc; PG_RETURN_NULL(); }
/* * SQL function json_array_elements * * get the elements from a json array * * a lot of this processing is similar to the json_each* functions */ Datum json_array_elements(PG_FUNCTION_ARGS) { text *json = PG_GETARG_TEXT_P(0); /* elements doesn't need any escaped strings, so use false here */ JsonLexContext *lex = makeJsonLexContext(json, false); JsonSemAction sem; ReturnSetInfo *rsi; MemoryContext old_cxt; TupleDesc tupdesc; ElementsState state; state = palloc0(sizeof(elementsState)); sem = palloc0(sizeof(jsonSemAction)); rsi = (ReturnSetInfo *) fcinfo->resultinfo; if (!rsi || !IsA(rsi, ReturnSetInfo) || (rsi->allowedModes & SFRM_Materialize) == 0 || rsi->expectedDesc == NULL) ereport(ERROR, (errcode(ERRCODE_FEATURE_NOT_SUPPORTED), errmsg("set-valued function called in context that " "cannot accept a set"))); rsi->returnMode = SFRM_Materialize; /* it's a simple type, so don't use get_call_result_type() */ tupdesc = rsi->expectedDesc; /* make these in a sufficiently long-lived memory context */ old_cxt = MemoryContextSwitchTo(rsi->econtext->ecxt_per_query_memory); state->ret_tdesc = CreateTupleDescCopy(tupdesc); BlessTupleDesc(state->ret_tdesc); state->tuple_store = tuplestore_begin_heap(rsi->allowedModes & SFRM_Materialize, false, work_mem); MemoryContextSwitchTo(old_cxt); sem->semstate = (void *) state; sem->object_start = elements_object_start; sem->scalar = elements_scalar; sem->array_element_start = elements_array_element_start; sem->array_element_end = elements_array_element_end; state->lex = lex; state->tmp_cxt = AllocSetContextCreate(CurrentMemoryContext, "json_array_elements temporary cxt", ALLOCSET_DEFAULT_MINSIZE, ALLOCSET_DEFAULT_INITSIZE, ALLOCSET_DEFAULT_MAXSIZE); pg_parse_json(lex, sem); rsi->setResult = state->tuple_store; rsi->setDesc = state->ret_tdesc; PG_RETURN_NULL(); }
/*
 * Turn a Datum into jsonb, adding it to the result JsonbInState.
 *
 * tcategory and outfuncoid are from a previous call to json_categorize_type,
 * except that if is_null is true then they can be invalid.
 *
 * If key_scalar is true, the value is stored as a key, so insist
 * it's of an acceptable type, and force it to be a jbvString.
 */
static void
datum_to_jsonb(Datum val, bool is_null, JsonbInState *result,
			   JsonbTypeCategory tcategory, Oid outfuncoid,
			   bool key_scalar)
{
	char	   *outputstr;
	bool		numeric_error;
	JsonbValue	jb;
	bool		scalar_jsonb = false;	/* set true only in the JSONB case
										 * when a raw scalar was unwrapped */

	/* conversions of arrays/composites recurse back into this function */
	check_stack_depth();

	/* Convert val to a JsonbValue in jb (in most cases) */
	if (is_null)
	{
		Assert(!key_scalar);
		jb.type = jbvNull;
	}
	else if (key_scalar &&
			 (tcategory == JSONBTYPE_ARRAY ||
			  tcategory == JSONBTYPE_COMPOSITE ||
			  tcategory == JSONBTYPE_JSON ||
			  tcategory == JSONBTYPE_JSONB ||
			  tcategory == JSONBTYPE_JSONCAST))
	{
		/* object keys may only be scalars */
		ereport(ERROR,
				(errcode(ERRCODE_INVALID_PARAMETER_VALUE),
				 errmsg("key value must be scalar, not array, composite, or json")));
	}
	else
	{
		/* apply the cast function first, then treat its output as json */
		if (tcategory == JSONBTYPE_JSONCAST)
			val = OidFunctionCall1(outfuncoid, val);

		switch (tcategory)
		{
			case JSONBTYPE_ARRAY:
				/* recurses and pushes directly into result */
				array_to_jsonb_internal(val, result);
				break;
			case JSONBTYPE_COMPOSITE:
				/* recurses and pushes directly into result */
				composite_to_jsonb(val, result);
				break;
			case JSONBTYPE_BOOL:
				if (key_scalar)
				{
					/* keys must be strings, even for a boolean */
					outputstr = DatumGetBool(val) ? "true" : "false";
					jb.type = jbvString;
					jb.val.string.len = strlen(outputstr);
					jb.val.string.val = outputstr;
				}
				else
				{
					jb.type = jbvBool;
					jb.val.boolean = DatumGetBool(val);
				}
				break;
			case JSONBTYPE_NUMERIC:
				outputstr = OidOutputFunctionCall(outfuncoid, val);
				if (key_scalar)
				{
					/* always quote keys */
					jb.type = jbvString;
					jb.val.string.len = strlen(outputstr);
					jb.val.string.val = outputstr;
				}
				else
				{
					/*
					 * Make it numeric if it's a valid JSON number, otherwise
					 * a string. Invalid numeric output will always have an
					 * 'N' or 'n' in it (I think).
					 */
					numeric_error = (strchr(outputstr, 'N') != NULL ||
									 strchr(outputstr, 'n') != NULL);
					if (!numeric_error)
					{
						jb.type = jbvNumeric;
						jb.val.numeric = DatumGetNumeric(DirectFunctionCall3(numeric_in, CStringGetDatum(outputstr), 0, -1));
						/* numeric_in made its own copy */
						pfree(outputstr);
					}
					else
					{
						jb.type = jbvString;
						jb.val.string.len = strlen(outputstr);
						jb.val.string.val = outputstr;
					}
				}
				break;
			case JSONBTYPE_DATE:
				{
					DateADT		date;
					struct pg_tm tm;
					char		buf[MAXDATELEN + 1];

					date = DatumGetDateADT(val);

					/* Same as date_out(), but forcing DateStyle */
					if (DATE_NOT_FINITE(date))
						EncodeSpecialDate(date, buf);
					else
					{
						j2date(date + POSTGRES_EPOCH_JDATE,
							   &(tm.tm_year), &(tm.tm_mon), &(tm.tm_mday));
						EncodeDateOnly(&tm, USE_XSD_DATES, buf);
					}
					jb.type = jbvString;
					jb.val.string.len = strlen(buf);
					/* pstrdup: buf is stack-local */
					jb.val.string.val = pstrdup(buf);
				}
				break;
			case JSONBTYPE_TIMESTAMP:
				{
					Timestamp	timestamp;
					struct pg_tm tm;
					fsec_t		fsec;
					char		buf[MAXDATELEN + 1];

					timestamp = DatumGetTimestamp(val);

					/* Same as timestamp_out(), but forcing DateStyle */
					if (TIMESTAMP_NOT_FINITE(timestamp))
						EncodeSpecialTimestamp(timestamp, buf);
					else if (timestamp2tm(timestamp, NULL, &tm, &fsec, NULL, NULL) == 0)
						EncodeDateTime(&tm, fsec, false, 0, NULL, USE_XSD_DATES, buf);
					else
						ereport(ERROR,
								(errcode(ERRCODE_DATETIME_VALUE_OUT_OF_RANGE),
								 errmsg("timestamp out of range")));
					jb.type = jbvString;
					jb.val.string.len = strlen(buf);
					jb.val.string.val = pstrdup(buf);
				}
				break;
			case JSONBTYPE_TIMESTAMPTZ:
				{
					TimestampTz timestamp;
					struct pg_tm tm;
					int			tz;
					fsec_t		fsec;
					const char *tzn = NULL;
					char		buf[MAXDATELEN + 1];

					timestamp = DatumGetTimestampTz(val);

					/* Same as timestamptz_out(), but forcing DateStyle */
					if (TIMESTAMP_NOT_FINITE(timestamp))
						EncodeSpecialTimestamp(timestamp, buf);
					else if (timestamp2tm(timestamp, &tz, &tm, &fsec, &tzn, NULL) == 0)
						EncodeDateTime(&tm, fsec, true, tz, tzn, USE_XSD_DATES, buf);
					else
						ereport(ERROR,
								(errcode(ERRCODE_DATETIME_VALUE_OUT_OF_RANGE),
								 errmsg("timestamp out of range")));
					jb.type = jbvString;
					jb.val.string.len = strlen(buf);
					jb.val.string.val = pstrdup(buf);
				}
				break;
			case JSONBTYPE_JSONCAST:
			case JSONBTYPE_JSON:
				{
					/* parse the json right into the existing result object */
					JsonLexContext *lex;
					JsonSemAction sem;
					text	   *json = DatumGetTextP(val);

					lex = makeJsonLexContext(json, true);

					memset(&sem, 0, sizeof(sem));

					sem.semstate = (void *) result;

					sem.object_start = jsonb_in_object_start;
					sem.array_start = jsonb_in_array_start;
					sem.object_end = jsonb_in_object_end;
					sem.array_end = jsonb_in_array_end;
					sem.scalar = jsonb_in_scalar;
					sem.object_field_start = jsonb_in_object_field_start;

					pg_parse_json(lex, &sem);
				}
				break;
			case JSONBTYPE_JSONB:
				{
					Jsonb	   *jsonb = DatumGetJsonb(val);
					JsonbIterator *it;

					it = JsonbIteratorInit(&jsonb->root);

					if (JB_ROOT_IS_SCALAR(jsonb))
					{
						/*
						 * A scalar jsonb is stored as a one-element raw
						 * array; skip the array wrapper and take the
						 * element itself.
						 */
						(void) JsonbIteratorNext(&it, &jb, true);
						Assert(jb.type == jbvArray);
						(void) JsonbIteratorNext(&it, &jb, true);
						scalar_jsonb = true;
					}
					else
					{
						/* replay the whole iterator stream into result */
						JsonbIteratorToken type;

						while ((type = JsonbIteratorNext(&it, &jb, false))
							   != WJB_DONE)
						{
							if (type == WJB_END_ARRAY || type == WJB_END_OBJECT ||
								type == WJB_BEGIN_ARRAY || type == WJB_BEGIN_OBJECT)
								result->res = pushJsonbValue(&result->parseState,
															 type, NULL);
							else
								result->res = pushJsonbValue(&result->parseState,
															 type, &jb);
						}
					}
				}
				break;
			default:
				/* everything else is rendered via its output function */
				outputstr = OidOutputFunctionCall(outfuncoid, val);
				jb.type = jbvString;
				jb.val.string.len = checkStringLen(strlen(outputstr));
				jb.val.string.val = outputstr;
				break;
		}
	}

	/*
	 * Now insert jb into result, unless we did it recursively.
	 *
	 * NOTE(review): the range test below assumes the JsonbTypeCategory enum
	 * (declared elsewhere) orders JSONBTYPE_JSON .. JSONBTYPE_JSONCAST so
	 * that the range also encloses JSONBTYPE_JSONB, JSONBTYPE_ARRAY and
	 * JSONBTYPE_COMPOSITE — all the categories handled recursively above.
	 * Confirm against the enum declaration before reordering it.
	 */
	if (!is_null && !scalar_jsonb &&
		tcategory >= JSONBTYPE_JSON && tcategory <= JSONBTYPE_JSONCAST)
	{
		/* work has been done recursively */
		return;
	}
	else if (result->parseState == NULL)
	{
		/* single root scalar: wrap it in a one-element raw-scalar array */
		JsonbValue	va;

		va.type = jbvArray;
		va.val.array.rawScalar = true;
		va.val.array.nElems = 1;

		result->res = pushJsonbValue(&result->parseState, WJB_BEGIN_ARRAY, &va);
		result->res = pushJsonbValue(&result->parseState, WJB_ELEM, &jb);
		result->res = pushJsonbValue(&result->parseState, WJB_END_ARRAY, NULL);
	}
	else
	{
		/* push into the container currently under construction */
		JsonbValue *o = &result->parseState->contVal;

		switch (o->type)
		{
			case jbvArray:
				result->res = pushJsonbValue(&result->parseState, WJB_ELEM, &jb);
				break;
			case jbvObject:
				result->res = pushJsonbValue(&result->parseState,
											 key_scalar ? WJB_KEY : WJB_VALUE,
											 &jb);
				break;
			default:
				elog(ERROR, "unexpected parent of nested structure");
		}
	}
}