From 50cc811f30b895baa8f9449f8c3db88f318c15b1 Mon Sep 17 00:00:00 2001 From: Andrew Dunstan Date: Tue, 14 Jul 2020 12:50:08 -0400 Subject: [PATCH 2/2] SQLJSON constructors v48 --- .../pg_stat_statements/pg_stat_statements.c | 12 + doc/src/sgml/func.sgml | 990 ++++++++++++++++++ src/backend/executor/execExpr.c | 64 ++ src/backend/executor/execExprInterp.c | 32 + src/backend/nodes/copyfuncs.c | 161 +++ src/backend/nodes/equalfuncs.c | 18 + src/backend/nodes/makefuncs.c | 15 + src/backend/nodes/nodeFuncs.c | 149 +++ src/backend/nodes/outfuncs.c | 18 + src/backend/nodes/readfuncs.c | 22 + src/backend/parser/gram.y | 254 ++++- src/backend/parser/parse_expr.c | 539 ++++++++++ src/backend/parser/parse_target.c | 13 + src/backend/parser/parser.c | 16 + src/backend/utils/adt/json.c | 349 +++++- src/backend/utils/adt/jsonb.c | 324 +++++- src/backend/utils/adt/ruleutils.c | 208 +++- src/include/catalog/pg_aggregate.dat | 22 + src/include/catalog/pg_proc.dat | 70 ++ src/include/executor/execExpr.h | 12 + src/include/nodes/makefuncs.h | 1 + src/include/nodes/nodes.h | 7 + src/include/nodes/parsenodes.h | 94 +- src/include/nodes/primnodes.h | 25 + src/include/parser/kwlist.h | 6 + src/include/utils/json.h | 5 + src/include/utils/jsonb.h | 5 + src/interfaces/ecpg/preproc/parse.pl | 2 + src/interfaces/ecpg/preproc/parser.c | 14 + src/test/regress/expected/opr_sanity.out | 6 +- src/test/regress/expected/sqljson.out | 742 +++++++++++++ src/test/regress/parallel_schedule | 2 +- src/test/regress/serial_schedule | 1 + src/test/regress/sql/opr_sanity.sql | 6 +- src/test/regress/sql/sqljson.sql | 282 +++++ 35 files changed, 4364 insertions(+), 122 deletions(-) create mode 100644 src/test/regress/expected/sqljson.out create mode 100644 src/test/regress/sql/sqljson.sql diff --git a/contrib/pg_stat_statements/pg_stat_statements.c b/contrib/pg_stat_statements/pg_stat_statements.c index 7d7752ef61..1b27d00458 100644 --- a/contrib/pg_stat_statements/pg_stat_statements.c +++ b/contrib/pg_stat_statements/pg_stat_statements.c @@ -3112,6 +3112,18 @@ JumbleExpr(pgssJumbleState *jstate, Node *node) JumbleExpr(jstate, (Node *) expr->format); } break; + case T_JsonCtorExpr: + { + JsonCtorExpr *ctor = (JsonCtorExpr *) node; + + JumbleExpr(jstate, (Node *) ctor->func); + JumbleExpr(jstate, (Node *) ctor->coercion); + JumbleExpr(jstate, (Node *) ctor->returning); + APP_JUMB(ctor->type); + APP_JUMB(ctor->unique); + APP_JUMB(ctor->absent_on_null); + } + break; case T_List: foreach(temp, (List *) node) { diff --git a/doc/src/sgml/func.sgml b/doc/src/sgml/func.sgml index cc83d6652e..466c300ff5 100644 --- a/doc/src/sgml/func.sgml +++ b/doc/src/sgml/func.sgml @@ -16844,6 +16844,850 @@ $ ? (@ like_regex "^\\d+$") + + + SQL/JSON Functions and Expressions + + SQL/JSON + functions and expressions + + + + To provide native support for JSON data types within the SQL environment, + PostgreSQL implements the + SQL/JSON data model. + This model comprises sequences of items. Each item can hold SQL scalar values, + with an additional SQL/JSON null value, and composite data structures that use JSON + arrays and objects. + + + + SQL/JSON enables you to handle JSON data alongside regular SQL data, + with transaction support: + + + + + + Upload JSON data into a relational database and store it in + regular SQL columns as character or binary strings. + + + + + Generate JSON objects and arrays from relational data. + + + + + Query JSON data using SQL/JSON query functions and SQL/JSON path + language expressions. 
+ + + + + + Producing JSON Content + + + PostgreSQL provides several functions + that generate JSON data. Taking values of SQL types as input, these + functions construct JSON objects or JSON arrays represented as + SQL character or binary strings. + + + + + + + + + + + + + + + + + + + + + + + + + + + + JSON_OBJECT + create a JSON object + + + + +JSON_OBJECT ( + [ { key_expression { VALUE | ':' } + value_expression [ FORMAT JSON [ ENCODING UTF8 ] ] }[, ...] ] + [ { NULL | ABSENT } ON NULL ] + [ { WITH | WITHOUT } UNIQUE [ KEYS ] ] + [ RETURNING data_type [ FORMAT JSON [ ENCODING UTF8 ] ] +) + + + + + + Description + + + JSON_OBJECT function generates a JSON + object from SQL or JSON data. + + + + + Parameters + + + + + key_expression { VALUE | ':' } + value_expression [ FORMAT JSON [ ENCODING UTF8 ] ] + + + + The input clause that provides the data for constructing a JSON object: + + + + + key_expression is a scalar expression defining the + JSON key, which is implicitly converted + to the text type. + The provided expression cannot be NULL or belong to a type that has a cast to json. + + + + + value_expression is an expression + that provides the input for the JSON value. + + + + + The optional FORMAT clause is provided to conform to the SQL/JSON standard. + + + + + You must use a colon or the VALUE keyword as a delimiter between + the key and the value. Multiple key/value pairs are separated by commas. + + + + + + + { NULL | ABSENT } ON NULL + + + + Defines whether NULL values are allowed in the constructed + JSON object: + + + + NULL + + + Default. NULL values are allowed. + + + + + ABSENT + + + If the value is NULL, + the corresponding key/value pair is omitted from the generated + JSON object. + + + + + + + + + + { WITH | WITHOUT } UNIQUE [ KEYS ] + + + Defines whether duplicate keys are allowed: + + + + WITHOUT + + + Default. The constructed + JSON object can contain duplicate keys. + + + + + WITH + + + Duplicate keys are not allowed. + If the input data contains duplicate keys, an error is returned. + This check is performed before removing JSON items with NULL values. + + + + + + Optionally, you can add the KEYS keyword for semantic clarity. + + + + + + + RETURNING data_type [ FORMAT JSON [ ENCODING UTF8 ] ] + + + + The output clause that specifies the type of the generated JSON object. + For details, see . + + + + + + + + + Notes + Alternatively, you can construct JSON objects by using + PostgreSQL-specific json_build_object()/ + jsonb_build_object() functions. + See for details. 
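+
+   For example, a roughly equivalent object can be built with the
+   json_build_object() function (a minimal illustration only; the
+   ON NULL and UNIQUE KEYS options of JSON_OBJECT have
+   no direct counterpart there):
+
+SELECT json_build_object('key1', 'string', 'key2', 123);
+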
+ + + + + Examples + + Construct a JSON object from the provided key/value pairs of various types: + + +SELECT JSON_OBJECT( +-- scalar JSON types + 'key1': 'string', + 'key2': '[1, 2]', + 'key3' VALUE 123, -- alternative syntax for key-value delimiter + 'key4': NULL, +-- other types + 'key5': ARRAY[1, 2, 3], -- postgres array + 'key6': jsonb '{"a": ["b", 1]}', -- composite json/jsonb + 'key7': date '2017-09-30', -- datetime type + 'key8': row(1, 'a'), -- row type + 'key9': '[1, 2]' FORMAT JSON, -- same value as for key2, but with FORMAT +-- key can be an expression + 'key' || 'last' : TRUE +ABSENT ON NULL) AS json; + json +---------------------------------------------------- +{"key1" : "string", "key2" : "[1, 2]", "key3" : 123, + "key5" : [1,2,3], "key6" : {"a": ["b", 1]}, + "key7" : "2017-09-30", "key8" : {"f1":1,"f2":"a"}, + "key9" : [1, 2], "keylast" : true} +(1 row) + + + + From the films table, select some data + about the films distributed by Paramount Pictures + (did = 103) and return JSON objects: + + +SELECT +JSON_OBJECT( + 'code' VALUE f.code, + 'title' VALUE f.title, + 'did' VALUE f.did +) AS paramount +FROM films AS f +WHERE f.did = 103; + paramount +---------------------------------------------------- +{"code" : "P_301", "title" : "Vertigo", "did" : 103} +{"code" : "P_302", "title" : "Becket", "did" : 103} +{"code" : "P_303", "title" : "48 Hrs", "did" : 103} +(3 rows) + + + + + + + JSON_OBJECTAGG + create a JSON object as an aggregate of the provided data + + + +JSON_OBJECTAGG ( + [ { key_expression { VALUE | ':' } value_expression } ] + [ { NULL | ABSENT } ON NULL ] + [ { WITH | WITHOUT } UNIQUE [ KEYS ] ] + [ RETURNING data_type [ FORMAT JSON [ ENCODING UTF8 ] ] +) + + + + + + Description + + + JSON_OBJECTAGG function aggregates the provided data + into a JSON object. You can use this function to combine values + stored in different table columns into pairs. If you specify a GROUP BY + or an ORDER BY clause, this function returns a separate JSON object + for each table row. + + + + + Parameters + + + + + key_expression { VALUE | ':' } value_expression + + + + + The input clause that provides the data to be aggregated as a JSON object: + + + + + key_expression is a scalar expression defining the + JSON key, which is implicitly converted + to the text type. + The provided expression cannot be NULL or belong to a type that has a cast to json. + + + + + value_expression is an expression + that provides the input for the JSON value preceded by its type. + For JSON scalar types, you can omit the type. + + + + The input value of the bytea type must be stored in UTF8 + and contain a valid UTF8 string. Otherwise, an error occurs. + PostgreSQL currently supports only UTF8. + + + + + + You must use a colon or the VALUE keyword as a delimiter between + keys and values. Multiple key/value pairs are separated by commas. + + + + + + + { NULL | ABSENT } ON NULL + + + + Defines whether NULL values are allowed in the constructed + JSON object: + + + + NULL + + + Default. NULL values are allowed. + + + + + ABSENT + + + If the value is NULL, + the corresponding key/value pair is omitted from the generated + JSON object. + + + + + + + + + + { WITH | WITHOUT } UNIQUE [ KEYS ] + + + Defines whether duplicate keys are allowed: + + + + WITHOUT + + + Default. The constructed + JSON object can contain duplicate keys. + + + + + WITH + + + Duplicate keys are not allowed. + If the input data contains duplicate keys, an error is returned. 
+ This check is performed before removing JSON items with NULL values. + + + + + + Optionally, you can add the KEYS keyword for semantic clarity. + + + + + + + RETURNING data_type [ FORMAT JSON [ ENCODING UTF8 ] ] + + + + The output clause that specifies the type of the generated JSON object. + For details, see . + + + + + + + + + Notes + Alternatively, you can create JSON objects by using + PostgreSQL-specific json_object_agg()/ + jsonb_object_agg() aggregate functions. + See for details. + + + + + Examples + + + For films with did = 103, aggregate key/value pairs + of film genre (f.kind) and title (f.title) + into a single object: + + +SELECT +JSON_OBJECTAGG( + f.kind VALUE f.title) + AS films_list +FROM films AS f +where f.did = 103; + films_list +---------------------------------------------------- +{ "Action" : "Vertigo", "Drama" : "Becket", "Action" : "48 Hrs" } + + + + Return the same object as jsonb. Note that only a single film of + the action genre is included as the jsonb type does not allow duplicate keys. + + +SELECT +JSON_OBJECTAGG( + f.kind VALUE f.title + RETURNING jsonb) +AS films_list +FROM films AS f +where f.did = 103; + films_list +---------------------------------------------------- +{"Drama": "Becket", "Action": "48 Hrs"} + + + + Return objects of film titles and length, grouped by the film genre: + + +SELECT + f.kind, + JSON_OBJECTAGG( + f.title VALUE f.len +) AS films_list +FROM films AS f +GROUP BY f.kind; + + kind | films_list +-------------+---------------------------------- +Musical | { "West Side Story" : "02:32:00", "The King and I" : "02:13:00", "Bed Knobs and Broomsticks" : "01:57:00" } +Romantic | { "The African Queen" : "01:43:00", "Une Femme est une Femme" : "01:25:00", "Storia di una donna" : "01:30:00" } +Comedy | { "Bananas" : "01:22:00", "There's a Girl in my Soup" : "01:36:00" } +Drama | { "The Third Man" : "01:44:00", "Becket" : "02:28:00", "War and Peace" : "05:57:00", "Yojimbo" : "01:50:00", "Das Boot" : "02:29:00" } +Action | { "Vertigo" : "02:08:00", "48 Hrs" : "01:37:00", "Taxi Driver" : "01:54:00", "Absence of Malice" : "01:55:00" } +(5 rows) + + + + + + + JSON_ARRAY + create a JSON array + + + +JSON_ARRAY ( + [ { value_expression [ FORMAT JSON ] } [, ...] ] + [ { NULL | ABSENT } ON NULL ] + [ RETURNING data_type [ FORMAT JSON [ ENCODING UTF8 ] ] +) + +JSON_ARRAY ( + [ query_expression ] + [ RETURNING data_type [ FORMAT JSON [ ENCODING UTF8 ] ] +) + + + + + Description + + + JSON_ARRAY function constructs a JSON array from + the provided SQL or JSON data. + + + + + Parameters + + + + + value_expression + + + + + The input clause that provides the data for constructing a JSON array. + The value_expression is an expression + that provides the input for the JSON value preceded by its type. + For JSON scalar types, you can omit the type. + + + + The input value of the bytea type must be stored in UTF8 + and contain a valid UTF8 string. Otherwise, an error occurs. + PostgreSQL currently supports only UTF8. + + + + + + + + + query_expression + + + + An SQL query that provides the data for constructing a JSON array. + The query must return a single column that holds the values to be + used in the array. + + + + + + + { NULL | ABSENT } ON NULL + + + + Defines whether NULL values are allowed in the generated JSON array: + + + + NULL + + + NULL values are allowed. + + + + + ABSENT + + + Default. If the value is NULL, + the corresponding key/value pair is omitted from the generated + JSON object. 
+ + + + + + This clause is only supported for arrays built from an explicit list of values. + If you are using an SQL query to generate an array, NULL values are always + omitted. + + + + + + + RETURNING data_type [ FORMAT JSON [ ENCODING UTF8 ] ] + + + + The output clause that specifies the return type of the constructed JSON array. + For details, see . + + + + + + + + + Notes + Alternatively, you can create JSON arrays by using + PostgreSQL-specific json_build_array()/ + jsonb_build_array() functions. + See for details. + + + + + Examples + + From the films table, select some data + about the films distributed by Paramount Pictures + (did = 103) and return JSON arrays: + + +SELECT +JSON_ARRAY( + f.code, + f.title, + f.did +) AS films +FROM films AS f +WHERE f.did = 103; + films +---------------------------------------------------- +["code" : "P_301", "title" : "Vertigo", "did" : 103] +["code" : "P_302", "title" : "Becket", "did" : 103] +["code" : "P_303", "title" : "48 Hrs", "did" : 103] +(3 rows) + + + Construct a JSON array from the list of film titles returned from the + films table by a subquery: + + +SELECT +JSON_ARRAY( + SELECT + f.title +FROM films AS f +where f.did = 103) +AS film_titles; + film_titles +---------------------------------------------------- +["Vertigo", "Becket", "48 Hrs"] +(1 row) + + + + + + + JSON_ARRAYAGG + aggregate a JSON array + + + +JSON_ARRAYAGG ( + [ value_expression ] + [ ORDER BY sort_expression ] + [ { NULL | ABSENT } ON NULL ] + [ RETURNING data_type [ FORMAT JSON [ ENCODING UTF8 ] ] +) + + + + + + Description + + + JSON_ARRAYAGG function aggregates the provided SQL + or JSON data into a JSON array. + + + + + Parameters + + + + + value_expression + + + + + The input clause that provides the input data to be aggregated as a JSON array. + The value_expression + can be a value or a query returning the values to be used as input in array construction. + You can provide multiple input values separated by commas. + + + + + + + ORDER BY + + + + Sorts the input data to be aggregated as a JSON array. + For details on the exact syntax of the ORDER BY clause, see . + + + + + + + { NULL | ABSENT } ON NULL + + + + Defines whether NULL values are allowed in the constructed array: + + + + NULLNULL values are allowed. + + + + + ABSENT (default) — NULL + values are omitted from the generated array. + + + + + + + + + + RETURNING data_type [ FORMAT JSON [ ENCODING UTF8 ] ] + + + + The output clause that specifies the return type of the constructed JSON array. + For details, see . + + + + + + + + + Notes + Alternatively, you can create JSON arrays by using + PostgreSQL-specific json_agg()/ + jsonb_agg() functions. + See for details. + + + + + Examples + + Construct an array of film titles sorted in alphabetical order: + + +SELECT +JSON_ARRAYAGG( + f.title +ORDER BY f.title ASC) AS film_titles +FROM films AS f; + film_titles +---------------------------------------------------- +["48 Hrs", "Absence of Malice", "Bananas", "Becket", "Bed Knobs and Broomsticks", "Das Boot", "Storia di una donna", "Taxi Driver", "The African Queen", "The King and I", "There's a Girl in my Soup", "The Third Man", "Une Femme est une Femme", "Vertigo", "War and Peace", "West Side Story", "Yojimbo"] +(1 row) + + + + + + + SQL/JSON Common Clauses + + + SQL/JSON Output Clause + + + + + RETURNING data_type [ FORMAT JSON [ ENCODING UTF8 ] ] + + + + The output clause that specifies the return type of the generated + JSON object. 
Out of the box, PostgreSQL + supports the following types: json, jsonb, + bytea, and character string types (text, char, + varchar, and nchar). + To use other types, you must create the CAST from json for this type. + By default, the json type is returned. + + + The optional FORMAT clause is provided to conform to the SQL/JSON standard. + + + The output clause is common for both constructor and query SQL/JSON functions. + + + + + + + + + @@ -18566,6 +19410,40 @@ SELECT NULLIF(value, '(none)') ... + json_agg_strict + + json_agg_strict(expression) + + + any + + + json + + No + aggregates values, skipping nulls, as a JSON array + + + + + + jsonb_agg_strict + + jsonb_agg_strict(expression) + + + any + + + jsonb + + No + aggregates values, skipping nulls, as a JSON array + + + + + json_object_agg json_object_agg ( key @@ -18594,6 +19472,118 @@ SELECT NULLIF(value, '(none)') ... + json_object_agg_strict + + json_object_agg_strict(name, value) + + + (any, any) + + + json + + No + aggregates name/value pairs as a JSON object; + null values are skipped, names can not be null + + + + + + jsonb_object_agg_strict + + jsonb_object_agg_strict(name, value) + + + (any, any) + + + jsonb + + No + aggregates name/value pairs as a JSON object; + null values are skipped, names can not be null + + + + + + json_object_agg_unique + + json_object_agg_unique(name, value) + + + (any, any) + + + json + + No + aggregates name/value pairs as a JSON object; + in case of duplicate keys error is thrown; + values can be null, but not names + + + + + + jsonb_object_agg_unique + + jsonb_object_agg_unique(name, value) + + + (any, any) + + + jsonb + + No + aggregates name/value pairs as a JSON object; + in case of duplicate keys error is thrown; + values can be null, but not names + + + + + + json_object_agg_unique_strict + + json_object_agg_unique_strict(name, value) + + + (any, any) + + + json + + No + aggregates name/value pairs as a JSON object; + in case of duplicate keys error is thrown; + null values are skipped, names can not be null + + + + + + jsonb_object_agg_unique_strict + + jsonb_object_agg_unique_strict(name, value) + + + (any, any) + + + jsonb + + No + aggregates name/value pairs as a JSON object; + in case of duplicate keys error is thrown; + null values are skipped, names can not be null + + + + + max max ( see text ) diff --git a/src/backend/executor/execExpr.c b/src/backend/executor/execExpr.c index 8063c061f2..c562df7787 100644 --- a/src/backend/executor/execExpr.c +++ b/src/backend/executor/execExpr.c @@ -2130,6 +2130,70 @@ ExecInitExprRec(Expr *node, ExprState *state, break; } + case T_JsonCtorExpr: + { + JsonCtorExpr *ctor = (JsonCtorExpr *) node; + List *args = ctor->args; + ListCell *lc; + int nargs = list_length(args); + int argno = 0; + + if (ctor->func) + { + ExecInitExprRec(ctor->func, state, resv, resnull); + } + else + { + + scratch.opcode = EEOP_JSON_CTOR; + scratch.d.json_ctor.ctor = ctor; + scratch.d.json_ctor.arg_values = palloc(sizeof(Datum) * nargs); + scratch.d.json_ctor.arg_nulls = palloc(sizeof(bool) * nargs); + scratch.d.json_ctor.arg_types = palloc(sizeof(Oid) * nargs); + scratch.d.json_ctor.nargs = nargs; + + foreach(lc, args) + { + Expr *arg = (Expr *) lfirst(lc); + + scratch.d.json_ctor.arg_types[argno] = exprType((Node *) arg); + + if (IsA(arg, Const)) + { + /* Don't evaluate const arguments every round */ + Const *con = (Const *) arg; + + scratch.d.json_ctor.arg_values[argno] = con->constvalue; + scratch.d.json_ctor.arg_nulls[argno] = con->constisnull; + } + else + { + 
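+						/*
+						 * Non-constant argument: evaluate it into the
+						 * preallocated value/null slot for this position.
+						 */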
ExecInitExprRec(arg, state, + &scratch.d.json_ctor.arg_values[argno], + &scratch.d.json_ctor.arg_nulls[argno]); + } + argno++; + } + + ExprEvalPushStep(state, &scratch); + } + + if (ctor->coercion) + { + Datum *innermost_caseval = state->innermost_caseval; + bool *innermost_isnull = state->innermost_casenull; + + state->innermost_caseval = resv; + state->innermost_casenull = resnull; + + ExecInitExprRec(ctor->coercion, state, resv, resnull); + + state->innermost_caseval = innermost_caseval; + state->innermost_casenull = innermost_isnull; + } + } + break; + default: elog(ERROR, "unrecognized node type: %d", (int) nodeTag(node)); diff --git a/src/backend/executor/execExprInterp.c b/src/backend/executor/execExprInterp.c index b812bbacee..f7ab0d974a 100644 --- a/src/backend/executor/execExprInterp.c +++ b/src/backend/executor/execExprInterp.c @@ -71,6 +71,8 @@ #include "utils/date.h" #include "utils/datum.h" #include "utils/expandedrecord.h" +#include "utils/json.h" +#include "utils/jsonb.h" #include "utils/lsyscache.h" #include "utils/memutils.h" #include "utils/timestamp.h" @@ -432,6 +434,7 @@ ExecInterpExpr(ExprState *state, ExprContext *econtext, bool *isnull) &&CASE_EEOP_WINDOW_FUNC, &&CASE_EEOP_SUBPLAN, &&CASE_EEOP_ALTERNATIVE_SUBPLAN, + &&CASE_EEOP_JSON_CTOR, &&CASE_EEOP_AGG_STRICT_DESERIALIZE, &&CASE_EEOP_AGG_DESERIALIZE, &&CASE_EEOP_AGG_STRICT_INPUT_CHECK_ARGS, @@ -1544,6 +1547,35 @@ ExecInterpExpr(ExprState *state, ExprContext *econtext, bool *isnull) EEO_NEXT(); } + EEO_CASE(EEOP_JSON_CTOR) + { + Datum res; + bool is_jsonb = op->d.json_ctor.ctor->returning->format->format == JS_FORMAT_JSONB; + + if (op->d.json_ctor.ctor->type == JSCTOR_JSON_ARRAY) + res = (is_jsonb ? + jsonb_build_array_worker : + json_build_array_worker)(op->d.json_ctor.nargs, + op->d.json_ctor.arg_values, + op->d.json_ctor.arg_nulls, + op->d.json_ctor.arg_types, + op->d.json_ctor.ctor->absent_on_null); + else + res = (is_jsonb ? 
+ jsonb_build_object_worker : + json_build_object_worker)(op->d.json_ctor.nargs, + op->d.json_ctor.arg_values, + op->d.json_ctor.arg_nulls, + op->d.json_ctor.arg_types, + op->d.json_ctor.ctor->absent_on_null, + op->d.json_ctor.ctor->unique); + + *op->resvalue = res; + *op->resnull = false; + + EEO_NEXT(); + } + /* evaluate a strict aggregate deserialization function */ EEO_CASE(EEOP_AGG_STRICT_DESERIALIZE) { diff --git a/src/backend/nodes/copyfuncs.c b/src/backend/nodes/copyfuncs.c index 446614c835..4b35750b04 100644 --- a/src/backend/nodes/copyfuncs.c +++ b/src/backend/nodes/copyfuncs.c @@ -2294,6 +2294,143 @@ _copyJsonValueExpr(const JsonValueExpr *from) return newnode; } +/* + * _copyJsonCtorExpr + */ +static JsonCtorExpr * +_copyJsonCtorExpr(const JsonCtorExpr *from) +{ + JsonCtorExpr *newnode = makeNode(JsonCtorExpr); + + COPY_SCALAR_FIELD(type); + COPY_NODE_FIELD(args); + COPY_NODE_FIELD(func); + COPY_NODE_FIELD(coercion); + COPY_NODE_FIELD(returning); + COPY_SCALAR_FIELD(absent_on_null); + COPY_SCALAR_FIELD(unique); + COPY_LOCATION_FIELD(location); + + return newnode; +} + +/* + * _copyJsonKeyValue + */ +static JsonKeyValue * +_copyJsonKeyValue(const JsonKeyValue *from) +{ + JsonKeyValue *newnode = makeNode(JsonKeyValue); + + COPY_NODE_FIELD(key); + COPY_NODE_FIELD(value); + + return newnode; +} + +/* + * _copyJsonObjectCtor + */ +static JsonObjectCtor * +_copyJsonObjectCtor(const JsonObjectCtor *from) +{ + JsonObjectCtor *newnode = makeNode(JsonObjectCtor); + + COPY_NODE_FIELD(exprs); + COPY_NODE_FIELD(output); + COPY_SCALAR_FIELD(absent_on_null); + COPY_SCALAR_FIELD(unique); + COPY_LOCATION_FIELD(location); + + return newnode; +} + +/* + * _copyJsonObjectAgg + */ +static JsonObjectAgg * +_copyJsonObjectAgg(const JsonObjectAgg *from) +{ + JsonObjectAgg *newnode = makeNode(JsonObjectAgg); + + COPY_NODE_FIELD(ctor.output); + COPY_NODE_FIELD(ctor.agg_filter); + COPY_NODE_FIELD(ctor.agg_order); + COPY_NODE_FIELD(ctor.over); + COPY_LOCATION_FIELD(ctor.location); + COPY_NODE_FIELD(arg); + COPY_SCALAR_FIELD(absent_on_null); + COPY_SCALAR_FIELD(unique); + + return newnode; +} + +/* + * _copyJsonOutput + */ +static JsonOutput * +_copyJsonOutput(const JsonOutput *from) +{ + JsonOutput *newnode = makeNode(JsonOutput); + + COPY_NODE_FIELD(typeName); + COPY_NODE_FIELD(returning); + + return newnode; +} + +/* + * _copyJsonArrayCtor + */ +static JsonArrayCtor * +_copyJsonArrayCtor(const JsonArrayCtor *from) +{ + JsonArrayCtor *newnode = makeNode(JsonArrayCtor); + + COPY_NODE_FIELD(exprs); + COPY_NODE_FIELD(output); + COPY_SCALAR_FIELD(absent_on_null); + COPY_LOCATION_FIELD(location); + + return newnode; +} + +/* + * _copyJsonArrayAgg + */ +static JsonArrayAgg * +_copyJsonArrayAgg(const JsonArrayAgg *from) +{ + JsonArrayAgg *newnode = makeNode(JsonArrayAgg); + + COPY_NODE_FIELD(ctor.output); + COPY_NODE_FIELD(ctor.agg_filter); + COPY_NODE_FIELD(ctor.agg_order); + COPY_NODE_FIELD(ctor.over); + COPY_LOCATION_FIELD(ctor.location); + COPY_NODE_FIELD(arg); + COPY_SCALAR_FIELD(absent_on_null); + + return newnode; +} + +/* + * _copyJsonArrayQueryCtor + */ +static JsonArrayQueryCtor * +_copyJsonArrayQueryCtor(const JsonArrayQueryCtor *from) +{ + JsonArrayQueryCtor *newnode = makeNode(JsonArrayQueryCtor); + + COPY_NODE_FIELD(query); + COPY_NODE_FIELD(output); + COPY_NODE_FIELD(format); + COPY_SCALAR_FIELD(absent_on_null); + COPY_LOCATION_FIELD(location); + + return newnode; +} + /* **************************************************************** * pathnodes.h copy functions * @@ -5200,6 +5337,30 @@ 
copyObjectImpl(const void *from) case T_JsonValueExpr: retval = _copyJsonValueExpr(from); break; + case T_JsonKeyValue: + retval = _copyJsonKeyValue(from); + break; + case T_JsonCtorExpr: + retval = _copyJsonCtorExpr(from); + break; + case T_JsonObjectCtor: + retval = _copyJsonObjectCtor(from); + break; + case T_JsonObjectAgg: + retval = _copyJsonObjectAgg(from); + break; + case T_JsonOutput: + retval = _copyJsonOutput(from); + break; + case T_JsonArrayCtor: + retval = _copyJsonArrayCtor(from); + break; + case T_JsonArrayQueryCtor: + retval = _copyJsonArrayQueryCtor(from); + break; + case T_JsonArrayAgg: + retval = _copyJsonArrayAgg(from); + break; /* * RELATION NODES diff --git a/src/backend/nodes/equalfuncs.c b/src/backend/nodes/equalfuncs.c index 5810b4ff53..1b8b8985c1 100644 --- a/src/backend/nodes/equalfuncs.c +++ b/src/backend/nodes/equalfuncs.c @@ -848,6 +848,21 @@ _equalJsonValueExpr(const JsonValueExpr *a, const JsonValueExpr *b) return true; } +static bool +_equalJsonCtorExpr(const JsonCtorExpr *a, const JsonCtorExpr *b) +{ + COMPARE_SCALAR_FIELD(type); + COMPARE_NODE_FIELD(args); + COMPARE_NODE_FIELD(func); + COMPARE_NODE_FIELD(coercion); + COMPARE_NODE_FIELD(returning); + COMPARE_SCALAR_FIELD(absent_on_null); + COMPARE_SCALAR_FIELD(unique); + COMPARE_LOCATION_FIELD(location); + + return true; +} + /* * Stuff from pathnodes.h */ @@ -3249,6 +3264,9 @@ equal(const void *a, const void *b) case T_JsonValueExpr: retval = _equalJsonValueExpr(a, b); break; + case T_JsonCtorExpr: + retval = _equalJsonCtorExpr(a, b); + break; /* * RELATION NODES diff --git a/src/backend/nodes/makefuncs.c b/src/backend/nodes/makefuncs.c index 35b2c8014e..889bc2cd2f 100644 --- a/src/backend/nodes/makefuncs.c +++ b/src/backend/nodes/makefuncs.c @@ -868,3 +868,18 @@ makeJsonEncoding(char *name) return JS_ENC_DEFAULT; } + +/* + * makeJsonKeyValue - + * creates a JsonKeyValue node + */ +Node * +makeJsonKeyValue(Node *key, Node *value) +{ + JsonKeyValue *n = makeNode(JsonKeyValue); + + n->key = (Expr *) key; + n->value = castNode(JsonValueExpr, value); + + return (Node *) n; +} diff --git a/src/backend/nodes/nodeFuncs.c b/src/backend/nodes/nodeFuncs.c index df8f39fb61..1c7c55e780 100644 --- a/src/backend/nodes/nodeFuncs.c +++ b/src/backend/nodes/nodeFuncs.c @@ -265,6 +265,9 @@ exprType(const Node *expr) type = exprType((Node *) (jve->formatted_expr ? 
jve->formatted_expr : jve->raw_expr)); } break; + case T_JsonCtorExpr: + type = ((const JsonCtorExpr *) expr)->returning->typid; + break; default: elog(ERROR, "unrecognized node type: %d", (int) nodeTag(expr)); type = InvalidOid; /* keep compiler quiet */ @@ -500,6 +503,8 @@ exprTypmod(const Node *expr) return exprTypmod((Node *) ((const PlaceHolderVar *) expr)->phexpr); case T_JsonValueExpr: return exprTypmod((Node *) ((const JsonValueExpr *) expr)->formatted_expr); + case T_JsonCtorExpr: + return -1; /* ((const JsonCtorExpr *) expr)->returning->typmod; */ default: break; } @@ -918,6 +923,16 @@ exprCollation(const Node *expr) case T_JsonValueExpr: coll = exprCollation((Node *) ((const JsonValueExpr *) expr)->formatted_expr); break; + case T_JsonCtorExpr: + { + const JsonCtorExpr *ctor = (const JsonCtorExpr *) expr; + + if (ctor->coercion) + coll = exprCollation((Node *) ctor->coercion); + else + coll = InvalidOid; + } + break; default: elog(ERROR, "unrecognized node type: %d", (int) nodeTag(expr)); coll = InvalidOid; /* keep compiler quiet */ @@ -1125,6 +1140,16 @@ exprSetCollation(Node *expr, Oid collation) exprSetCollation((Node *) ((JsonValueExpr *) expr)->formatted_expr, collation); break; + case T_JsonCtorExpr: + { + JsonCtorExpr *ctor = (JsonCtorExpr *) expr; + + if (ctor->coercion) + exprSetCollation((Node *) ctor->coercion, collation); + else + Assert(!OidIsValid(collation)); /* result is always an json[b] type */ + } + break; default: elog(ERROR, "unrecognized node type: %d", (int) nodeTag(expr)); break; @@ -1568,6 +1593,9 @@ exprLocation(const Node *expr) case T_JsonValueExpr: loc = exprLocation((Node *) ((const JsonValueExpr *) expr)->raw_expr); break; + case T_JsonCtorExpr: + loc = ((const JsonCtorExpr *) expr)->location; + break; default: /* for any other node type it's just unknown... 
*/ loc = -1; @@ -2275,6 +2303,18 @@ expression_tree_walker(Node *node, return true; } break; + case T_JsonCtorExpr: + { + JsonCtorExpr *ctor = (JsonCtorExpr *) node; + + if (walker(ctor->args, context)) + return true; + if (walker(ctor->func, context)) + return true; + if (walker(ctor->coercion, context)) + return true; + } + break; default: elog(ERROR, "unrecognized node type: %d", (int) nodeTag(node)); @@ -3219,6 +3259,19 @@ expression_tree_mutator(Node *node, MUTATE(newnode->formatted_expr, jve->formatted_expr, Expr *); MUTATE(newnode->format, jve->format, JsonFormat *); + return (Node *) newnode; + } + case T_JsonCtorExpr: + { + JsonCtorExpr *jve = (JsonCtorExpr *) node; + JsonCtorExpr *newnode; + + FLATCOPY(newnode, jve, JsonCtorExpr); + MUTATE(newnode->args, jve->args, List *); + MUTATE(newnode->func, jve->func, Expr *); + MUTATE(newnode->coercion, jve->coercion, Expr *); + MUTATE(newnode->returning, jve->returning, JsonReturning *); + return (Node *) newnode; } default: @@ -3930,6 +3983,102 @@ raw_expression_tree_walker(Node *node, return true; } break; + case T_JsonCtorExpr: + { + JsonCtorExpr *ctor = (JsonCtorExpr *) node; + + if (walker(ctor->args, context)) + return true; + if (walker(ctor->func, context)) + return true; + if (walker(ctor->coercion, context)) + return true; + if (walker(ctor->returning, context)) + return true; + } + break; + case T_JsonOutput: + { + JsonOutput *out = (JsonOutput *) node; + + if (walker(out->typeName, context)) + return true; + if (walker(out->returning, context)) + return true; + } + break; + case T_JsonKeyValue: + { + JsonKeyValue *jkv = (JsonKeyValue *) node; + + if (walker(jkv->key, context)) + return true; + if (walker(jkv->value, context)) + return true; + } + break; + case T_JsonObjectCtor: + { + JsonObjectCtor *joc = (JsonObjectCtor *) node; + + if (walker(joc->output, context)) + return true; + if (walker(joc->exprs, context)) + return true; + } + break; + case T_JsonArrayCtor: + { + JsonArrayCtor *jac = (JsonArrayCtor *) node; + + if (walker(jac->output, context)) + return true; + if (walker(jac->exprs, context)) + return true; + } + break; + case T_JsonObjectAgg: + { + JsonObjectAgg *joa = (JsonObjectAgg *) node; + + if (walker(joa->ctor.output, context)) + return true; + if (walker(joa->ctor.agg_order, context)) + return true; + if (walker(joa->ctor.agg_filter, context)) + return true; + if (walker(joa->ctor.over, context)) + return true; + if (walker(joa->arg, context)) + return true; + } + break; + case T_JsonArrayAgg: + { + JsonArrayAgg *jaa = (JsonArrayAgg *) node; + + if (walker(jaa->ctor.output, context)) + return true; + if (walker(jaa->ctor.agg_order, context)) + return true; + if (walker(jaa->ctor.agg_filter, context)) + return true; + if (walker(jaa->ctor.over, context)) + return true; + if (walker(jaa->arg, context)) + return true; + } + break; + case T_JsonArrayQueryCtor: + { + JsonArrayQueryCtor *jaqc = (JsonArrayQueryCtor *) node; + + if (walker(jaqc->output, context)) + return true; + if (walker(jaqc->query, context)) + return true; + } + break; default: elog(ERROR, "unrecognized node type: %d", (int) nodeTag(node)); diff --git a/src/backend/nodes/outfuncs.c b/src/backend/nodes/outfuncs.c index eb4023b976..0b0717f4d9 100644 --- a/src/backend/nodes/outfuncs.c +++ b/src/backend/nodes/outfuncs.c @@ -1739,6 +1739,21 @@ _outJsonValueExpr(StringInfo str, const JsonValueExpr *node) WRITE_NODE_FIELD(format); } +static void +_outJsonCtorExpr(StringInfo str, const JsonCtorExpr *node) +{ + WRITE_NODE_TYPE("JSONCTOREXPR"); + + 
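+	/* field order must match _readJsonCtorExpr() in readfuncs.c */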
WRITE_NODE_FIELD(args); + WRITE_NODE_FIELD(func); + WRITE_NODE_FIELD(coercion); + WRITE_INT_FIELD(type); + WRITE_NODE_FIELD(returning); + WRITE_BOOL_FIELD(unique); + WRITE_BOOL_FIELD(absent_on_null); + WRITE_LOCATION_FIELD(location); +} + /***************************************************************************** * * Stuff from pathnodes.h. @@ -4376,6 +4391,9 @@ outNode(StringInfo str, const void *obj) case T_JsonValueExpr: _outJsonValueExpr(str, obj); break; + case T_JsonCtorExpr: + _outJsonCtorExpr(str, obj); + break; default: diff --git a/src/backend/nodes/readfuncs.c b/src/backend/nodes/readfuncs.c index 92b39a2591..5d14c8342f 100644 --- a/src/backend/nodes/readfuncs.c +++ b/src/backend/nodes/readfuncs.c @@ -1388,6 +1388,26 @@ _readJsonValueExpr(void) READ_DONE(); } +/* + * _readJsonCtorExpr + */ +static JsonCtorExpr * +_readJsonCtorExpr(void) +{ + READ_LOCALS(JsonCtorExpr); + + READ_NODE_FIELD(args); + READ_NODE_FIELD(func); + READ_NODE_FIELD(coercion); + READ_INT_FIELD(type); + READ_NODE_FIELD(returning); + READ_BOOL_FIELD(unique); + READ_BOOL_FIELD(absent_on_null); + READ_LOCATION_FIELD(location); + + READ_DONE(); +} + /* * Stuff from pathnodes.h. * @@ -2931,6 +2951,8 @@ parseNodeString(void) return_value = _readJsonReturning(); else if (MATCH("JSONVALUEEXPR", 13)) return_value = _readJsonValueExpr(); + else if (MATCH("JSONCTOREXPR", 12)) + return_value = _readJsonCtorExpr(); else { elog(ERROR, "badly formatted node string \"%.32s\"...", token); diff --git a/src/backend/parser/gram.y b/src/backend/parser/gram.y index 8d777ef8e5..d27be7733a 100644 --- a/src/backend/parser/gram.y +++ b/src/backend/parser/gram.y @@ -602,11 +602,31 @@ static Node *makeRecursiveViewSelect(char *relname, List *aliases, Node *query); %type json_format_clause_opt json_representation json_value_expr + json_func_expr json_output_clause_opt + json_value_constructor + json_object_constructor + json_object_constructor_args_opt + json_object_args + json_object_ctor_args_opt + json_object_func_args + json_array_constructor + json_name_and_value + json_aggregate_func + json_object_aggregate_constructor + json_array_aggregate_constructor + +%type json_name_and_value_list + json_value_expr_list + json_array_aggregate_order_by_clause_opt %type json_encoding json_encoding_clause_opt +%type json_key_uniqueness_constraint_opt + json_object_constructor_null_clause_opt + json_array_constructor_null_clause_opt + /* * Non-keyword token types. These are hard-wired into the "flex" lexer. 
* They must be listed first so that their numeric codes do not depend on @@ -632,7 +652,7 @@ static Node *makeRecursiveViewSelect(char *relname, List *aliases, Node *query); */ /* ordinary key words in alphabetical order */ -%token ABORT_P ABSOLUTE_P ACCESS ACTION ADD_P ADMIN AFTER +%token ABORT_P ABSENT ABSOLUTE_P ACCESS ACTION ADD_P ADMIN AFTER AGGREGATE ALL ALSO ALTER ALWAYS ANALYSE ANALYZE AND ANY ARRAY AS ASC ASSERTION ASSIGNMENT ASYMMETRIC AT ATTACH ATTRIBUTE AUTHORIZATION @@ -669,9 +689,9 @@ static Node *makeRecursiveViewSelect(char *relname, List *aliases, Node *query); INNER_P INOUT INPUT_P INSENSITIVE INSERT INSTEAD INT_P INTEGER INTERSECT INTERVAL INTO INVOKER IS ISNULL ISOLATION - JOIN JSON + JOIN JSON JSON_ARRAY JSON_ARRAYAGG JSON_OBJECT JSON_OBJECTAGG - KEY + KEY KEYS LABEL LANGUAGE LARGE_P LAST_P LATERAL_P LEADING LEAKPROOF LEAST LEFT LEVEL LIKE LIMIT LISTEN LOAD LOCAL @@ -735,7 +755,7 @@ static Node *makeRecursiveViewSelect(char *relname, List *aliases, Node *query); * as NOT, at least with respect to their left-hand subexpression. * NULLS_LA and WITH_LA are needed to make the grammar LALR(1). */ -%token NOT_LA NULLS_LA WITH_LA +%token NOT_LA NULLS_LA WITH_LA WITH_LA_UNIQUE WITHOUT_LA /* Precedence: lowest to highest */ %nonassoc SET /* see relation_expr_opt_alias */ @@ -778,6 +798,7 @@ static Node *makeRecursiveViewSelect(char *relname, List *aliases, Node *query); * blame any funny behavior of UNBOUNDED on the SQL standard, though. */ %nonassoc UNBOUNDED /* ideally should have same precedence as IDENT */ +%nonassoc ABSENT %nonassoc IDENT GENERATED NULL_P PARTITION RANGE ROWS GROUPS PRECEDING FOLLOWING CUBE ROLLUP %left Op OPERATOR /* multi-character ops and user-defined operators */ %left '+' '-' @@ -802,6 +823,9 @@ static Node *makeRecursiveViewSelect(char *relname, List *aliases, Node *query); /* kluge to keep xml_whitespace_option from causing shift/reduce conflicts */ %right PRESERVE STRIP_P +%nonassoc empty_json_unique +%left WITHOUT WITH_LA_UNIQUE + %% /* @@ -12834,7 +12858,7 @@ ConstInterval: opt_timezone: WITH_LA TIME ZONE { $$ = true; } - | WITHOUT TIME ZONE { $$ = false; } + | WITHOUT_LA TIME ZONE { $$ = false; } | /*EMPTY*/ { $$ = false; } ; @@ -13443,6 +13467,17 @@ b_expr: c_expr } ; +json_key_uniqueness_constraint_opt: + WITH_LA_UNIQUE UNIQUE opt_keys { $$ = true; } + | WITHOUT UNIQUE opt_keys { $$ = false; } + | /* EMPTY */ %prec empty_json_unique { $$ = false; } + ; + +opt_keys: + KEYS { } + | /* EMPTY */ { } + ; + /* * Productions that can be used in both a_expr and b_expr. 
* @@ -13703,6 +13738,13 @@ func_expr: func_application within_group_clause filter_clause over_clause n->over = $4; $$ = (Node *) n; } + | json_aggregate_func filter_clause over_clause + { + JsonAggCtor *n = (JsonAggCtor *) $1; + n->agg_filter = $2; + n->over = $3; + $$ = (Node *) $1; + } | func_expr_common_subexpr { $$ = $1; } ; @@ -13716,6 +13758,7 @@ func_expr: func_application within_group_clause filter_clause over_clause func_expr_windowless: func_application { $$ = $1; } | func_expr_common_subexpr { $$ = $1; } + | json_aggregate_func { $$ = $1; } ; /* @@ -13940,6 +13983,8 @@ func_expr_common_subexpr: n->location = @1; $$ = (Node *)n; } + | json_func_expr + { $$ = $1; } ; /* @@ -14644,11 +14689,14 @@ opt_asymmetric: ASYMMETRIC ; /* SQL/JSON support */ +json_func_expr: + json_value_constructor + ; json_value_expr: a_expr json_format_clause_opt { - $$ = (Node *) makeJsonValueExpr((Expr *) $1, $2); + $$ = (Node *) makeJsonValueExpr((Expr *) $1, castNode(JsonFormat, $2)); } ; @@ -14656,7 +14704,7 @@ json_format_clause_opt: FORMAT json_representation { $$ = $2; - $$.location = @1; + castNode(JsonFormat, $$)->location = @1; } | /* EMPTY */ { @@ -14686,10 +14734,194 @@ json_output_clause_opt: { JsonOutput *n = makeNode(JsonOutput); n->typeName = $2; - n->returning.format = $3; + n->returning = makeNode(JsonReturning); + n->returning->format = (JsonFormat *) $3; $$ = (Node *) n; } | /* EMPTY */ { $$ = NULL; } + ; + +json_value_constructor: + json_object_constructor + | json_array_constructor + ; + +json_object_constructor: + JSON_OBJECT '(' json_object_args ')' + { + $$ = $3; + } + ; + +json_object_args: + json_object_ctor_args_opt + | json_object_func_args + ; + +json_object_func_args: + func_arg_list + { + List *func = list_make1(makeString("json_object")); + $$ = (Node *) makeFuncCall(func, $1, @1); + } + ; + +json_object_ctor_args_opt: + json_object_constructor_args_opt json_output_clause_opt + { + JsonObjectCtor *n = (JsonObjectCtor *) $1; + n->output = (JsonOutput *) $2; + n->location = @1; + $$ = (Node *) n; + } + ; + +json_object_constructor_args_opt: + json_name_and_value_list + json_object_constructor_null_clause_opt + json_key_uniqueness_constraint_opt + { + JsonObjectCtor *n = makeNode(JsonObjectCtor); + n->exprs = $1; + n->absent_on_null = $2; + n->unique = $3; + $$ = (Node *) n; + } + | /* EMPTY */ + { + JsonObjectCtor *n = makeNode(JsonObjectCtor); + n->exprs = NULL; + n->absent_on_null = false; + n->unique = false; + $$ = (Node *) n; + } + ; + +json_name_and_value_list: + json_name_and_value + { $$ = list_make1($1); } + | json_name_and_value_list ',' json_name_and_value + { $$ = lappend($1, $3); } + ; + +json_name_and_value: +/* TODO This is not supported due to conflicts + KEY c_expr VALUE_P json_value_expr %prec POSTFIXOP + { $$ = makeJsonKeyValue($2, $4); } + | +*/ + c_expr VALUE_P json_value_expr + { $$ = makeJsonKeyValue($1, $3); } + | + a_expr ':' json_value_expr + { $$ = makeJsonKeyValue($1, $3); } + ; + +json_object_constructor_null_clause_opt: + NULL_P ON NULL_P { $$ = false; } + | ABSENT ON NULL_P { $$ = true; } + | /* EMPTY */ { $$ = false; } + ; + +json_array_constructor: + JSON_ARRAY '(' + json_value_expr_list + json_array_constructor_null_clause_opt + json_output_clause_opt + ')' + { + JsonArrayCtor *n = makeNode(JsonArrayCtor); + n->exprs = $3; + n->absent_on_null = $4; + n->output = (JsonOutput *) $5; + n->location = @1; + $$ = (Node *) n; + } + | JSON_ARRAY '(' + select_no_parens + /* json_format_clause_opt */ + /* json_array_constructor_null_clause_opt */ + 
json_output_clause_opt + ')' + { + JsonArrayQueryCtor *n = makeNode(JsonArrayQueryCtor); + n->query = $3; + n->format = makeJsonFormat(JS_FORMAT_DEFAULT, JS_ENC_DEFAULT, -1); + /* n->format = $4; */ + n->absent_on_null = true /* $5 */; + n->output = (JsonOutput *) $4; + n->location = @1; + $$ = (Node *) n; + } + | JSON_ARRAY '(' + json_output_clause_opt + ')' + { + JsonArrayCtor *n = makeNode(JsonArrayCtor); + n->exprs = NIL; + n->absent_on_null = true; + n->output = (JsonOutput *) $3; + n->location = @1; + $$ = (Node *) n; + } + ; + +json_value_expr_list: + json_value_expr { $$ = list_make1($1); } + | json_value_expr_list ',' json_value_expr { $$ = lappend($1, $3);} + ; + +json_array_constructor_null_clause_opt: + NULL_P ON NULL_P { $$ = false; } + | ABSENT ON NULL_P { $$ = true; } + | /* EMPTY */ { $$ = true; } + ; + +json_aggregate_func: + json_object_aggregate_constructor + | json_array_aggregate_constructor + ; + +json_object_aggregate_constructor: + JSON_OBJECTAGG '(' + json_name_and_value + json_object_constructor_null_clause_opt + json_key_uniqueness_constraint_opt + json_output_clause_opt + ')' + { + JsonObjectAgg *n = makeNode(JsonObjectAgg); + n->arg = (JsonKeyValue *) $3; + n->absent_on_null = $4; + n->unique = $5; + n->ctor.output = (JsonOutput *) $6; + n->ctor.agg_order = NULL; + n->ctor.location = @1; + $$ = (Node *) n; + } + ; + +json_array_aggregate_constructor: + JSON_ARRAYAGG '(' + json_value_expr + json_array_aggregate_order_by_clause_opt + json_array_constructor_null_clause_opt + json_output_clause_opt + ')' + { + JsonArrayAgg *n = makeNode(JsonArrayAgg); + n->arg = (JsonValueExpr *) $3; + n->ctor.agg_order = $4; + n->absent_on_null = $5; + n->ctor.output = (JsonOutput *) $6; + n->ctor.location = @1; + $$ = (Node *) n; + } + ; + +json_array_aggregate_order_by_clause_opt: + ORDER BY sortby_list { $$ = $3; } + | /* EMPTY */ { $$ = NIL; } ; /***************************************************************************** @@ -15079,6 +15311,7 @@ ColLabel: IDENT { $$ = $1; } */ unreserved_keyword: ABORT_P + | ABSENT | ABSOLUTE_P | ACCESS | ACTION @@ -15203,6 +15436,7 @@ unreserved_keyword: | ISOLATION | JSON | KEY + | KEYS | LABEL | LANGUAGE | LARGE_P @@ -15410,6 +15644,10 @@ col_name_keyword: | INT_P | INTEGER | INTERVAL + | JSON_ARRAY + | JSON_ARRAYAGG + | JSON_OBJECT + | JSON_OBJECTAGG | LEAST | NATIONAL | NCHAR diff --git a/src/backend/parser/parse_expr.c b/src/backend/parser/parse_expr.c index 48b0437182..af0687c69c 100644 --- a/src/backend/parser/parse_expr.c +++ b/src/backend/parser/parse_expr.c @@ -15,6 +15,8 @@ #include "postgres.h" +#include "catalog/pg_aggregate.h" +#include "catalog/pg_proc.h" #include "catalog/pg_type.h" #include "commands/dbcommands.h" #include "miscadmin.h" @@ -121,6 +123,12 @@ static Node *transformWholeRowRef(ParseState *pstate, static Node *transformIndirection(ParseState *pstate, A_Indirection *ind); static Node *transformTypeCast(ParseState *pstate, TypeCast *tc); static Node *transformCollateClause(ParseState *pstate, CollateClause *c); +static Node *transformJsonObjectCtor(ParseState *pstate, JsonObjectCtor *ctor); +static Node *transformJsonArrayCtor(ParseState *pstate, JsonArrayCtor *ctor); +static Node *transformJsonArrayQueryCtor(ParseState *pstate, + JsonArrayQueryCtor *ctor); +static Node *transformJsonObjectAgg(ParseState *pstate, JsonObjectAgg *agg); +static Node *transformJsonArrayAgg(ParseState *pstate, JsonArrayAgg *agg); static Node *make_row_comparison_op(ParseState *pstate, List *opname, List *largs, List *rargs, int 
location); static Node *make_row_distinct_op(ParseState *pstate, List *opname, @@ -369,6 +377,26 @@ transformExprRecurse(ParseState *pstate, Node *expr) break; } + case T_JsonObjectCtor: + result = transformJsonObjectCtor(pstate, (JsonObjectCtor *) expr); + break; + + case T_JsonArrayCtor: + result = transformJsonArrayCtor(pstate, (JsonArrayCtor *) expr); + break; + + case T_JsonArrayQueryCtor: + result = transformJsonArrayQueryCtor(pstate, (JsonArrayQueryCtor *) expr); + break; + + case T_JsonObjectAgg: + result = transformJsonObjectAgg(pstate, (JsonObjectAgg *) expr); + break; + + case T_JsonArrayAgg: + result = transformJsonArrayAgg(pstate, (JsonArrayAgg *) expr); + break; + default: /* should not reach here */ elog(ERROR, "unrecognized node type: %d", (int) nodeTag(expr)); @@ -3747,3 +3775,514 @@ transformJsonValueExpr(ParseState *pstate, JsonValueExpr *ve, return expr; } + +/* + * Checks specified output format for its applicability to the target type. + */ +static void +checkJsonOutputFormat(ParseState *pstate, const JsonFormat *format, + Oid targettype, bool allow_format_for_non_strings) +{ + if (!allow_format_for_non_strings && + format->format != JS_FORMAT_DEFAULT && + (targettype != BYTEAOID && + targettype != JSONOID && + targettype != JSONBOID)) + { + char typcategory; + bool typispreferred; + + get_type_category_preferred(targettype, &typcategory, &typispreferred); + + if (typcategory != TYPCATEGORY_STRING) + ereport(ERROR, + (errcode(ERRCODE_FEATURE_NOT_SUPPORTED), + parser_errposition(pstate, format->location), + errmsg("cannot use JSON format with non-string output types"))); + } + + if (format->format == JS_FORMAT_JSON) + { + JsonEncoding enc = format->encoding != JS_ENC_DEFAULT ? + format->encoding : JS_ENC_UTF8; + + if (targettype != BYTEAOID && + format->encoding != JS_ENC_DEFAULT) + ereport(ERROR, + (errcode(ERRCODE_FEATURE_NOT_SUPPORTED), + parser_errposition(pstate, format->location), + errmsg("cannot set JSON encoding for non-bytea output types"))); + + if (enc != JS_ENC_UTF8) + ereport(ERROR, + (errcode(ERRCODE_FEATURE_NOT_SUPPORTED), + errmsg("unsupported JSON encoding"), + errhint("only UTF8 JSON encoding is supported"), + parser_errposition(pstate, format->location))); + } +} + +/* + * Transform JSON output clause. + * + * Assigns target type oid and modifier. + * Assigns default format or checks specified format for its applicability to + * the target type. + */ +static JsonReturning * +transformJsonOutput(ParseState *pstate, const JsonOutput *output, + bool allow_format) +{ + JsonReturning *ret; + + /* if output clause is not specified, make default clause value */ + if (!output) + { + ret = makeNode(JsonReturning); + + ret->format = makeNode(JsonFormat); + ret->format->format = JS_FORMAT_DEFAULT; + ret->format->encoding = JS_ENC_DEFAULT; + ret->format->location = -1; + ret->typid = InvalidOid; + ret->typmod = -1; + + return ret; + } + + ret = copyObject(output->returning); + + typenameTypeIdAndMod(pstate, output->typeName, &ret->typid, &ret->typmod); + + if (output->typeName->setof) + ereport(ERROR, + (errcode(ERRCODE_FEATURE_NOT_SUPPORTED), + errmsg("returning SETOF types is not supported in SQL/JSON functions"))); + + if (ret->format->format == JS_FORMAT_DEFAULT) + /* assign JSONB format when returning jsonb, or JSON format otherwise */ + ret->format->format = + ret->typid == JSONBOID ? 
JS_FORMAT_JSONB : JS_FORMAT_JSON; + else + checkJsonOutputFormat(pstate, ret->format, ret->typid, allow_format); + + return ret; +} + +/* + * Coerce json[b]-valued function expression to the output type. + */ +static Node * +coerceJsonFuncExpr(ParseState *pstate, Node *expr, + const JsonReturning *returning, bool report_error) +{ + Node *res; + int location; + Oid exprtype = exprType(expr); + + /* if output type is not specified or equals to function type, return */ + if (!OidIsValid(returning->typid) || returning->typid == exprtype) + return expr; + + location = exprLocation(expr); + + if (location < 0) + location = returning ? returning->format->location : -1; + + /* special case for RETURNING bytea FORMAT json */ + if (returning->format->format == JS_FORMAT_JSON && + returning->typid == BYTEAOID) + { + /* encode json text into bytea using pg_convert_to() */ + Node *texpr = coerce_to_specific_type(pstate, expr, TEXTOID, + "JSON_FUNCTION"); + Const *enc = getJsonEncodingConst(returning->format); + FuncExpr *fexpr = makeFuncExpr(F_PG_CONVERT_TO, BYTEAOID, + list_make2(texpr, enc), + InvalidOid, InvalidOid, + COERCE_EXPLICIT_CALL); + fexpr->location = location; + + return (Node *) fexpr; + } + + /* try to coerce expression to the output type */ + res = coerce_to_target_type(pstate, expr, exprtype, + returning->typid, returning->typmod, + /* XXX throwing errors when casting to char(N) */ + COERCION_EXPLICIT, + COERCE_EXPLICIT_CAST, + location); + + if (!res && report_error) + ereport(ERROR, + (errcode(ERRCODE_CANNOT_COERCE), + errmsg("cannot cast type %s to %s", + format_type_be(exprtype), + format_type_be(returning->typid)), + parser_coercion_errposition(pstate, location, expr))); + + return res; +} + +static Node * +makeJsonCtorExpr(ParseState *pstate, JsonCtorType type, List *args, Expr *fexpr, + JsonReturning *returning, bool unique, bool absent_on_null, + int location) +{ + Node *placeholder; + Node *coercion; + JsonCtorExpr *jsctor = makeNode(JsonCtorExpr); + Oid default_typid = + returning->format->format == JS_FORMAT_JSONB ? JSONBOID : JSONOID; + + jsctor->args = args; + jsctor->func = fexpr; + jsctor->type = type; + jsctor->returning = returning; + jsctor->unique = unique; + jsctor->absent_on_null = absent_on_null; + jsctor->location = location; + + if (fexpr) + placeholder = makeCaseTestExpr((Node *) fexpr); + else + { + CaseTestExpr *cte = makeNode(CaseTestExpr); + + cte->typeId = default_typid; + cte->typeMod = -1; + cte->collation = InvalidOid; + + placeholder = (Node *) cte; + } + + coercion = coerceJsonFuncExpr(pstate, placeholder, returning, true); + + if (coercion != placeholder) + jsctor->coercion = (Expr *) coercion; + + /* Assign default RETURNING type */ + if (!OidIsValid(jsctor->returning->typid)) + { + jsctor->returning->typid = default_typid; + jsctor->returning->typmod = -1; + } + + return (Node *) jsctor; +} + +/* + * Transform JSON_OBJECT() constructor. + * + * JSON_OBJECT() is transformed into json[b]_build_object[_ext]() call + * depending on the output JSON format. The first two arguments of + * json[b]_build_object_ext() are absent_on_null and check_key_uniqueness. + * + * Then function call result is coerced to the target type. 
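+ *
+ * For example, JSON_OBJECT('a': 1 RETURNING jsonb) is expected to behave
+ * like jsonb_build_object('a', 1) (illustration only; the actual function
+ * chosen depends on the output format and the ON NULL / UNIQUE options).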
+ */ +static Node * +transformJsonObjectCtor(ParseState *pstate, JsonObjectCtor *ctor) +{ + JsonReturning *returning; + List *args = NIL; + + /* transform key-value pairs, if any */ + if (ctor->exprs) + { + ListCell *lc; + + /* transform and append key-value arguments */ + foreach(lc, ctor->exprs) + { + JsonKeyValue *kv = castNode(JsonKeyValue, lfirst(lc)); + Node *key = transformExprRecurse(pstate, (Node *) kv->key); + Node *val = transformJsonValueExpr(pstate, kv->value, + JS_FORMAT_DEFAULT); + + args = lappend(args, key); + args = lappend(args, val); + } + } + + returning = transformJsonOutput(pstate, ctor->output, true); + + return makeJsonCtorExpr(pstate, JSCTOR_JSON_OBJECT, args, NULL, + returning, ctor->unique, ctor->absent_on_null, + ctor->location); +} + +/* + * Transform JSON_ARRAY(query [FORMAT] [RETURNING] [ON NULL]) into + * (SELECT JSON_ARRAYAGG(a [FORMAT] [RETURNING] [ON NULL]) FROM (query) q(a)) + */ +static Node * +transformJsonArrayQueryCtor(ParseState *pstate, JsonArrayQueryCtor *ctor) +{ + SubLink *sublink = makeNode(SubLink); + SelectStmt *select = makeNode(SelectStmt); + RangeSubselect *range = makeNode(RangeSubselect); + Alias *alias = makeNode(Alias); + ResTarget *target = makeNode(ResTarget); + JsonArrayAgg *agg = makeNode(JsonArrayAgg); + ColumnRef *colref = makeNode(ColumnRef); + Query *query; + ParseState *qpstate; + + /* Transform query only for counting target list entries. */ + qpstate = make_parsestate(pstate); + + query = transformStmt(qpstate, ctor->query); + + if (count_nonjunk_tlist_entries(query->targetList) != 1) + ereport(ERROR, + (errcode(ERRCODE_SYNTAX_ERROR), + errmsg("subquery must return only one column"), + parser_errposition(pstate, ctor->location))); + + free_parsestate(qpstate); + + colref->fields = list_make2(makeString(pstrdup("q")), + makeString(pstrdup("a"))); + colref->location = ctor->location; + + agg->arg = makeJsonValueExpr((Expr *) colref, ctor->format); + agg->ctor.agg_order = NIL; + agg->ctor.output = ctor->output; + agg->absent_on_null = ctor->absent_on_null; + agg->ctor.location = ctor->location; + + target->name = NULL; + target->indirection = NIL; + target->val = (Node *) agg; + target->location = ctor->location; + + alias->aliasname = pstrdup("q"); + alias->colnames = list_make1(makeString(pstrdup("a"))); + + range->lateral = false; + range->subquery = ctor->query; + range->alias = alias; + + select->targetList = list_make1(target); + select->fromClause = list_make1(range); + + sublink->subLinkType = EXPR_SUBLINK; + sublink->subLinkId = 0; + sublink->testexpr = NULL; + sublink->operName = NIL; + sublink->subselect = (Node *) select; + sublink->location = ctor->location; + + return transformExprRecurse(pstate, (Node *) sublink); +} + +/* + * Common code for JSON_OBJECTAGG and JSON_ARRAYAGG transformation. + */ +static Node * +transformJsonAggCtor(ParseState *pstate, JsonAggCtor *agg_ctor, + JsonReturning *returning, List *args, const char *aggfn, + Oid aggtype, JsonCtorType ctor_type, + bool unique, bool absent_on_null) +{ + Oid aggfnoid; + Node *node; + Expr *aggfilter = agg_ctor->agg_filter ? 
(Expr *) + transformWhereClause(pstate, agg_ctor->agg_filter, + EXPR_KIND_FILTER, "FILTER") : NULL; + + aggfnoid = DatumGetInt32(DirectFunctionCall1(regprocin, + CStringGetDatum(aggfn))); + + if (agg_ctor->over) + { + /* window function */ + WindowFunc *wfunc = makeNode(WindowFunc); + + wfunc->winfnoid = aggfnoid; + wfunc->wintype = aggtype; + /* wincollid and inputcollid will be set by parse_collate.c */ + wfunc->args = args; + /* winref will be set by transformWindowFuncCall */ + wfunc->winstar = false; + wfunc->winagg = true; + wfunc->aggfilter = aggfilter; + wfunc->location = agg_ctor->location; + + /* + * ordered aggs not allowed in windows yet + */ + if (agg_ctor->agg_order != NIL) + ereport(ERROR, + (errcode(ERRCODE_FEATURE_NOT_SUPPORTED), + errmsg("aggregate ORDER BY is not implemented for window functions"), + parser_errposition(pstate, agg_ctor->location))); + + /* parse_agg.c does additional window-func-specific processing */ + transformWindowFuncCall(pstate, wfunc, agg_ctor->over); + + node = (Node *) wfunc; + } + else + { + Aggref *aggref = makeNode(Aggref); + + aggref->aggfnoid = aggfnoid; + aggref->aggtype = aggtype; + + /* aggcollid and inputcollid will be set by parse_collate.c */ + aggref->aggtranstype = InvalidOid; /* will be set by planner */ + /* aggargtypes will be set by transformAggregateCall */ + /* aggdirectargs and args will be set by transformAggregateCall */ + /* aggorder and aggdistinct will be set by transformAggregateCall */ + aggref->aggfilter = aggfilter; + aggref->aggstar = false; + aggref->aggvariadic = false; + aggref->aggkind = AGGKIND_NORMAL; + /* agglevelsup will be set by transformAggregateCall */ + aggref->aggsplit = AGGSPLIT_SIMPLE; /* planner might change this */ + aggref->location = agg_ctor->location; + + transformAggregateCall(pstate, aggref, args, agg_ctor->agg_order, false); + + node = (Node *) aggref; + } + + return makeJsonCtorExpr(pstate, ctor_type, NULL, (Expr *) node, returning, + unique, absent_on_null, agg_ctor->location); +} + +/* + * Transform JSON_OBJECTAGG() aggregate function. + * + * JSON_OBJECTAGG() is transformed into + * json[b]_objectagg(key, value, absent_on_null, check_unique) call depending on + * the output JSON format. Then the function call result is coerced to the + * target output type. 
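+ *
+ * For example, JSON_OBJECTAGG(k VALUE v ABSENT ON NULL RETURNING jsonb)
+ * is expected to be equivalent to jsonb_object_agg_strict(k, v)
+ * (illustration only).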
+ */ +static Node * +transformJsonObjectAgg(ParseState *pstate, JsonObjectAgg *agg) +{ + JsonReturning *returning; + Node *key; + Node *val; + List *args; + const char *aggfnname; + Oid aggtype; + + returning = transformJsonOutput(pstate, agg->ctor.output, true); + + key = transformExprRecurse(pstate, (Node *) agg->arg->key); + val = transformJsonValueExpr(pstate, agg->arg->value, JS_FORMAT_DEFAULT); + args = list_make2(key, val); + + if (returning->format->format == JS_FORMAT_JSONB) + { + if (agg->absent_on_null) + if (agg->unique) + aggfnname = "pg_catalog.jsonb_object_agg_unique_strict"; /* F_JSONB_OBJECT_AGG_UNIQUE_STRICT */ + else + aggfnname = "pg_catalog.jsonb_object_agg_strict"; /* F_JSONB_OBJECT_AGG_STRICT */ + else + if (agg->unique) + aggfnname = "pg_catalog.jsonb_object_agg_unique"; /* F_JSONB_OBJECT_AGG_UNIQUE */ + else + aggfnname = "pg_catalog.jsonb_object_agg"; /* F_JSONB_OBJECT_AGG */ + + aggtype = JSONBOID; + } + else + { + if (agg->absent_on_null) + if (agg->unique) + aggfnname = "pg_catalog.json_object_agg_unique_strict"; /* F_JSON_OBJECT_AGG_UNIQUE_STRICT */ + else + aggfnname = "pg_catalog.json_object_agg_strict"; /* F_JSON_OBJECT_AGG_STRICT */ + else + if (agg->unique) + aggfnname = "pg_catalog.json_object_agg_unique"; /* F_JSON_OBJECT_AGG_UNIQUE */ + else + aggfnname = "pg_catalog.json_object_agg"; /* F_JSON_OBJECT_AGG */ + + aggtype = JSONOID; + } + + return transformJsonAggCtor(pstate, &agg->ctor, returning, args, aggfnname, + aggtype, JSCTOR_JSON_OBJECTAGG, + agg->unique, agg->absent_on_null); +} + +/* + * Transform JSON_ARRAYAGG() aggregate function. + * + * JSON_ARRAYAGG() is transformed into json[b]_agg[_strict]() call depending + * on the output JSON format and absent_on_null. Then the function call result + * is coerced to the target output type. + */ +static Node * +transformJsonArrayAgg(ParseState *pstate, JsonArrayAgg *agg) +{ + JsonReturning *returning; + Node *arg; + const char *aggfnname; + Oid aggtype; + + returning = transformJsonOutput(pstate, agg->ctor.output, true); + + arg = transformJsonValueExpr(pstate, agg->arg, JS_FORMAT_DEFAULT); + + if (returning->format->format == JS_FORMAT_JSONB) + { + aggfnname = agg->absent_on_null ? + "pg_catalog.jsonb_agg_strict" : "pg_catalog.jsonb_agg"; + aggtype = JSONBOID; + } + else + { + aggfnname = agg->absent_on_null ? + "pg_catalog.json_agg_strict" : "pg_catalog.json_agg"; + aggtype = JSONOID; + } + + return transformJsonAggCtor(pstate, &agg->ctor, returning, list_make1(arg), + aggfnname, aggtype, JSCTOR_JSON_ARRAYAGG, + false, agg->absent_on_null); +} + +/* + * Transform JSON_ARRAY() constructor. + * + * JSON_ARRAY() is transformed into json[b]_build_array[_ext]() call + * depending on the output JSON format. The first argument of + * json[b]_build_array_ext() is absent_on_null. + * + * Then function call result is coerced to the target type. 
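+ *
+ * Behavioral sketch (illustrative only; assumes ABSENT ON NULL as the
+ * grammar's default for JSON_ARRAY):
+ *   JSON_ARRAY('a', NULL, 'b')              -> ["a", "b"]
+ *   JSON_ARRAY('a', NULL, 'b' NULL ON NULL) -> ["a", null, "b"]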
+ */ +static Node * +transformJsonArrayCtor(ParseState *pstate, JsonArrayCtor *ctor) +{ + JsonReturning *returning; + List *args = NIL; + + /* transform element expressions, if any */ + if (ctor->exprs) + { + ListCell *lc; + + /* transform and append element arguments */ + foreach(lc, ctor->exprs) + { + JsonValueExpr *jsval = castNode(JsonValueExpr, lfirst(lc)); + Node *val = transformJsonValueExpr(pstate, jsval, + JS_FORMAT_DEFAULT); + + args = lappend(args, val); + } + } + + returning = transformJsonOutput(pstate, ctor->output, true); + + return makeJsonCtorExpr(pstate, JSCTOR_JSON_ARRAY, args, NULL, returning, + false, ctor->absent_on_null, ctor->location); +} diff --git a/src/backend/parser/parse_target.c b/src/backend/parser/parse_target.c index 566c517837..b191acb124 100644 --- a/src/backend/parser/parse_target.c +++ b/src/backend/parser/parse_target.c @@ -1931,6 +1931,19 @@ FigureColnameInternal(Node *node, char **name) case T_XmlSerialize: *name = "xmlserialize"; return 2; + case T_JsonObjectCtor: + *name = "json_object"; + return 2; + case T_JsonArrayCtor: + case T_JsonArrayQueryCtor: + *name = "json_array"; + return 2; + case T_JsonObjectAgg: + *name = "json_objectagg"; + return 2; + case T_JsonArrayAgg: + *name = "json_arrayagg"; + return 2; default: break; } diff --git a/src/backend/parser/parser.c b/src/backend/parser/parser.c index be86eb37fe..b1d1a1e892 100644 --- a/src/backend/parser/parser.c +++ b/src/backend/parser/parser.c @@ -131,6 +131,9 @@ base_yylex(YYSTYPE *lvalp, YYLTYPE *llocp, core_yyscan_t yyscanner) case USCONST: cur_token_length = strlen(yyextra->core_yy_extra.scanbuf + *llocp); break; + case WITHOUT: + cur_token_length = 7; + break; default: return cur_token; } @@ -202,6 +205,19 @@ base_yylex(YYSTYPE *lvalp, YYLTYPE *llocp, core_yyscan_t yyscanner) case ORDINALITY: cur_token = WITH_LA; break; + case UNIQUE: + cur_token = WITH_LA_UNIQUE; + break; + } + break; + + case WITHOUT: + /* Replace WITHOUT by WITHOUT_LA if it's followed by TIME */ + switch (next_token) + { + case TIME: + cur_token = WITHOUT_LA; + break; } break; diff --git a/src/backend/utils/adt/json.c b/src/backend/utils/adt/json.c index 641ae3fdf8..4a1091988d 100644 --- a/src/backend/utils/adt/json.c +++ b/src/backend/utils/adt/json.c @@ -42,6 +42,23 @@ typedef enum /* type categories for datum_to_json */ JSONTYPE_OTHER /* all else */ } JsonTypeCategory; +/* Context for key uniqueness check */ +typedef struct JsonUniqueCheckContext +{ + struct JsonKeyInfo + { + int offset; /* key offset: + * in result if positive, + * in skipped_keys if negative */ + int length; /* key length */ + } *keys; /* key info array */ + int nkeys; /* number of processed keys */ + int nallocated; /* number of allocated keys in array */ + StringInfo result; /* resulting json */ + StringInfoData skipped_keys; /* skipped keys with NULL values */ + MemoryContext mcxt; /* context for saving skipped keys */ +} JsonUniqueCheckContext; + typedef struct JsonAggState { StringInfo str; @@ -49,6 +66,7 @@ typedef struct JsonAggState Oid key_output_func; JsonTypeCategory val_category; Oid val_output_func; + JsonUniqueCheckContext unique_check; } JsonAggState; static void composite_to_json(Datum composite, StringInfo result, @@ -754,8 +772,8 @@ to_json(PG_FUNCTION_ARGS) * * aggregate input column as a json array value. 
*/ -Datum -json_agg_transfn(PG_FUNCTION_ARGS) +static Datum +json_agg_transfn_worker(FunctionCallInfo fcinfo, bool absent_on_null) { MemoryContext aggcontext, oldcontext; @@ -795,9 +813,14 @@ json_agg_transfn(PG_FUNCTION_ARGS) else { state = (JsonAggState *) PG_GETARG_POINTER(0); - appendStringInfoString(state->str, ", "); } + if (absent_on_null && PG_ARGISNULL(1)) + PG_RETURN_POINTER(state); + + if (state->str->len > 1) + appendStringInfoString(state->str, ", "); + /* fast path for NULLs */ if (PG_ARGISNULL(1)) { @@ -809,7 +832,7 @@ json_agg_transfn(PG_FUNCTION_ARGS) val = PG_GETARG_DATUM(1); /* add some whitespace if structured type and not first item */ - if (!PG_ARGISNULL(0) && + if (!PG_ARGISNULL(0) && state->str->len > 1 && (state->val_category == JSONTYPE_ARRAY || state->val_category == JSONTYPE_COMPOSITE)) { @@ -827,6 +850,25 @@ json_agg_transfn(PG_FUNCTION_ARGS) PG_RETURN_POINTER(state); } + +/* + * json_agg aggregate function + */ +Datum +json_agg_transfn(PG_FUNCTION_ARGS) +{ + return json_agg_transfn_worker(fcinfo, false); +} + +/* + * json_agg_strict aggregate function + */ +Datum +json_agg_strict_transfn(PG_FUNCTION_ARGS) +{ + return json_agg_transfn_worker(fcinfo, true); +} + /* * json_agg final function */ @@ -850,18 +892,115 @@ json_agg_finalfn(PG_FUNCTION_ARGS) PG_RETURN_TEXT_P(catenate_stringinfo_string(state->str, "]")); } +static inline void +json_unique_check_init(JsonUniqueCheckContext *cxt, + StringInfo result, int nkeys) +{ + cxt->mcxt = CurrentMemoryContext; + cxt->nkeys = 0; + cxt->nallocated = nkeys ? nkeys : 16; + cxt->keys = palloc(sizeof(*cxt->keys) * cxt->nallocated); + cxt->result = result; + cxt->skipped_keys.data = NULL; +} + +static inline void +json_unique_check_free(JsonUniqueCheckContext *cxt) +{ + if (cxt->keys) + pfree(cxt->keys); + + if (cxt->skipped_keys.data) + pfree(cxt->skipped_keys.data); +} + +/* On-demand initialization of skipped_keys StringInfo structure */ +static inline StringInfo +json_unique_check_get_skipped_keys(JsonUniqueCheckContext *cxt) +{ + StringInfo out = &cxt->skipped_keys; + + if (!out->data) + { + MemoryContext oldcxt = MemoryContextSwitchTo(cxt->mcxt); + initStringInfo(out); + MemoryContextSwitchTo(oldcxt); + } + + return out; +} + +/* + * Save current key offset (key is not yet appended) to the key list, key + * length is saved later in json_unique_check_key() when the key is appended. + */ +static inline void +json_unique_check_save_key_offset(JsonUniqueCheckContext *cxt, StringInfo out) +{ + if (cxt->nkeys >= cxt->nallocated) + { + cxt->nallocated *= 2; + cxt->keys = repalloc(cxt->keys, sizeof(*cxt->keys) * cxt->nallocated); + } + + cxt->keys[cxt->nkeys++].offset = out->len; +} + +/* + * Check uniqueness of key already appended to 'out' StringInfo. + */ +static inline void +json_unique_check_key(JsonUniqueCheckContext *cxt, StringInfo out) +{ + struct JsonKeyInfo *keys = cxt->keys; + int curr = cxt->nkeys - 1; + int offset = keys[curr].offset; + int length = out->len - offset; + char *curr_key = &out->data[offset]; + int i; + + keys[curr].length = length; /* save current key length */ + + if (out == &cxt->skipped_keys) + /* invert offset for skipped keys for their recognition */ + keys[curr].offset = -keys[curr].offset; + + /* check collisions with previous keys */ + for (i = 0; i < curr; i++) + { + char *prev_key; + + if (cxt->keys[i].length != length) + continue; + + offset = cxt->keys[i].offset; + + prev_key = offset > 0 + ? 
&cxt->result->data[offset] + : &cxt->skipped_keys.data[-offset]; + + if (!memcmp(curr_key, prev_key, length)) + ereport(ERROR, + (errcode(ERRCODE_DUPLICATE_JSON_OBJECT_KEY_VALUE), + errmsg("duplicate JSON key %s", curr_key))); + } +} + /* * json_object_agg transition function. * * aggregate two input columns as a single json object value. */ -Datum -json_object_agg_transfn(PG_FUNCTION_ARGS) +static Datum +json_object_agg_transfn_worker(FunctionCallInfo fcinfo, + bool absent_on_null, bool unique_keys) { MemoryContext aggcontext, oldcontext; JsonAggState *state; + StringInfo out; Datum arg; + bool skip; if (!AggCheckCallContext(fcinfo, &aggcontext)) { @@ -882,6 +1021,10 @@ json_object_agg_transfn(PG_FUNCTION_ARGS) oldcontext = MemoryContextSwitchTo(aggcontext); state = (JsonAggState *) palloc(sizeof(JsonAggState)); state->str = makeStringInfo(); + if (unique_keys) + json_unique_check_init(&state->unique_check, state->str, 0); + else + memset(&state->unique_check, 0, sizeof(state->unique_check)); MemoryContextSwitchTo(oldcontext); arg_type = get_fn_expr_argtype(fcinfo->flinfo, 1); @@ -909,7 +1052,6 @@ json_object_agg_transfn(PG_FUNCTION_ARGS) else { state = (JsonAggState *) PG_GETARG_POINTER(0); - appendStringInfoString(state->str, ", "); } /* @@ -925,11 +1067,41 @@ json_object_agg_transfn(PG_FUNCTION_ARGS) (errcode(ERRCODE_INVALID_PARAMETER_VALUE), errmsg("field name must not be null"))); + /* Skip null values if absent_on_null */ + skip = absent_on_null && PG_ARGISNULL(2); + + if (skip) + { + /* If key uniqueness check is needed we must save skipped keys */ + if (!unique_keys) + PG_RETURN_POINTER(state); + + out = json_unique_check_get_skipped_keys(&state->unique_check); + } + else + { + out = state->str; + + if (out->len > 2) + appendStringInfoString(out, ", "); + } + arg = PG_GETARG_DATUM(1); - datum_to_json(arg, false, state->str, state->key_category, + if (unique_keys) + json_unique_check_save_key_offset(&state->unique_check, out); + + datum_to_json(arg, false, out, state->key_category, state->key_output_func, true); + if (unique_keys) + { + json_unique_check_key(&state->unique_check, out); + + if (skip) + PG_RETURN_POINTER(state); + } + appendStringInfoString(state->str, " : "); if (PG_ARGISNULL(2)) @@ -943,6 +1115,42 @@ json_object_agg_transfn(PG_FUNCTION_ARGS) PG_RETURN_POINTER(state); } +/* + * json_object_agg aggregate function + */ +Datum +json_object_agg_transfn(PG_FUNCTION_ARGS) +{ + return json_object_agg_transfn_worker(fcinfo, false, false); +} + +/* + * json_object_agg_strict aggregate function + */ +Datum +json_object_agg_strict_transfn(PG_FUNCTION_ARGS) +{ + return json_object_agg_transfn_worker(fcinfo, true, false); +} + +/* + * json_object_agg_unique aggregate function + */ +Datum +json_object_agg_unique_transfn(PG_FUNCTION_ARGS) +{ + return json_object_agg_transfn_worker(fcinfo, false, true); +} + +/* + * json_object_agg_unique_strict aggregate function + */ +Datum +json_object_agg_unique_strict_transfn(PG_FUNCTION_ARGS) +{ + return json_object_agg_transfn_worker(fcinfo, true, true); +} + /* * json_object_agg final function. 
*/ @@ -960,6 +1168,8 @@ json_object_agg_finalfn(PG_FUNCTION_ARGS) if (state == NULL) PG_RETURN_NULL(); + json_unique_check_free(&state->unique_check); + /* Else return state with appropriate object terminator added */ PG_RETURN_TEXT_P(catenate_stringinfo_string(state->str, " }")); } @@ -984,25 +1194,14 @@ catenate_stringinfo_string(StringInfo buffer, const char *addon) return result; } -/* - * SQL function json_build_object(variadic "any") - */ Datum -json_build_object(PG_FUNCTION_ARGS) +json_build_object_worker(int nargs, Datum *args, bool *nulls, Oid *types, + bool absent_on_null, bool unique_keys) { - int nargs = PG_NARGS(); int i; const char *sep = ""; StringInfo result; - Datum *args; - bool *nulls; - Oid *types; - - /* fetch argument values to build the object */ - nargs = extract_variadic_args(fcinfo, 0, false, &args, &types, &nulls); - - if (nargs < 0) - PG_RETURN_NULL(); + JsonUniqueCheckContext unique_check; if (nargs % 2 != 0) ereport(ERROR, @@ -1016,19 +1215,53 @@ json_build_object(PG_FUNCTION_ARGS) appendStringInfoChar(result, '{'); + if (unique_keys) + json_unique_check_init(&unique_check, result, nargs / 2); + for (i = 0; i < nargs; i += 2) { - appendStringInfoString(result, sep); - sep = ", "; + StringInfo out; + bool skip; + + /* Skip null values if absent_on_null */ + skip = absent_on_null && nulls[i + 1]; + + if (skip) + { + /* If key uniqueness check is needed we must save skipped keys */ + if (!unique_keys) + continue; + + out = json_unique_check_get_skipped_keys(&unique_check); + } + else + { + appendStringInfoString(result, sep); + sep = ", "; + out = result; + } /* process key */ if (nulls[i]) ereport(ERROR, (errcode(ERRCODE_INVALID_PARAMETER_VALUE), - errmsg("argument %d cannot be null", i + 1), + errmsg("argument %d cannot be null", i + 1), errhint("Object keys should be text."))); - add_json(args[i], false, result, types[i], true); + if (unique_keys) + /* save key offset before key appending */ + json_unique_check_save_key_offset(&unique_check, out); + + add_json(args[i], false, out, types[i], true); + + if (unique_keys) + { + /* check key uniqueness after key appending */ + json_unique_check_key(&unique_check, out); + + if (skip) + continue; + } appendStringInfoString(result, " : "); @@ -1038,7 +1271,29 @@ json_build_object(PG_FUNCTION_ARGS) appendStringInfoChar(result, '}'); - PG_RETURN_TEXT_P(cstring_to_text_with_len(result->data, result->len)); + if (unique_keys) + json_unique_check_free(&unique_check); + + return PointerGetDatum(cstring_to_text_with_len(result->data, result->len)); +} + +/* + * SQL function json_build_object(variadic "any") + */ +Datum +json_build_object(PG_FUNCTION_ARGS) +{ + Datum *args; + bool *nulls; + Oid *types; + /* build argument values to build the object */ + int nargs = extract_variadic_args(fcinfo, 0, true, + &args, &types, &nulls); + + if (nargs < 0) + PG_RETURN_NULL(); + + PG_RETURN_DATUM(json_build_object_worker(nargs, args, nulls, types, false, false)); } /* @@ -1050,25 +1305,13 @@ json_build_object_noargs(PG_FUNCTION_ARGS) PG_RETURN_TEXT_P(cstring_to_text_with_len("{}", 2)); } -/* - * SQL function json_build_array(variadic "any") - */ Datum -json_build_array(PG_FUNCTION_ARGS) +json_build_array_worker(int nargs, Datum *args, bool *nulls, Oid *types, + bool absent_on_null) { - int nargs; int i; const char *sep = ""; StringInfo result; - Datum *args; - bool *nulls; - Oid *types; - - /* fetch argument values to build the array */ - nargs = extract_variadic_args(fcinfo, 0, false, &args, &types, &nulls); - - if (nargs < 0) - 
PG_RETURN_NULL(); result = makeStringInfo(); @@ -1076,6 +1319,9 @@ json_build_array(PG_FUNCTION_ARGS) for (i = 0; i < nargs; i++) { + if (absent_on_null && nulls[i]) + continue; + appendStringInfoString(result, sep); sep = ", "; add_json(args[i], nulls[i], result, types[i], false); @@ -1083,7 +1329,26 @@ json_build_array(PG_FUNCTION_ARGS) appendStringInfoChar(result, ']'); - PG_RETURN_TEXT_P(cstring_to_text_with_len(result->data, result->len)); + return PointerGetDatum(cstring_to_text_with_len(result->data, result->len)); +} + +/* + * SQL function json_build_array(variadic "any") + */ +Datum +json_build_array(PG_FUNCTION_ARGS) +{ + Datum *args; + bool *nulls; + Oid *types; + /* build argument values to build the object */ + int nargs = extract_variadic_args(fcinfo, 0, true, + &args, &types, &nulls); + + if (nargs < 0) + PG_RETURN_NULL(); + + PG_RETURN_DATUM(json_build_array_worker(nargs, args, nulls, types, false)); } /* diff --git a/src/backend/utils/adt/jsonb.c b/src/backend/utils/adt/jsonb.c index 1e9ca046c6..2d70812629 100644 --- a/src/backend/utils/adt/jsonb.c +++ b/src/backend/utils/adt/jsonb.c @@ -52,6 +52,16 @@ typedef enum /* type categories for datum_to_jsonb */ JSONBTYPE_OTHER /* all else */ } JsonbTypeCategory; +/* Context for key uniqueness check */ +typedef struct JsonbUniqueCheckContext +{ + JsonbValue *obj; /* object containing skipped keys also */ + int *skipped_keys; /* array of skipped key-value pair indices */ + int skipped_keys_allocated; + int skipped_keys_count; + MemoryContext mcxt; /* context for saving skipped keys */ +} JsonbUniqueCheckContext; + typedef struct JsonbAggState { JsonbInState *res; @@ -59,6 +69,7 @@ typedef struct JsonbAggState Oid key_output_func; JsonbTypeCategory val_category; Oid val_output_func; + JsonbUniqueCheckContext unique_check; } JsonbAggState; static inline Datum jsonb_from_cstring(char *json, int len); @@ -1153,24 +1164,125 @@ to_jsonb(PG_FUNCTION_ARGS) PG_RETURN_POINTER(JsonbValueToJsonb(result.res)); } +static inline void +jsonb_unique_check_init(JsonbUniqueCheckContext *cxt, JsonbValue *obj, + MemoryContext mcxt) +{ + cxt->mcxt = mcxt; + cxt->obj = obj; + cxt->skipped_keys = NULL; + cxt->skipped_keys_count = 0; + cxt->skipped_keys_allocated = 0; +} + /* - * SQL function jsonb_build_object(variadic "any") + * Save the index of the skipped key-value pair that has just been appended + * to the object. + */ +static inline void +jsonb_unique_check_add_skipped(JsonbUniqueCheckContext *cxt) +{ + /* + * Make a room for the skipped index plus one additional index + * (see jsonb_unique_check_remove_skipped_keys()). + */ + if (cxt->skipped_keys_count + 1 >= cxt->skipped_keys_allocated) + { + if (cxt->skipped_keys_allocated) + { + cxt->skipped_keys_allocated *= 2; + cxt->skipped_keys = repalloc(cxt->skipped_keys, + sizeof(*cxt->skipped_keys) * + cxt->skipped_keys_allocated); + } + else + { + cxt->skipped_keys_allocated = 16; + cxt->skipped_keys = MemoryContextAlloc(cxt->mcxt, + sizeof(*cxt->skipped_keys) * + cxt->skipped_keys_allocated); + } + } + + cxt->skipped_keys[cxt->skipped_keys_count++] = cxt->obj->val.object.nPairs; +} + +/* + * Check uniqueness of the key that has just been appended to the object. 
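+ *
+ * If 'skip' is true, the key belongs to a NULL value that ABSENT ON NULL
+ * will drop.  The key still takes part in the uniqueness check, so its pair
+ * index is recorded and a dummy null value is pushed; such pairs are removed
+ * afterwards by jsonb_unique_check_remove_skipped_keys().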
+ */ +static inline void +jsonb_unique_check_key(JsonbUniqueCheckContext *cxt, bool skip) +{ + JsonbPair *pair = cxt->obj->val.object.pairs; + /* nPairs is incremented only after the value is appended */ + JsonbPair *last = &pair[cxt->obj->val.object.nPairs]; + + for (; pair < last; pair++) + if (pair->key.val.string.len == + last->key.val.string.len && + !memcmp(pair->key.val.string.val, + last->key.val.string.val, + last->key.val.string.len)) + ereport(ERROR, + (errcode(ERRCODE_DUPLICATE_JSON_OBJECT_KEY_VALUE), + errmsg("duplicate JSON key \"%*s\"", + last->key.val.string.len, + last->key.val.string.val))); + + if (skip) + { + /* save skipped key index */ + jsonb_unique_check_add_skipped(cxt); + + /* add dummy null value for the skipped key */ + last->value.type = jbvNull; + cxt->obj->val.object.nPairs++; + } +} + +/* + * Remove skipped key-value pairs from the resulting object. */ +static void +jsonb_unique_check_remove_skipped_keys(JsonbUniqueCheckContext *cxt) +{ + int *skipped_keys = cxt->skipped_keys; + int skipped_keys_count = cxt->skipped_keys_count; + + if (!skipped_keys_count) + return; + + if (cxt->obj->val.object.nPairs > skipped_keys_count) + { + /* remove skipped key-value pairs */ + JsonbPair *pairs = cxt->obj->val.object.pairs; + int i; + + /* save total pair count into the last element of skipped_keys */ + Assert(cxt->skipped_keys_count < cxt->skipped_keys_allocated); + cxt->skipped_keys[cxt->skipped_keys_count] = cxt->obj->val.object.nPairs; + + for (i = 0; i < skipped_keys_count; i++) + { + int skipped_key = skipped_keys[i]; + int nkeys = skipped_keys[i + 1] - skipped_key - 1; + + memmove(&pairs[skipped_key - i], + &pairs[skipped_key + 1], + sizeof(JsonbPair) * nkeys); + } + } + + cxt->obj->val.object.nPairs -= skipped_keys_count; +} + Datum -jsonb_build_object(PG_FUNCTION_ARGS) +jsonb_build_object_worker(int nargs, Datum *args, bool *nulls, Oid *types, + bool absent_on_null, bool unique_keys) { - int nargs; int i; JsonbInState result; - Datum *args; - bool *nulls; - Oid *types; - - /* build argument values to build the object */ - nargs = extract_variadic_args(fcinfo, 0, true, &args, &types, &nulls); - - if (nargs < 0) - PG_RETURN_NULL(); + JsonbUniqueCheckContext unique_check; if (nargs % 2 != 0) ereport(ERROR, @@ -1184,23 +1296,65 @@ jsonb_build_object(PG_FUNCTION_ARGS) result.res = pushJsonbValue(&result.parseState, WJB_BEGIN_OBJECT, NULL); + /* if (unique_keys) */ + jsonb_unique_check_init(&unique_check, result.res, CurrentMemoryContext); + for (i = 0; i < nargs; i += 2) { /* process key */ + bool skip; + if (nulls[i]) ereport(ERROR, (errcode(ERRCODE_INVALID_PARAMETER_VALUE), errmsg("argument %d: key must not be null", i + 1))); + /* skip null values if absent_on_null */ + skip = absent_on_null && nulls[i + 1]; + + /* we need to save skipped keys for the key uniqueness check */ + if (skip && !unique_keys) + continue; + add_jsonb(args[i], false, &result, types[i], true); + if (unique_keys) + { + jsonb_unique_check_key(&unique_check, skip); + + if (skip) + continue; /* do not process the value if the key is skipped */ + } + /* process value */ add_jsonb(args[i + 1], nulls[i + 1], &result, types[i + 1], false); } + if (unique_keys && absent_on_null) + jsonb_unique_check_remove_skipped_keys(&unique_check); + result.res = pushJsonbValue(&result.parseState, WJB_END_OBJECT, NULL); - PG_RETURN_POINTER(JsonbValueToJsonb(result.res)); + return JsonbPGetDatum(JsonbValueToJsonb(result.res)); +} + +/* + * SQL function jsonb_build_object(variadic "any") + */ +Datum 
+jsonb_build_object(PG_FUNCTION_ARGS) +{ + Datum *args; + bool *nulls; + Oid *types; + /* build argument values to build the object */ + int nargs = extract_variadic_args(fcinfo, 0, true, + &args, &types, &nulls); + + if (nargs < 0) + PG_RETURN_NULL(); + + PG_RETURN_DATUM(jsonb_build_object_worker(nargs, args, nulls, types, false, false)); } /* @@ -1219,37 +1373,50 @@ jsonb_build_object_noargs(PG_FUNCTION_ARGS) PG_RETURN_POINTER(JsonbValueToJsonb(result.res)); } -/* - * SQL function jsonb_build_array(variadic "any") - */ Datum -jsonb_build_array(PG_FUNCTION_ARGS) +jsonb_build_array_worker(int nargs, Datum *args, bool *nulls, Oid *types, + bool absent_on_null) { - int nargs; int i; JsonbInState result; - Datum *args; - bool *nulls; - Oid *types; - - /* build argument values to build the array */ - nargs = extract_variadic_args(fcinfo, 0, true, &args, &types, &nulls); - - if (nargs < 0) - PG_RETURN_NULL(); memset(&result, 0, sizeof(JsonbInState)); result.res = pushJsonbValue(&result.parseState, WJB_BEGIN_ARRAY, NULL); for (i = 0; i < nargs; i++) + { + if (absent_on_null && nulls[i]) + continue; + add_jsonb(args[i], nulls[i], &result, types[i], false); + } result.res = pushJsonbValue(&result.parseState, WJB_END_ARRAY, NULL); - PG_RETURN_POINTER(JsonbValueToJsonb(result.res)); + return JsonbPGetDatum(JsonbValueToJsonb(result.res)); } +/* + * SQL function jsonb_build_array(variadic "any") + */ +Datum +jsonb_build_array(PG_FUNCTION_ARGS) +{ + Datum *args; + bool *nulls; + Oid *types; + /* build argument values to build the object */ + int nargs = extract_variadic_args(fcinfo, 0, true, + &args, &types, &nulls); + + if (nargs < 0) + PG_RETURN_NULL(); + + PG_RETURN_DATUM(jsonb_build_array_worker(nargs, args, nulls, types, false)); +} + + /* * degenerate case of jsonb_build_array where it gets 0 arguments. */ @@ -1501,12 +1668,8 @@ clone_parse_state(JsonbParseState *state) return result; } - -/* - * jsonb_agg aggregate function - */ -Datum -jsonb_agg_transfn(PG_FUNCTION_ARGS) +static Datum +jsonb_agg_transfn_worker(FunctionCallInfo fcinfo, bool absent_on_null) { MemoryContext oldcontext, aggcontext; @@ -1554,6 +1717,9 @@ jsonb_agg_transfn(PG_FUNCTION_ARGS) result = state->res; } + if (absent_on_null && PG_ARGISNULL(1)) + PG_RETURN_POINTER(state); + /* turn the argument into jsonb in the normal function context */ val = PG_ARGISNULL(1) ? 
(Datum) 0 : PG_GETARG_DATUM(1); @@ -1623,6 +1789,24 @@ jsonb_agg_transfn(PG_FUNCTION_ARGS) PG_RETURN_POINTER(state); } +/* + * jsonb_agg aggregate function + */ +Datum +jsonb_agg_transfn(PG_FUNCTION_ARGS) +{ + return jsonb_agg_transfn_worker(fcinfo, false); +} + +/* + * jsonb_agg_strict aggregate function + */ +Datum +jsonb_agg_strict_transfn(PG_FUNCTION_ARGS) +{ + return jsonb_agg_transfn_worker(fcinfo, true); +} + Datum jsonb_agg_finalfn(PG_FUNCTION_ARGS) { @@ -1655,11 +1839,9 @@ jsonb_agg_finalfn(PG_FUNCTION_ARGS) PG_RETURN_POINTER(out); } -/* - * jsonb_object_agg aggregate function - */ -Datum -jsonb_object_agg_transfn(PG_FUNCTION_ARGS) +static Datum +jsonb_object_agg_transfn_worker(FunctionCallInfo fcinfo, + bool absent_on_null, bool unique_keys) { MemoryContext oldcontext, aggcontext; @@ -1673,6 +1855,7 @@ jsonb_object_agg_transfn(PG_FUNCTION_ARGS) *jbval; JsonbValue v; JsonbIteratorToken type; + bool skip; if (!AggCheckCallContext(fcinfo, &aggcontext)) { @@ -1692,6 +1875,11 @@ jsonb_object_agg_transfn(PG_FUNCTION_ARGS) state->res = result; result->res = pushJsonbValue(&result->parseState, WJB_BEGIN_OBJECT, NULL); + if (unique_keys) + jsonb_unique_check_init(&state->unique_check, result->res, + aggcontext); + else + memset(&state->unique_check, 0, sizeof(state->unique_check)); MemoryContextSwitchTo(oldcontext); arg_type = get_fn_expr_argtype(fcinfo->flinfo, 1); @@ -1727,6 +1915,15 @@ jsonb_object_agg_transfn(PG_FUNCTION_ARGS) (errcode(ERRCODE_INVALID_PARAMETER_VALUE), errmsg("field name must not be null"))); + /* + * Skip null values if absent_on_null unless key uniqueness check is + * needed (because we must save keys in this case). + */ + skip = absent_on_null && PG_ARGISNULL(2); + + if (skip && !unique_keys) + PG_RETURN_POINTER(state); + val = PG_GETARG_DATUM(1); memset(&elem, 0, sizeof(JsonbInState)); @@ -1782,6 +1979,18 @@ jsonb_object_agg_transfn(PG_FUNCTION_ARGS) } result->res = pushJsonbValue(&result->parseState, WJB_KEY, &v); + + if (unique_keys) + { + jsonb_unique_check_key(&state->unique_check, skip); + + if (skip) + { + MemoryContextSwitchTo(oldcontext); + PG_RETURN_POINTER(state); + } + } + break; case WJB_END_ARRAY: break; @@ -1854,6 +2063,43 @@ jsonb_object_agg_transfn(PG_FUNCTION_ARGS) PG_RETURN_POINTER(state); } +/* + * jsonb_object_agg aggregate function + */ +Datum +jsonb_object_agg_transfn(PG_FUNCTION_ARGS) +{ + return jsonb_object_agg_transfn_worker(fcinfo, false, false); +} + + +/* + * jsonb_object_agg_strict aggregate function + */ +Datum +jsonb_object_agg_strict_transfn(PG_FUNCTION_ARGS) +{ + return jsonb_object_agg_transfn_worker(fcinfo, true, false); +} + +/* + * jsonb_object_agg_unique aggregate function + */ +Datum +jsonb_object_agg_unique_transfn(PG_FUNCTION_ARGS) +{ + return jsonb_object_agg_transfn_worker(fcinfo, false, true); +} + +/* + * jsonb_object_agg_unique_strict aggregate function + */ +Datum +jsonb_object_agg_unique_strict_transfn(PG_FUNCTION_ARGS) +{ + return jsonb_object_agg_transfn_worker(fcinfo, true, true); +} + Datum jsonb_object_agg_finalfn(PG_FUNCTION_ARGS) { diff --git a/src/backend/utils/adt/ruleutils.c b/src/backend/utils/adt/ruleutils.c index 15ff3f4798..99249c650c 100644 --- a/src/backend/utils/adt/ruleutils.c +++ b/src/backend/utils/adt/ruleutils.c @@ -448,6 +448,10 @@ static void get_coercion_expr(Node *arg, deparse_context *context, Node *parentNode); static void get_const_expr(Const *constval, deparse_context *context, int showtype); +static void get_json_ctor_expr(JsonCtorExpr *ctor, deparse_context *context, + bool 
showimplicit); +static void get_json_agg_ctor_expr(JsonCtorExpr *ctor, deparse_context *context, + const char *funcname, bool is_json_objectagg); static void get_const_collation(Const *constval, deparse_context *context); static void simple_quote_literal(StringInfo buf, const char *val); static void get_sublink_expr(SubLink *sublink, deparse_context *context); @@ -5820,7 +5824,8 @@ get_rule_sortgroupclause(Index ref, List *tlist, bool force_colno, bool need_paren = (PRETTY_PAREN(context) || IsA(expr, FuncExpr) || IsA(expr, Aggref) - || IsA(expr, WindowFunc)); + || IsA(expr, WindowFunc) + || IsA(expr, JsonCtorExpr)); if (need_paren) appendStringInfoChar(context->buf, '('); @@ -7617,6 +7622,7 @@ isSimpleNode(Node *node, Node *parentNode, int prettyFlags) case T_Aggref: case T_WindowFunc: case T_FuncExpr: + case T_JsonCtorExpr: /* function-like: name(..) or name[..] */ return true; @@ -7902,12 +7908,12 @@ get_rule_expr_paren(Node *node, deparse_context *context, * get_json_format - Parse back a JsonFormat node */ static void -get_json_format(JsonFormat *format, deparse_context *context) +get_json_format(JsonFormat *format, StringInfo buf) { if (format->format == JS_FORMAT_DEFAULT) return; - appendStringInfoString(context->buf, + appendStringInfoString(buf, format->format == JS_FORMAT_JSONB ? " FORMAT JSONB" : " FORMAT JSON"); @@ -7917,7 +7923,7 @@ get_json_format(JsonFormat *format, deparse_context *context) format->encoding == JS_ENC_UTF16 ? "UTF16" : format->encoding == JS_ENC_UTF32 ? "UTF32" : "UTF8"; - appendStringInfo(context->buf, " ENCODING %s", encoding); + appendStringInfo(buf, " ENCODING %s", encoding); } } @@ -7925,20 +7931,20 @@ get_json_format(JsonFormat *format, deparse_context *context) * get_json_returning - Parse back a JsonReturning structure */ static void -get_json_returning(JsonReturning *returning, deparse_context *context, +get_json_returning(JsonReturning *returning, StringInfo buf, bool json_format_by_default) { if (!OidIsValid(returning->typid)) return; - appendStringInfo(context->buf, " RETURNING %s", + appendStringInfo(buf, " RETURNING %s", format_type_with_typemod(returning->typid, returning->typmod)); if (!json_format_by_default || returning->format->format != (returning->typid == JSONBOID ? 
JS_FORMAT_JSONB : JS_FORMAT_JSON)) - get_json_format(returning->format, context); + get_json_format(returning->format, buf); } /* ---------- @@ -9118,10 +9124,14 @@ get_rule_expr(Node *node, deparse_context *context, JsonValueExpr *jve = (JsonValueExpr *) node; get_rule_expr((Node *) jve->raw_expr, context, false); - get_json_format(jve->format, context); + get_json_format(jve->format, context->buf); } break; + case T_JsonCtorExpr: + get_json_ctor_expr((JsonCtorExpr *) node, context, false); + break; + case T_List: { char *sep; @@ -9373,17 +9383,88 @@ get_func_expr(FuncExpr *expr, deparse_context *context, appendStringInfoChar(buf, ')'); } +static void +get_json_ctor_options(JsonCtorExpr *ctor, StringInfo buf) +{ + if (ctor->absent_on_null) + { + if (ctor->type == JSCTOR_JSON_OBJECT || + ctor->type == JSCTOR_JSON_OBJECTAGG) + appendStringInfoString(buf, " ABSENT ON NULL"); + } + else + { + if (ctor->type == JSCTOR_JSON_ARRAY || + ctor->type == JSCTOR_JSON_ARRAYAGG) + appendStringInfoString(buf, " NULL ON NULL"); + } + + if (ctor->unique) + appendStringInfoString(buf, " WITH UNIQUE KEYS"); + + get_json_returning(ctor->returning, buf, true); +} + +static void +get_json_ctor_expr(JsonCtorExpr *ctor, deparse_context *context, bool showimplicit) +{ + StringInfo buf = context->buf; + const char *funcname; + int nargs; + ListCell *lc; + + switch (ctor->type) + { + case JSCTOR_JSON_OBJECT: + funcname = "JSON_OBJECT"; + break; + case JSCTOR_JSON_ARRAY: + funcname = "JSON_ARRAY"; + break; + case JSCTOR_JSON_OBJECTAGG: + return get_json_agg_ctor_expr(ctor, context, "JSON_OBJECTAGG", true); + case JSCTOR_JSON_ARRAYAGG: + return get_json_agg_ctor_expr(ctor, context, "JSON_ARRAYAGG", false); + default: + elog(ERROR, "invalid JsonCtorExprType %d", ctor->type); + } + + appendStringInfo(buf, "%s(", funcname); + + nargs = 0; + foreach(lc, ctor->args) + { + if (nargs > 0) + { + const char *sep = ctor->type == JSCTOR_JSON_OBJECT && + (nargs % 2) != 0 ? " : " : ", "; + + appendStringInfoString(buf, sep); + } + + get_rule_expr((Node *) lfirst(lc), context, true); + + nargs++; + } + + get_json_ctor_options(ctor, buf); + + appendStringInfo(buf, ")"); +} + + /* - * get_agg_expr - Parse back an Aggref node + * get_agg_expr_helper - Parse back an Aggref node */ static void -get_agg_expr(Aggref *aggref, deparse_context *context, - Aggref *original_aggref) +get_agg_expr_helper(Aggref *aggref, deparse_context *context, + Aggref *original_aggref, const char *funcname, + const char *options, bool is_json_objectagg) { StringInfo buf = context->buf; Oid argtypes[FUNC_MAX_ARGS]; int nargs; - bool use_variadic; + bool use_variadic = false; /* * For a combining aggregate, we look up and deparse the corresponding @@ -9413,13 +9494,14 @@ get_agg_expr(Aggref *aggref, deparse_context *context, /* Extract the argument types as seen by the parser */ nargs = get_aggregate_argtypes(aggref, argtypes); + if (!funcname) + funcname = generate_function_name(aggref->aggfnoid, nargs, NIL, + argtypes, aggref->aggvariadic, + &use_variadic, + context->special_exprkind); + /* Print the aggregate name, schema-qualified if needed */ - appendStringInfo(buf, "%s(%s", - generate_function_name(aggref->aggfnoid, nargs, - NIL, argtypes, - aggref->aggvariadic, - &use_variadic, - context->special_exprkind), + appendStringInfo(buf, "%s(%s", funcname, (aggref->aggdistinct != NIL) ? 
"DISTINCT " : ""); if (AGGKIND_IS_ORDERED_SET(aggref->aggkind)) @@ -9455,7 +9537,17 @@ get_agg_expr(Aggref *aggref, deparse_context *context, if (tle->resjunk) continue; if (i++ > 0) - appendStringInfoString(buf, ", "); + { + if (is_json_objectagg) + { + if (i > 2) + break; /* skip ABSENT ON NULL and WITH UNIQUE args */ + + appendStringInfoString(buf, " : "); + } + else + appendStringInfoString(buf, ", "); + } if (use_variadic && i == nargs) appendStringInfoString(buf, "VARIADIC "); get_rule_expr(arg, context, true); @@ -9469,6 +9561,9 @@ get_agg_expr(Aggref *aggref, deparse_context *context, } } + if (options) + appendStringInfoString(buf, options); + if (aggref->aggfilter != NULL) { appendStringInfoString(buf, ") FILTER (WHERE "); @@ -9478,6 +9573,16 @@ get_agg_expr(Aggref *aggref, deparse_context *context, appendStringInfoChar(buf, ')'); } +/* + * get_agg_expr - Parse back an Aggref node + */ +static void +get_agg_expr(Aggref *aggref, deparse_context *context, Aggref *original_aggref) +{ + return get_agg_expr_helper(aggref, context, original_aggref, NULL, NULL, + false); +} + /* * This is a helper function for get_agg_expr(). It's used when we deparse * a combining Aggref; resolve_special_varno locates the corresponding partial @@ -9497,10 +9602,12 @@ get_agg_combine_expr(Node *node, deparse_context *context, void *callback_arg) } /* - * get_windowfunc_expr - Parse back a WindowFunc node + * get_windowfunc_expr_helper - Parse back a WindowFunc node */ static void -get_windowfunc_expr(WindowFunc *wfunc, deparse_context *context) +get_windowfunc_expr_helper(WindowFunc *wfunc, deparse_context *context, + const char *funcname, const char *options, + bool is_json_objectagg) { StringInfo buf = context->buf; Oid argtypes[FUNC_MAX_ARGS]; @@ -9524,16 +9631,30 @@ get_windowfunc_expr(WindowFunc *wfunc, deparse_context *context) nargs++; } - appendStringInfo(buf, "%s(", - generate_function_name(wfunc->winfnoid, nargs, - argnames, argtypes, - false, NULL, - context->special_exprkind)); + if (!funcname) + funcname = generate_function_name(wfunc->winfnoid, nargs, argnames, + argtypes, false, NULL, + context->special_exprkind); + + appendStringInfo(buf, "%s(", funcname); + /* winstar can be set only in zero-argument aggregates */ if (wfunc->winstar) appendStringInfoChar(buf, '*'); else - get_rule_expr((Node *) wfunc->args, context, true); + { + if (is_json_objectagg) + { + get_rule_expr((Node *) linitial(wfunc->args), context, false); + appendStringInfoString(buf, " : "); + get_rule_expr((Node *) lsecond(wfunc->args), context, false); + } + else + get_rule_expr((Node *) wfunc->args, context, true); + } + + if (options) + appendStringInfoString(buf, options); if (wfunc->aggfilter != NULL) { @@ -9570,6 +9691,39 @@ get_windowfunc_expr(WindowFunc *wfunc, deparse_context *context) } } +/* + * get_windowfunc_expr - Parse back a WindowFunc node + */ +static void +get_windowfunc_expr(WindowFunc *wfunc, deparse_context *context) +{ + return get_windowfunc_expr_helper(wfunc, context, NULL, NULL, false); +} + +/* + * get_json_agg_ctor_expr - Parse back an aggregate JsonCtorExpr node + */ +static void +get_json_agg_ctor_expr(JsonCtorExpr *ctor, deparse_context *context, + const char *funcname, bool is_json_objectagg) +{ + StringInfoData options; + + initStringInfo(&options); + get_json_ctor_options(ctor, &options); + + if (IsA(ctor->func, Aggref)) + return get_agg_expr_helper((Aggref *) ctor->func, context, + (Aggref *) ctor->func, + funcname, options.data, is_json_objectagg); + else if (IsA(ctor->func, 
WindowFunc)) + return get_windowfunc_expr_helper((WindowFunc *) ctor->func, context, + funcname, options.data, is_json_objectagg); + else + elog(ERROR, "invalid JsonCtorExpr underlying node type: %d", + nodeTag(ctor->func)); +} + /* ---------- * get_coercion_expr * diff --git a/src/include/catalog/pg_aggregate.dat b/src/include/catalog/pg_aggregate.dat index ffabe275c0..11ecfdf889 100644 --- a/src/include/catalog/pg_aggregate.dat +++ b/src/include/catalog/pg_aggregate.dat @@ -541,14 +541,36 @@ # json { aggfnoid => 'json_agg', aggtransfn => 'json_agg_transfn', aggfinalfn => 'json_agg_finalfn', aggtranstype => 'internal' }, +{ aggfnoid => 'json_agg_strict', aggtransfn => 'json_agg_strict_transfn', + aggfinalfn => 'json_agg_finalfn', aggtranstype => 'internal' }, { aggfnoid => 'json_object_agg', aggtransfn => 'json_object_agg_transfn', aggfinalfn => 'json_object_agg_finalfn', aggtranstype => 'internal' }, +{ aggfnoid => 'json_object_agg_unique', + aggtransfn => 'json_object_agg_unique_transfn', + aggfinalfn => 'json_object_agg_finalfn', aggtranstype => 'internal' }, +{ aggfnoid => 'json_object_agg_strict', + aggtransfn => 'json_object_agg_strict_transfn', + aggfinalfn => 'json_object_agg_finalfn', aggtranstype => 'internal' }, +{ aggfnoid => 'json_object_agg_unique_strict', + aggtransfn => 'json_object_agg_unique_strict_transfn', + aggfinalfn => 'json_object_agg_finalfn', aggtranstype => 'internal' }, # jsonb { aggfnoid => 'jsonb_agg', aggtransfn => 'jsonb_agg_transfn', aggfinalfn => 'jsonb_agg_finalfn', aggtranstype => 'internal' }, +{ aggfnoid => 'jsonb_agg_strict', aggtransfn => 'jsonb_agg_strict_transfn', + aggfinalfn => 'jsonb_agg_finalfn', aggtranstype => 'internal' }, { aggfnoid => 'jsonb_object_agg', aggtransfn => 'jsonb_object_agg_transfn', aggfinalfn => 'jsonb_object_agg_finalfn', aggtranstype => 'internal' }, +{ aggfnoid => 'jsonb_object_agg_unique', + aggtransfn => 'jsonb_object_agg_unique_transfn', + aggfinalfn => 'jsonb_object_agg_finalfn', aggtranstype => 'internal' }, +{ aggfnoid => 'jsonb_object_agg_strict', + aggtransfn => 'jsonb_object_agg_strict_transfn', + aggfinalfn => 'jsonb_object_agg_finalfn', aggtranstype => 'internal' }, +{ aggfnoid => 'jsonb_object_agg_unique_strict', + aggtransfn => 'jsonb_object_agg_unique_strict_transfn', + aggfinalfn => 'jsonb_object_agg_finalfn', aggtranstype => 'internal' }, # ordered-set and hypothetical-set aggregates { aggfnoid => 'percentile_disc(float8,anyelement)', aggkind => 'o', diff --git a/src/include/catalog/pg_proc.dat b/src/include/catalog/pg_proc.dat index 95604e988a..cbe2d3e36c 100644 --- a/src/include/catalog/pg_proc.dat +++ b/src/include/catalog/pg_proc.dat @@ -8368,6 +8368,10 @@ proname => 'json_agg_transfn', proisstrict => 'f', provolatile => 's', prorettype => 'internal', proargtypes => 'internal anyelement', prosrc => 'json_agg_transfn' }, +{ oid => '8173', descr => 'json aggregate transition function', + proname => 'json_agg_strict_transfn', proisstrict => 'f', provolatile => 's', + prorettype => 'internal', proargtypes => 'internal anyelement', + prosrc => 'json_agg_strict_transfn' }, { oid => '3174', descr => 'json aggregate final function', proname => 'json_agg_finalfn', proisstrict => 'f', prorettype => 'json', proargtypes => 'internal', prosrc => 'json_agg_finalfn' }, @@ -8375,10 +8379,26 @@ proname => 'json_agg', prokind => 'a', proisstrict => 'f', provolatile => 's', prorettype => 'json', proargtypes => 'anyelement', prosrc => 'aggregate_dummy' }, +{ oid => '8174', descr => 'aggregate input into json', + proname 
=> 'json_agg_strict', prokind => 'a', proisstrict => 'f', + provolatile => 's', prorettype => 'json', proargtypes => 'anyelement', + prosrc => 'aggregate_dummy' }, { oid => '3180', descr => 'json object aggregate transition function', proname => 'json_object_agg_transfn', proisstrict => 'f', provolatile => 's', prorettype => 'internal', proargtypes => 'internal any any', prosrc => 'json_object_agg_transfn' }, +{ oid => '8175', descr => 'json object aggregate transition function', + proname => 'json_object_agg_strict_transfn', proisstrict => 'f', + provolatile => 's', prorettype => 'internal', proargtypes => 'internal any any', + prosrc => 'json_object_agg_strict_transfn' }, +{ oid => '8176', descr => 'json object aggregate transition function', + proname => 'json_object_agg_unique_transfn', proisstrict => 'f', + provolatile => 's', prorettype => 'internal', proargtypes => 'internal any any', + prosrc => 'json_object_agg_unique_transfn' }, +{ oid => '8177', descr => 'json object aggregate transition function', + proname => 'json_object_agg_unique_strict_transfn', proisstrict => 'f', + provolatile => 's', prorettype => 'internal', proargtypes => 'internal any any', + prosrc => 'json_object_agg_unique_strict_transfn' }, { oid => '3196', descr => 'json object aggregate final function', proname => 'json_object_agg_finalfn', proisstrict => 'f', prorettype => 'json', proargtypes => 'internal', @@ -8387,6 +8407,19 @@ proname => 'json_object_agg', prokind => 'a', proisstrict => 'f', provolatile => 's', prorettype => 'json', proargtypes => 'any any', prosrc => 'aggregate_dummy' }, +{ oid => '8178', descr => 'aggregate non-NULL input into a json object', + proname => 'json_object_agg_strict', prokind => 'a', proisstrict => 'f', + provolatile => 's', prorettype => 'json', proargtypes => 'any any', + prosrc => 'aggregate_dummy' }, +{ oid => '8179', descr => 'aggregate input into a json object with unique keys', + proname => 'json_object_agg_unique', prokind => 'a', proisstrict => 'f', + provolatile => 's', prorettype => 'json', proargtypes => 'any any', + prosrc => 'aggregate_dummy' }, +{ oid => '8180', + descr => 'aggregate non-NULL input into a json object with unique keys', + proname => 'json_object_agg_unique_strict', prokind => 'a', + proisstrict => 'f', provolatile => 's', prorettype => 'json', + proargtypes => 'any any', prosrc => 'aggregate_dummy' }, { oid => '3198', descr => 'build a json array from any inputs', proname => 'json_build_array', provariadic => 'any', proisstrict => 'f', provolatile => 's', prorettype => 'json', proargtypes => 'any', @@ -9242,6 +9275,10 @@ proname => 'jsonb_agg_transfn', proisstrict => 'f', provolatile => 's', prorettype => 'internal', proargtypes => 'internal anyelement', prosrc => 'jsonb_agg_transfn' }, +{ oid => '8181', descr => 'jsonb aggregate transition function', + proname => 'jsonb_agg_strict_transfn', proisstrict => 'f', provolatile => 's', + prorettype => 'internal', proargtypes => 'internal anyelement', + prosrc => 'jsonb_agg_strict_transfn' }, { oid => '3266', descr => 'jsonb aggregate final function', proname => 'jsonb_agg_finalfn', proisstrict => 'f', provolatile => 's', prorettype => 'jsonb', proargtypes => 'internal', @@ -9250,10 +9287,29 @@ proname => 'jsonb_agg', prokind => 'a', proisstrict => 'f', provolatile => 's', prorettype => 'jsonb', proargtypes => 'anyelement', prosrc => 'aggregate_dummy' }, +{ oid => '8182', descr => 'aggregate input into jsonb skipping nulls', + proname => 'jsonb_agg_strict', prokind => 'a', proisstrict => 'f', + 
provolatile => 's', prorettype => 'jsonb', proargtypes => 'anyelement', + prosrc => 'aggregate_dummy' }, { oid => '3268', descr => 'jsonb object aggregate transition function', proname => 'jsonb_object_agg_transfn', proisstrict => 'f', provolatile => 's', prorettype => 'internal', proargtypes => 'internal any any', prosrc => 'jsonb_object_agg_transfn' }, +{ oid => '8183', descr => 'jsonb object aggregate transition function', + proname => 'jsonb_object_agg_strict_transfn', proisstrict => 'f', + provolatile => 's', prorettype => 'internal', + proargtypes => 'internal any any', + prosrc => 'jsonb_object_agg_strict_transfn' }, +{ oid => '8184', descr => 'jsonb object aggregate transition function', + proname => 'jsonb_object_agg_unique_transfn', proisstrict => 'f', + provolatile => 's', prorettype => 'internal', + proargtypes => 'internal any any', + prosrc => 'jsonb_object_agg_unique_transfn' }, +{ oid => '8185', descr => 'jsonb object aggregate transition function', + proname => 'jsonb_object_agg_unique_strict_transfn', proisstrict => 'f', + provolatile => 's', prorettype => 'internal', + proargtypes => 'internal any any', + prosrc => 'jsonb_object_agg_unique_strict_transfn' }, { oid => '3269', descr => 'jsonb object aggregate final function', proname => 'jsonb_object_agg_finalfn', proisstrict => 'f', provolatile => 's', prorettype => 'jsonb', proargtypes => 'internal', @@ -9262,6 +9318,20 @@ proname => 'jsonb_object_agg', prokind => 'a', proisstrict => 'f', prorettype => 'jsonb', proargtypes => 'any any', prosrc => 'aggregate_dummy' }, +{ oid => '8186', descr => 'aggregate non-NULL inputs into jsonb object', + proname => 'jsonb_object_agg_strict', prokind => 'a', proisstrict => 'f', + prorettype => 'jsonb', proargtypes => 'any any', + prosrc => 'aggregate_dummy' }, +{ oid => '8187', + descr => 'aggregate inputs into jsonb object checking key uniqueness', + proname => 'jsonb_object_agg_unique', prokind => 'a', proisstrict => 'f', + prorettype => 'jsonb', proargtypes => 'any any', + prosrc => 'aggregate_dummy' }, +{ oid => '8188', + descr => 'aggregate non-NULL inputs into jsonb object checking key uniqueness', + proname => 'jsonb_object_agg_unique_strict', prokind => 'a', + proisstrict => 'f', prorettype => 'jsonb', proargtypes => 'any any', + prosrc => 'aggregate_dummy' }, { oid => '3271', descr => 'build a jsonb array from any inputs', proname => 'jsonb_build_array', provariadic => 'any', proisstrict => 'f', provolatile => 's', prorettype => 'jsonb', proargtypes => 'any', diff --git a/src/include/executor/execExpr.h b/src/include/executor/execExpr.h index dbe8649a57..aa25f95f8a 100644 --- a/src/include/executor/execExpr.h +++ b/src/include/executor/execExpr.h @@ -219,6 +219,7 @@ typedef enum ExprEvalOp EEOP_WINDOW_FUNC, EEOP_SUBPLAN, EEOP_ALTERNATIVE_SUBPLAN, + EEOP_JSON_CTOR, /* aggregation related nodes */ EEOP_AGG_STRICT_DESERIALIZE, @@ -640,6 +641,17 @@ typedef struct ExprEvalStep int transno; int setoff; } agg_trans; + + /* for EEOP_JSON_CTOR */ + struct + { + JsonCtorExpr *ctor; + Datum *arg_values; + bool *arg_nulls; + Oid *arg_types; + int nargs; + } json_ctor; + } d; } ExprEvalStep; diff --git a/src/include/nodes/makefuncs.h b/src/include/nodes/makefuncs.h index a529056fad..8a10a1d71f 100644 --- a/src/include/nodes/makefuncs.h +++ b/src/include/nodes/makefuncs.h @@ -108,6 +108,7 @@ extern VacuumRelation *makeVacuumRelation(RangeVar *relation, Oid oid, List *va_ extern JsonFormat *makeJsonFormat(JsonFormatType type, JsonEncoding encoding, int location); extern JsonValueExpr 
*makeJsonValueExpr(Expr *expr, JsonFormat *format); +extern Node *makeJsonKeyValue(Node *key, Node *value); extern JsonEncoding makeJsonEncoding(char *name); #endif /* MAKEFUNC_H */ diff --git a/src/include/nodes/nodes.h b/src/include/nodes/nodes.h index 3557d35d42..f686920bf1 100644 --- a/src/include/nodes/nodes.h +++ b/src/include/nodes/nodes.h @@ -201,6 +201,7 @@ typedef enum NodeTag T_JsonFormat, T_JsonReturning, T_JsonValueExpr, + T_JsonCtorExpr, /* * TAGS FOR EXPRESSION STATE NODES (execnodes.h) @@ -484,6 +485,12 @@ typedef enum NodeTag T_PartitionRangeDatum, T_PartitionCmd, T_VacuumRelation, + T_JsonObjectCtor, + T_JsonArrayCtor, + T_JsonArrayQueryCtor, + T_JsonObjectAgg, + T_JsonArrayAgg, + T_JsonKeyValue, T_JsonOutput, /* diff --git a/src/include/nodes/parsenodes.h b/src/include/nodes/parsenodes.h index 439af7bc02..ead2763edf 100644 --- a/src/include/nodes/parsenodes.h +++ b/src/include/nodes/parsenodes.h @@ -1496,9 +1496,101 @@ typedef struct JsonOutput { NodeTag type; TypeName *typeName; /* RETURNING type name, if specified */ - JsonReturning returning; /* RETURNING FORMAT clause and type Oids */ + JsonReturning *returning; /* RETURNING FORMAT clause and type Oids */ } JsonOutput; +/* + * JsonKeyValue - + * untransformed representation of JSON object key-value pair for + * JSON_OBJECT() and JSON_OBJECTAGG() + */ +typedef struct JsonKeyValue +{ + NodeTag type; + Expr *key; /* key expression */ + JsonValueExpr *value; /* JSON value expression */ +} JsonKeyValue; + +/* + * JsonObjectCtor - + * untransformed representation of JSON_OBJECT() constructor + */ +typedef struct JsonObjectCtor +{ + NodeTag type; + List *exprs; /* list of JsonKeyValue pairs */ + JsonOutput *output; /* RETURNING clause, if specified */ + bool absent_on_null; /* skip NULL values? */ + bool unique; /* check key uniqueness? */ + int location; /* token location, or -1 if unknown */ +} JsonObjectCtor; + +/* + * JsonArrayCtor - + * untransformed representation of JSON_ARRAY(element,...) constructor + */ +typedef struct JsonArrayCtor +{ + NodeTag type; + List *exprs; /* list of JsonValueExpr elements */ + JsonOutput *output; /* RETURNING clause, if specified */ + bool absent_on_null; /* skip NULL elements? */ + int location; /* token location, or -1 if unknown */ +} JsonArrayCtor; + +/* + * JsonArrayQueryCtor - + * untransformed representation of JSON_ARRAY(subquery) constructor + */ +typedef struct JsonArrayQueryCtor +{ + NodeTag type; + Node *query; /* subquery */ + JsonOutput *output; /* RETURNING clause, if specified */ + JsonFormat *format; /* FORMAT clause for subquery, if specified */ + bool absent_on_null; /* skip NULL elements? */ + int location; /* token location, or -1 if unknown */ +} JsonArrayQueryCtor; + +/* + * JsonAggCtor - + * common fields of untransformed representation of + * JSON_ARRAYAGG() and JSON_OBJECTAGG() + */ +typedef struct JsonAggCtor +{ + NodeTag type; + JsonOutput *output; /* RETURNING clause, if any */ + Node *agg_filter; /* FILTER clause, if any */ + List *agg_order; /* ORDER BY clause, if any */ + struct WindowDef *over; /* OVER clause, if any */ + int location; /* token location, or -1 if unknown */ +} JsonAggCtor; + +/* + * JsonObjectAgg - + * untransformed representation of JSON_OBJECTAGG() + */ +typedef struct JsonObjectAgg +{ + JsonAggCtor ctor; /* common fields */ + JsonKeyValue *arg; /* object key-value pair */ + bool absent_on_null; /* skip NULL values? */ + bool unique; /* check key uniqueness? 
*/ +} JsonObjectAgg; + +/* + * JsonArrayAgg - + * untransformed representation of JSON_ARRRAYAGG() + */ +typedef struct JsonArrayAgg +{ + JsonAggCtor ctor; /* common fields */ + JsonValueExpr *arg; /* array element expression */ + bool absent_on_null; /* skip NULL elements? */ +} JsonArrayAgg; + + /***************************************************************************** * Raw Grammar Output Statements *****************************************************************************/ diff --git a/src/include/nodes/primnodes.h b/src/include/nodes/primnodes.h index 2f74822c61..9dec5ddb93 100644 --- a/src/include/nodes/primnodes.h +++ b/src/include/nodes/primnodes.h @@ -1248,6 +1248,31 @@ typedef struct JsonValueExpr JsonFormat *format; /* FORMAT clause, if specified */ } JsonValueExpr; +typedef enum JsonCtorType +{ + JSCTOR_JSON_OBJECT = 1, + JSCTOR_JSON_ARRAY = 2, + JSCTOR_JSON_OBJECTAGG = 3, + JSCTOR_JSON_ARRAYAGG = 4 +} JsonCtorType; + +/* + * JsonCtorExpr - + * wrapper over FuncExpr/Aggref/WindowFunc for SQL/JSON constructors + */ +typedef struct JsonCtorExpr +{ + Expr xpr; + JsonCtorType type; /* constructor type */ + List *args; + Expr *func; /* underlying json[b]_xxx() function call */ + Expr *coercion; /* coercion to RETURNING type */ + JsonReturning *returning; /* RETURNING clause */ + bool absent_on_null; /* ABSENT ON NULL? */ + bool unique; /* WITH UNIQUE KEYS? (JSON_OBJECT[AGG] only) */ + int location; +} JsonCtorExpr; + /* ---------------- * NullTest * diff --git a/src/include/parser/kwlist.h b/src/include/parser/kwlist.h index 0ab0341b1c..82e84164aa 100644 --- a/src/include/parser/kwlist.h +++ b/src/include/parser/kwlist.h @@ -26,6 +26,7 @@ /* name, value, category */ PG_KEYWORD("abort", ABORT_P, UNRESERVED_KEYWORD) +PG_KEYWORD("absent", ABSENT, UNRESERVED_KEYWORD) PG_KEYWORD("absolute", ABSOLUTE_P, UNRESERVED_KEYWORD) PG_KEYWORD("access", ACCESS, UNRESERVED_KEYWORD) PG_KEYWORD("action", ACTION, UNRESERVED_KEYWORD) @@ -223,7 +224,12 @@ PG_KEYWORD("isnull", ISNULL, TYPE_FUNC_NAME_KEYWORD) PG_KEYWORD("isolation", ISOLATION, UNRESERVED_KEYWORD) PG_KEYWORD("join", JOIN, TYPE_FUNC_NAME_KEYWORD) PG_KEYWORD("json", JSON, UNRESERVED_KEYWORD) +PG_KEYWORD("json_array", JSON_ARRAY, COL_NAME_KEYWORD) +PG_KEYWORD("json_arrayagg", JSON_ARRAYAGG, COL_NAME_KEYWORD) +PG_KEYWORD("json_object", JSON_OBJECT, COL_NAME_KEYWORD) +PG_KEYWORD("json_objectagg", JSON_OBJECTAGG, COL_NAME_KEYWORD) PG_KEYWORD("key", KEY, UNRESERVED_KEYWORD) +PG_KEYWORD("keys", KEYS, UNRESERVED_KEYWORD) PG_KEYWORD("label", LABEL, UNRESERVED_KEYWORD) PG_KEYWORD("language", LANGUAGE, UNRESERVED_KEYWORD) PG_KEYWORD("large", LARGE_P, UNRESERVED_KEYWORD) diff --git a/src/include/utils/json.h b/src/include/utils/json.h index 4345fbdc31..6c69c2d550 100644 --- a/src/include/utils/json.h +++ b/src/include/utils/json.h @@ -20,5 +20,10 @@ extern void escape_json(StringInfo buf, const char *str); extern char *JsonEncodeDateTime(char *buf, Datum value, Oid typid, const int *tzp); +extern Datum json_build_object_worker(int nargs, Datum *args, bool *nulls, + Oid *types, bool absent_on_null, + bool unique_keys); +extern Datum json_build_array_worker(int nargs, Datum *args, bool *nulls, + Oid *types, bool absent_on_null); #endif /* JSON_H */ diff --git a/src/include/utils/jsonb.h b/src/include/utils/jsonb.h index 5860011693..9053ad78f6 100644 --- a/src/include/utils/jsonb.h +++ b/src/include/utils/jsonb.h @@ -407,5 +407,10 @@ extern char *JsonbToCStringIndent(StringInfo out, JsonbContainer *in, extern bool JsonbExtractScalar(JsonbContainer 
*jbc, JsonbValue *res); extern const char *JsonbTypeName(JsonbValue *jb); +extern Datum jsonb_build_object_worker(int nargs, Datum *args, bool *nulls, + Oid *types, bool absent_on_null, + bool unique_keys); +extern Datum jsonb_build_array_worker(int nargs, Datum *args, bool *nulls, + Oid *types, bool absent_on_null); #endif /* __JSONB_H__ */ diff --git a/src/interfaces/ecpg/preproc/parse.pl b/src/interfaces/ecpg/preproc/parse.pl index 1a76b2d326..50b818c1d2 100644 --- a/src/interfaces/ecpg/preproc/parse.pl +++ b/src/interfaces/ecpg/preproc/parse.pl @@ -46,6 +46,8 @@ my %replace_string = ( 'NOT_LA' => 'not', 'NULLS_LA' => 'nulls', 'WITH_LA' => 'with', + 'WITH_LA_UNIQUE' => 'with', + 'WITHOUT_LA' => 'without', 'TYPECAST' => '::', 'DOT_DOT' => '..', 'COLON_EQUALS' => ':=', diff --git a/src/interfaces/ecpg/preproc/parser.c b/src/interfaces/ecpg/preproc/parser.c index a2eeeba217..49dfbf1c29 100644 --- a/src/interfaces/ecpg/preproc/parser.c +++ b/src/interfaces/ecpg/preproc/parser.c @@ -83,6 +83,7 @@ filtered_base_yylex(void) case WITH: case UIDENT: case USCONST: + case WITHOUT: break; default: return cur_token; @@ -143,6 +144,19 @@ filtered_base_yylex(void) case ORDINALITY: cur_token = WITH_LA; break; + case UNIQUE: + cur_token = WITH_LA_UNIQUE; + break; + } + break; + + case WITHOUT: + /* Replace WITHOUT by WITHOUT_LA if it's followed by TIME */ + switch (next_token) + { + case TIME: + cur_token = WITHOUT_LA; + break; } break; case UIDENT: diff --git a/src/test/regress/expected/opr_sanity.out b/src/test/regress/expected/opr_sanity.out index 27056d70d3..42b126818d 100644 --- a/src/test/regress/expected/opr_sanity.out +++ b/src/test/regress/expected/opr_sanity.out @@ -1490,8 +1490,10 @@ WHERE a.aggfnoid = p.oid AND NOT binary_coercible(p.proargtypes[1], ptr.proargtypes[2])) OR (p.pronargs > 2 AND NOT binary_coercible(p.proargtypes[2], ptr.proargtypes[3])) - -- we could carry the check further, but 3 args is enough for now - OR (p.pronargs > 3) + OR (p.pronargs > 3 AND + NOT binary_coercible(p.proargtypes[3], ptr.proargtypes[4])) + -- we could carry the check further, but 4 args is enough for now + OR (p.pronargs > 4) ); aggfnoid | proname | oid | proname ----------+---------+-----+--------- diff --git a/src/test/regress/expected/sqljson.out b/src/test/regress/expected/sqljson.out new file mode 100644 index 0000000000..8fbdce8161 --- /dev/null +++ b/src/test/regress/expected/sqljson.out @@ -0,0 +1,742 @@ +-- JSON_OBJECT() +SELECT JSON_OBJECT(); + json_object +------------- + {} +(1 row) + +SELECT JSON_OBJECT(RETURNING json); + json_object +------------- + {} +(1 row) + +SELECT JSON_OBJECT(RETURNING json FORMAT JSON); + json_object +------------- + {} +(1 row) + +SELECT JSON_OBJECT(RETURNING jsonb); + json_object +------------- + {} +(1 row) + +SELECT JSON_OBJECT(RETURNING jsonb FORMAT JSON); + json_object +------------- + {} +(1 row) + +SELECT JSON_OBJECT(RETURNING text); + json_object +------------- + {} +(1 row) + +SELECT JSON_OBJECT(RETURNING text FORMAT JSON); + json_object +------------- + {} +(1 row) + +SELECT JSON_OBJECT(RETURNING text FORMAT JSON ENCODING UTF8); +ERROR: cannot set JSON encoding for non-bytea output types +LINE 1: SELECT JSON_OBJECT(RETURNING text FORMAT JSON ENCODING UTF8)... 
+ ^ +SELECT JSON_OBJECT(RETURNING text FORMAT JSON ENCODING INVALID_ENCODING); +ERROR: unrecognized JSON encoding: invalid_encoding +SELECT JSON_OBJECT(RETURNING bytea); + json_object +------------- + \x7b7d +(1 row) + +SELECT JSON_OBJECT(RETURNING bytea FORMAT JSON); + json_object +------------- + \x7b7d +(1 row) + +SELECT JSON_OBJECT(RETURNING bytea FORMAT JSON ENCODING UTF8); + json_object +------------- + \x7b7d +(1 row) + +SELECT JSON_OBJECT(RETURNING bytea FORMAT JSON ENCODING UTF16); +ERROR: unsupported JSON encoding +LINE 1: SELECT JSON_OBJECT(RETURNING bytea FORMAT JSON ENCODING UTF1... + ^ +HINT: only UTF8 JSON encoding is supported +SELECT JSON_OBJECT(RETURNING bytea FORMAT JSON ENCODING UTF32); +ERROR: unsupported JSON encoding +LINE 1: SELECT JSON_OBJECT(RETURNING bytea FORMAT JSON ENCODING UTF3... + ^ +HINT: only UTF8 JSON encoding is supported +SELECT JSON_OBJECT('foo': NULL::int FORMAT JSON); +ERROR: cannot use non-string types with explicit FORMAT JSON clause +LINE 1: SELECT JSON_OBJECT('foo': NULL::int FORMAT JSON); + ^ +SELECT JSON_OBJECT('foo': NULL::int FORMAT JSON ENCODING UTF8); +ERROR: JSON ENCODING clause is only allowed for bytea input type +LINE 1: SELECT JSON_OBJECT('foo': NULL::int FORMAT JSON ENCODING UTF... + ^ +SELECT JSON_OBJECT('foo': NULL::json FORMAT JSON); +WARNING: FORMAT JSON has no effect for json and jsonb types + json_object +---------------- + {"foo" : null} +(1 row) + +SELECT JSON_OBJECT('foo': NULL::json FORMAT JSON ENCODING UTF8); +ERROR: JSON ENCODING clause is only allowed for bytea input type +LINE 1: SELECT JSON_OBJECT('foo': NULL::json FORMAT JSON ENCODING UT... + ^ +SELECT JSON_OBJECT('foo': NULL::jsonb FORMAT JSON); +WARNING: FORMAT JSON has no effect for json and jsonb types + json_object +---------------- + {"foo" : null} +(1 row) + +SELECT JSON_OBJECT('foo': NULL::jsonb FORMAT JSON ENCODING UTF8); +ERROR: JSON ENCODING clause is only allowed for bytea input type +LINE 1: SELECT JSON_OBJECT('foo': NULL::jsonb FORMAT JSON ENCODING U... + ^ +SELECT JSON_OBJECT(NULL: 1); +ERROR: argument 1 cannot be null +HINT: Object keys should be text. 
+SELECT JSON_OBJECT('a': 2 + 3); + json_object +------------- + {"a" : 5} +(1 row) + +SELECT JSON_OBJECT('a' VALUE 2 + 3); + json_object +------------- + {"a" : 5} +(1 row) + +--SELECT JSON_OBJECT(KEY 'a' VALUE 2 + 3); +SELECT JSON_OBJECT('a' || 2: 1); + json_object +------------- + {"a2" : 1} +(1 row) + +SELECT JSON_OBJECT(('a' || 2) VALUE 1); + json_object +------------- + {"a2" : 1} +(1 row) + +--SELECT JSON_OBJECT('a' || 2 VALUE 1); +--SELECT JSON_OBJECT(KEY 'a' || 2 VALUE 1); +SELECT JSON_OBJECT('a': 2::text); + json_object +------------- + {"a" : "2"} +(1 row) + +SELECT JSON_OBJECT('a' VALUE 2::text); + json_object +------------- + {"a" : "2"} +(1 row) + +--SELECT JSON_OBJECT(KEY 'a' VALUE 2::text); +SELECT JSON_OBJECT(1::text: 2); + json_object +------------- + {"1" : 2} +(1 row) + +SELECT JSON_OBJECT((1::text) VALUE 2); + json_object +------------- + {"1" : 2} +(1 row) + +--SELECT JSON_OBJECT(1::text VALUE 2); +--SELECT JSON_OBJECT(KEY 1::text VALUE 2); +SELECT JSON_OBJECT(json '[1]': 123); +ERROR: key value must be scalar, not array, composite, or json +SELECT JSON_OBJECT(ARRAY[1,2,3]: 'aaa'); +ERROR: key value must be scalar, not array, composite, or json +SELECT JSON_OBJECT( + 'a': '123', + 1.23: 123, + 'c': json '[ 1,true,{ } ]', + 'd': jsonb '{ "x" : 123.45 }' +); + json_object +------------------------------------------------------------------------ + {"a" : "123", "1.23" : 123, "c" : [ 1,true,{ } ], "d" : {"x": 123.45}} +(1 row) + +SELECT JSON_OBJECT( + 'a': '123', + 1.23: 123, + 'c': json '[ 1,true,{ } ]', + 'd': jsonb '{ "x" : 123.45 }' + RETURNING jsonb +); + json_object +------------------------------------------------------------------- + {"a": "123", "c": [1, true, {}], "d": {"x": 123.45}, "1.23": 123} +(1 row) + +/* +SELECT JSON_OBJECT( + 'a': '123', + KEY 1.23 VALUE 123, + 'c' VALUE json '[1, true, {}]' +); +*/ +SELECT JSON_OBJECT('a': '123', 'b': JSON_OBJECT('a': 111, 'b': 'aaa')); + json_object +----------------------------------------------- + {"a" : "123", "b" : {"a" : 111, "b" : "aaa"}} +(1 row) + +SELECT JSON_OBJECT('a': '123', 'b': JSON_OBJECT('a': 111, 'b': 'aaa' RETURNING jsonb)); + json_object +--------------------------------------------- + {"a" : "123", "b" : {"a": 111, "b": "aaa"}} +(1 row) + +SELECT JSON_OBJECT('a': JSON_OBJECT('b': 1 RETURNING text)); + json_object +----------------------- + {"a" : "{\"b\" : 1}"} +(1 row) + +SELECT JSON_OBJECT('a': JSON_OBJECT('b': 1 RETURNING text) FORMAT JSON); + json_object +------------------- + {"a" : {"b" : 1}} +(1 row) + +SELECT JSON_OBJECT('a': JSON_OBJECT('b': 1 RETURNING bytea)); + json_object +--------------------------------- + {"a" : "\\x7b226222203a20317d"} +(1 row) + +SELECT JSON_OBJECT('a': JSON_OBJECT('b': 1 RETURNING bytea) FORMAT JSON); + json_object +------------------- + {"a" : {"b" : 1}} +(1 row) + +SELECT JSON_OBJECT('a': '1', 'b': NULL, 'c': 2); + json_object +---------------------------------- + {"a" : "1", "b" : null, "c" : 2} +(1 row) + +SELECT JSON_OBJECT('a': '1', 'b': NULL, 'c': 2 NULL ON NULL); + json_object +---------------------------------- + {"a" : "1", "b" : null, "c" : 2} +(1 row) + +SELECT JSON_OBJECT('a': '1', 'b': NULL, 'c': 2 ABSENT ON NULL); + json_object +---------------------- + {"a" : "1", "c" : 2} +(1 row) + +SELECT JSON_OBJECT(1: 1, '1': NULL WITH UNIQUE); +ERROR: duplicate JSON key "1" +SELECT JSON_OBJECT(1: 1, '1': NULL ABSENT ON NULL WITH UNIQUE); +ERROR: duplicate JSON key "1" +SELECT JSON_OBJECT(1: 1, '1': NULL NULL ON NULL WITH UNIQUE RETURNING jsonb); +ERROR: 
duplicate JSON key "1" +SELECT JSON_OBJECT(1: 1, '1': NULL ABSENT ON NULL WITH UNIQUE RETURNING jsonb); +ERROR: duplicate JSON key "1" +SELECT JSON_OBJECT(1: 1, '2': NULL, '1': 1 NULL ON NULL WITH UNIQUE); +ERROR: duplicate JSON key "1" +SELECT JSON_OBJECT(1: 1, '2': NULL, '1': 1 ABSENT ON NULL WITH UNIQUE); +ERROR: duplicate JSON key "1" +SELECT JSON_OBJECT(1: 1, '2': NULL, '1': 1 ABSENT ON NULL WITHOUT UNIQUE); + json_object +-------------------- + {"1" : 1, "1" : 1} +(1 row) + +SELECT JSON_OBJECT(1: 1, '2': NULL, '1': 1 ABSENT ON NULL WITH UNIQUE RETURNING jsonb); +ERROR: duplicate JSON key "1" +SELECT JSON_OBJECT(1: 1, '2': NULL, '1': 1 ABSENT ON NULL WITHOUT UNIQUE RETURNING jsonb); + json_object +------------- + {"1": 1} +(1 row) + +SELECT JSON_OBJECT(1: 1, '2': NULL, '3': 1, 4: NULL, '5': 'a' ABSENT ON NULL WITH UNIQUE RETURNING jsonb); + json_object +---------------------------- + {"1": 1, "3": 1, "5": "a"} +(1 row) + +-- JSON_ARRAY() +SELECT JSON_ARRAY(); + json_array +------------ + [] +(1 row) + +SELECT JSON_ARRAY(RETURNING json); + json_array +------------ + [] +(1 row) + +SELECT JSON_ARRAY(RETURNING json FORMAT JSON); + json_array +------------ + [] +(1 row) + +SELECT JSON_ARRAY(RETURNING jsonb); + json_array +------------ + [] +(1 row) + +SELECT JSON_ARRAY(RETURNING jsonb FORMAT JSON); + json_array +------------ + [] +(1 row) + +SELECT JSON_ARRAY(RETURNING text); + json_array +------------ + [] +(1 row) + +SELECT JSON_ARRAY(RETURNING text FORMAT JSON); + json_array +------------ + [] +(1 row) + +SELECT JSON_ARRAY(RETURNING text FORMAT JSON ENCODING UTF8); +ERROR: cannot set JSON encoding for non-bytea output types +LINE 1: SELECT JSON_ARRAY(RETURNING text FORMAT JSON ENCODING UTF8); + ^ +SELECT JSON_ARRAY(RETURNING text FORMAT JSON ENCODING INVALID_ENCODING); +ERROR: unrecognized JSON encoding: invalid_encoding +SELECT JSON_ARRAY(RETURNING bytea); + json_array +------------ + \x5b5d +(1 row) + +SELECT JSON_ARRAY(RETURNING bytea FORMAT JSON); + json_array +------------ + \x5b5d +(1 row) + +SELECT JSON_ARRAY(RETURNING bytea FORMAT JSON ENCODING UTF8); + json_array +------------ + \x5b5d +(1 row) + +SELECT JSON_ARRAY(RETURNING bytea FORMAT JSON ENCODING UTF16); +ERROR: unsupported JSON encoding +LINE 1: SELECT JSON_ARRAY(RETURNING bytea FORMAT JSON ENCODING UTF16... + ^ +HINT: only UTF8 JSON encoding is supported +SELECT JSON_ARRAY(RETURNING bytea FORMAT JSON ENCODING UTF32); +ERROR: unsupported JSON encoding +LINE 1: SELECT JSON_ARRAY(RETURNING bytea FORMAT JSON ENCODING UTF32... 
+ ^ +HINT: only UTF8 JSON encoding is supported +SELECT JSON_ARRAY('aaa', 111, true, array[1,2,3], NULL, json '{"a": [1]}', jsonb '["a",3]'); + json_array +--------------------------------------------------- + ["aaa", 111, true, [1,2,3], {"a": [1]}, ["a", 3]] +(1 row) + +SELECT JSON_ARRAY('a', NULL, 'b' NULL ON NULL); + json_array +------------------ + ["a", null, "b"] +(1 row) + +SELECT JSON_ARRAY('a', NULL, 'b' ABSENT ON NULL); + json_array +------------ + ["a", "b"] +(1 row) + +SELECT JSON_ARRAY(NULL, NULL, 'b' ABSENT ON NULL); + json_array +------------ + ["b"] +(1 row) + +SELECT JSON_ARRAY('a', NULL, 'b' NULL ON NULL RETURNING jsonb); + json_array +------------------ + ["a", null, "b"] +(1 row) + +SELECT JSON_ARRAY('a', NULL, 'b' ABSENT ON NULL RETURNING jsonb); + json_array +------------ + ["a", "b"] +(1 row) + +SELECT JSON_ARRAY(NULL, NULL, 'b' ABSENT ON NULL RETURNING jsonb); + json_array +------------ + ["b"] +(1 row) + +SELECT JSON_ARRAY(JSON_ARRAY('{ "a" : 123 }' RETURNING text)); + json_array +------------------------------- + ["[\"{ \\\"a\\\" : 123 }\"]"] +(1 row) + +SELECT JSON_ARRAY(JSON_ARRAY('{ "a" : 123 }' FORMAT JSON RETURNING text)); + json_array +----------------------- + ["[{ \"a\" : 123 }]"] +(1 row) + +SELECT JSON_ARRAY(JSON_ARRAY('{ "a" : 123 }' FORMAT JSON RETURNING text) FORMAT JSON); + json_array +------------------- + [[{ "a" : 123 }]] +(1 row) + +SELECT JSON_ARRAY(SELECT i FROM (VALUES (1), (2), (NULL), (4)) foo(i)); + json_array +------------ + [1, 2, 4] +(1 row) + +SELECT JSON_ARRAY(SELECT i FROM (VALUES (NULL::int[]), ('{1,2}'), (NULL), (NULL), ('{3,4}'), (NULL)) foo(i)); + json_array +------------ + [[1,2], + + [3,4]] +(1 row) + +SELECT JSON_ARRAY(SELECT i FROM (VALUES (NULL::int[]), ('{1,2}'), (NULL), (NULL), ('{3,4}'), (NULL)) foo(i) RETURNING jsonb); + json_array +------------------ + [[1, 2], [3, 4]] +(1 row) + +--SELECT JSON_ARRAY(SELECT i FROM (VALUES (NULL::int[]), ('{1,2}'), (NULL), (NULL), ('{3,4}'), (NULL)) foo(i) NULL ON NULL); +--SELECT JSON_ARRAY(SELECT i FROM (VALUES (NULL::int[]), ('{1,2}'), (NULL), (NULL), ('{3,4}'), (NULL)) foo(i) NULL ON NULL RETURNING jsonb); +SELECT JSON_ARRAY(SELECT i FROM (VALUES (3), (1), (NULL), (2)) foo(i) ORDER BY i); + json_array +------------ + [1, 2, 3] +(1 row) + +-- Should fail +SELECT JSON_ARRAY(SELECT FROM (VALUES (1)) foo(i)); +ERROR: subquery must return only one column +LINE 1: SELECT JSON_ARRAY(SELECT FROM (VALUES (1)) foo(i)); + ^ +SELECT JSON_ARRAY(SELECT i, i FROM (VALUES (1)) foo(i)); +ERROR: subquery must return only one column +LINE 1: SELECT JSON_ARRAY(SELECT i, i FROM (VALUES (1)) foo(i)); + ^ +SELECT JSON_ARRAY(SELECT * FROM (VALUES (1, 2)) foo(i, j)); +ERROR: subquery must return only one column +LINE 1: SELECT JSON_ARRAY(SELECT * FROM (VALUES (1, 2)) foo(i, j)); + ^ +-- JSON_ARRAYAGG() +SELECT JSON_ARRAYAGG(i) IS NULL, + JSON_ARRAYAGG(i RETURNING jsonb) IS NULL +FROM generate_series(1, 0) i; + ?column? | ?column? 
+----------+---------- + t | t +(1 row) + +SELECT JSON_ARRAYAGG(i), + JSON_ARRAYAGG(i RETURNING jsonb) +FROM generate_series(1, 5) i; + json_arrayagg | json_arrayagg +-----------------+----------------- + [1, 2, 3, 4, 5] | [1, 2, 3, 4, 5] +(1 row) + +SELECT JSON_ARRAYAGG(i ORDER BY i DESC) +FROM generate_series(1, 5) i; + json_arrayagg +----------------- + [5, 4, 3, 2, 1] +(1 row) + +SELECT JSON_ARRAYAGG(i::text::json) +FROM generate_series(1, 5) i; + json_arrayagg +----------------- + [1, 2, 3, 4, 5] +(1 row) + +SELECT JSON_ARRAYAGG(JSON_ARRAY(i, i + 1 RETURNING text) FORMAT JSON) +FROM generate_series(1, 5) i; + json_arrayagg +------------------------------------------ + [[1, 2], [2, 3], [3, 4], [4, 5], [5, 6]] +(1 row) + +SELECT JSON_ARRAYAGG(NULL), + JSON_ARRAYAGG(NULL RETURNING jsonb) +FROM generate_series(1, 5); + json_arrayagg | json_arrayagg +---------------+--------------- + [] | [] +(1 row) + +SELECT JSON_ARRAYAGG(NULL NULL ON NULL), + JSON_ARRAYAGG(NULL NULL ON NULL RETURNING jsonb) +FROM generate_series(1, 5); + json_arrayagg | json_arrayagg +--------------------------------+-------------------------------- + [null, null, null, null, null] | [null, null, null, null, null] +(1 row) + +SELECT + JSON_ARRAYAGG(bar), + JSON_ARRAYAGG(bar RETURNING jsonb), + JSON_ARRAYAGG(bar ABSENT ON NULL), + JSON_ARRAYAGG(bar ABSENT ON NULL RETURNING jsonb), + JSON_ARRAYAGG(bar NULL ON NULL), + JSON_ARRAYAGG(bar NULL ON NULL RETURNING jsonb), + JSON_ARRAYAGG(foo), + JSON_ARRAYAGG(foo RETURNING jsonb), + JSON_ARRAYAGG(foo ORDER BY bar) FILTER (WHERE bar > 2), + JSON_ARRAYAGG(foo ORDER BY bar RETURNING jsonb) FILTER (WHERE bar > 2) +FROM + (VALUES (NULL), (3), (1), (NULL), (NULL), (5), (2), (4), (NULL)) foo(bar); + json_arrayagg | json_arrayagg | json_arrayagg | json_arrayagg | json_arrayagg | json_arrayagg | json_arrayagg | json_arrayagg | json_arrayagg | json_arrayagg +-----------------+-----------------+-----------------+-----------------+-----------------------------------------+-----------------------------------------+-----------------+--------------------------------------------------------------------------------------------------------------------------+---------------+-------------------------------------- + [3, 1, 5, 2, 4] | [3, 1, 5, 2, 4] | [3, 1, 5, 2, 4] | [3, 1, 5, 2, 4] | [null, 3, 1, null, null, 5, 2, 4, null] | [null, 3, 1, null, null, 5, 2, 4, null] | [{"bar":null}, +| [{"bar": null}, {"bar": 3}, {"bar": 1}, {"bar": null}, {"bar": null}, {"bar": 5}, {"bar": 2}, {"bar": 4}, {"bar": null}] | [{"bar":3}, +| [{"bar": 3}, {"bar": 4}, {"bar": 5}] + | | | | | | {"bar":3}, +| | {"bar":4}, +| + | | | | | | {"bar":1}, +| | {"bar":5}] | + | | | | | | {"bar":null}, +| | | + | | | | | | {"bar":null}, +| | | + | | | | | | {"bar":5}, +| | | + | | | | | | {"bar":2}, +| | | + | | | | | | {"bar":4}, +| | | + | | | | | | {"bar":null}] | | | +(1 row) + +SELECT + bar, JSON_ARRAYAGG(bar) FILTER (WHERE bar > 2) OVER (PARTITION BY foo.bar % 2) +FROM + (VALUES (NULL), (3), (1), (NULL), (NULL), (5), (2), (4), (NULL), (5), (4)) foo(bar); + bar | json_arrayagg +-----+--------------- + 4 | [4, 4] + 4 | [4, 4] + 2 | [4, 4] + 5 | [5, 3, 5] + 3 | [5, 3, 5] + 1 | [5, 3, 5] + 5 | [5, 3, 5] + | + | + | + | +(11 rows) + +-- JSON_OBJECTAGG() +SELECT JSON_OBJECTAGG('key': 1) IS NULL, + JSON_OBJECTAGG('key': 1 RETURNING jsonb) IS NULL +WHERE FALSE; + ?column? | ?column? 
+----------+---------- + t | t +(1 row) + +SELECT JSON_OBJECTAGG(NULL: 1); +ERROR: field name must not be null +SELECT JSON_OBJECTAGG(NULL: 1 RETURNING jsonb); +ERROR: field name must not be null +SELECT + JSON_OBJECTAGG(i: i), +-- JSON_OBJECTAGG(i VALUE i), +-- JSON_OBJECTAGG(KEY i VALUE i), + JSON_OBJECTAGG(i: i RETURNING jsonb) +FROM + generate_series(1, 5) i; + json_objectagg | json_objectagg +-------------------------------------------------+------------------------------------------ + { "1" : 1, "2" : 2, "3" : 3, "4" : 4, "5" : 5 } | {"1": 1, "2": 2, "3": 3, "4": 4, "5": 5} +(1 row) + +SELECT + JSON_OBJECTAGG(k: v), + JSON_OBJECTAGG(k: v NULL ON NULL), + JSON_OBJECTAGG(k: v ABSENT ON NULL), + JSON_OBJECTAGG(k: v RETURNING jsonb), + JSON_OBJECTAGG(k: v NULL ON NULL RETURNING jsonb), + JSON_OBJECTAGG(k: v ABSENT ON NULL RETURNING jsonb) +FROM + (VALUES (1, 1), (1, NULL), (2, NULL), (3, 3)) foo(k, v); + json_objectagg | json_objectagg | json_objectagg | json_objectagg | json_objectagg | json_objectagg +----------------------------------------------+----------------------------------------------+----------------------+--------------------------------+--------------------------------+------------------ + { "1" : 1, "1" : null, "2" : null, "3" : 3 } | { "1" : 1, "1" : null, "2" : null, "3" : 3 } | { "1" : 1, "3" : 3 } | {"1": null, "2": null, "3": 3} | {"1": null, "2": null, "3": 3} | {"1": 1, "3": 3} +(1 row) + +SELECT JSON_OBJECTAGG(k: v WITH UNIQUE KEYS) +FROM (VALUES (1, 1), (1, NULL), (2, 2)) foo(k, v); +ERROR: duplicate JSON key "1" +SELECT JSON_OBJECTAGG(k: v ABSENT ON NULL WITH UNIQUE KEYS) +FROM (VALUES (1, 1), (1, NULL), (2, 2)) foo(k, v); +ERROR: duplicate JSON key "1" +SELECT JSON_OBJECTAGG(k: v ABSENT ON NULL WITH UNIQUE KEYS) +FROM (VALUES (1, 1), (0, NULL), (3, NULL), (2, 2), (4, NULL)) foo(k, v); + json_objectagg +---------------------- + { "1" : 1, "2" : 2 } +(1 row) + +SELECT JSON_OBJECTAGG(k: v WITH UNIQUE KEYS RETURNING jsonb) +FROM (VALUES (1, 1), (1, NULL), (2, 2)) foo(k, v); +ERROR: duplicate JSON key "1" +SELECT JSON_OBJECTAGG(k: v ABSENT ON NULL WITH UNIQUE KEYS RETURNING jsonb) +FROM (VALUES (1, 1), (1, NULL), (2, 2)) foo(k, v); +ERROR: duplicate JSON key "1" +-- Test JSON_OBJECT deparsing +EXPLAIN (VERBOSE, COSTS OFF) +SELECT JSON_OBJECT('foo' : '1' FORMAT JSON, 'bar' : 'baz' RETURNING json); + QUERY PLAN +------------------------------------------------------------------------------ + Result + Output: JSON_OBJECT('foo' : '1'::json, 'bar' : 'baz'::text RETURNING json) +(2 rows) + +CREATE VIEW json_object_view AS +SELECT JSON_OBJECT('foo' : '1' FORMAT JSON, 'bar' : 'baz' RETURNING json); +\sv json_object_view +CREATE OR REPLACE VIEW public.json_object_view AS + SELECT JSON_OBJECT('foo' : '1'::text FORMAT JSON, 'bar' : 'baz'::text RETURNING json) AS "json_object" +DROP VIEW json_object_view; +-- Test JSON_ARRAY deparsing +EXPLAIN (VERBOSE, COSTS OFF) +SELECT JSON_ARRAY('1' FORMAT JSON, 2 RETURNING json); + QUERY PLAN +--------------------------------------------------- + Result + Output: JSON_ARRAY('1'::json, 2 RETURNING json) +(2 rows) + +CREATE VIEW json_array_view AS +SELECT JSON_ARRAY('1' FORMAT JSON, 2 RETURNING json); +\sv json_array_view +CREATE OR REPLACE VIEW public.json_array_view AS + SELECT JSON_ARRAY('1'::text FORMAT JSON, 2 RETURNING json) AS "json_array" +DROP VIEW json_array_view; +-- Test JSON_OBJECTAGG deparsing +EXPLAIN (VERBOSE, COSTS OFF) +SELECT JSON_OBJECTAGG(i: ('111' || i)::bytea FORMAT JSON WITH UNIQUE RETURNING text) FILTER (WHERE i > 3) 
+FROM generate_series(1,5) i; + QUERY PLAN +-------------------------------------------------------------------------------------------------------------------------------------- + Aggregate + Output: JSON_OBJECTAGG(i : (('111'::text || (i)::text))::bytea FORMAT JSON WITH UNIQUE KEYS RETURNING text) FILTER (WHERE (i > 3)) + -> Function Scan on pg_catalog.generate_series i + Output: i + Function Call: generate_series(1, 5) +(5 rows) + +EXPLAIN (VERBOSE, COSTS OFF) +SELECT JSON_OBJECTAGG(i: ('111' || i)::bytea FORMAT JSON WITH UNIQUE RETURNING text) OVER (PARTITION BY i % 2) +FROM generate_series(1,5) i; + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------------- + WindowAgg + Output: JSON_OBJECTAGG(i : (('111'::text || (i)::text))::bytea FORMAT JSON WITH UNIQUE KEYS RETURNING text) OVER (?), ((i % 2)) + -> Sort + Output: ((i % 2)), i + Sort Key: ((i.i % 2)) + -> Function Scan on pg_catalog.generate_series i + Output: (i % 2), i + Function Call: generate_series(1, 5) +(8 rows) + +CREATE VIEW json_objectagg_view AS +SELECT JSON_OBJECTAGG(i: ('111' || i)::bytea FORMAT JSON WITH UNIQUE RETURNING text) FILTER (WHERE i > 3) +FROM generate_series(1,5) i; +\sv json_objectagg_view +CREATE OR REPLACE VIEW public.json_objectagg_view AS + SELECT JSON_OBJECTAGG(i.i : ('111'::text || i.i)::bytea FORMAT JSON WITH UNIQUE KEYS RETURNING text) FILTER (WHERE i.i > 3) AS "json_objectagg" + FROM generate_series(1, 5) i(i) +DROP VIEW json_objectagg_view; +-- Test JSON_ARRAYAGG deparsing +EXPLAIN (VERBOSE, COSTS OFF) +SELECT JSON_ARRAYAGG(('111' || i)::bytea FORMAT JSON NULL ON NULL RETURNING text) FILTER (WHERE i > 3) +FROM generate_series(1,5) i; + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------- + Aggregate + Output: JSON_ARRAYAGG((('111'::text || (i)::text))::bytea FORMAT JSON NULL ON NULL RETURNING text) FILTER (WHERE (i > 3)) + -> Function Scan on pg_catalog.generate_series i + Output: i + Function Call: generate_series(1, 5) +(5 rows) + +EXPLAIN (VERBOSE, COSTS OFF) +SELECT JSON_ARRAYAGG(('111' || i)::bytea FORMAT JSON NULL ON NULL RETURNING text) OVER (PARTITION BY i % 2) +FROM generate_series(1,5) i; + QUERY PLAN +-------------------------------------------------------------------------------------------------------------------------- + WindowAgg + Output: JSON_ARRAYAGG((('111'::text || (i)::text))::bytea FORMAT JSON NULL ON NULL RETURNING text) OVER (?), ((i % 2)) + -> Sort + Output: ((i % 2)), i + Sort Key: ((i.i % 2)) + -> Function Scan on pg_catalog.generate_series i + Output: (i % 2), i + Function Call: generate_series(1, 5) +(8 rows) + +CREATE VIEW json_arrayagg_view AS +SELECT JSON_ARRAYAGG(('111' || i)::bytea FORMAT JSON NULL ON NULL RETURNING text) FILTER (WHERE i > 3) +FROM generate_series(1,5) i; +\sv json_arrayagg_view +CREATE OR REPLACE VIEW public.json_arrayagg_view AS + SELECT JSON_ARRAYAGG(('111'::text || i.i)::bytea FORMAT JSON NULL ON NULL RETURNING text) FILTER (WHERE i.i > 3) AS "json_arrayagg" + FROM generate_series(1, 5) i(i) +DROP VIEW json_arrayagg_view; +-- Test JSON_ARRAY(subquery) deparsing +EXPLAIN (VERBOSE, COSTS OFF) +SELECT JSON_ARRAY(SELECT i FROM (VALUES (1), (2), (NULL), (4)) foo(i) RETURNING jsonb); + QUERY PLAN +--------------------------------------------------------------------- + Result + Output: $0 + InitPlan 1 (returns $0) + -> Aggregate + Output: JSON_ARRAYAGG("*VALUES*".column1 RETURNING 
jsonb) + -> Values Scan on "*VALUES*" + Output: "*VALUES*".column1 +(7 rows) + +CREATE VIEW json_array_subquery_view AS +SELECT JSON_ARRAY(SELECT i FROM (VALUES (1), (2), (NULL), (4)) foo(i) RETURNING jsonb); +\sv json_array_subquery_view +CREATE OR REPLACE VIEW public.json_array_subquery_view AS + SELECT ( SELECT JSON_ARRAYAGG(q.a RETURNING jsonb) AS "json_arrayagg" + FROM ( SELECT foo.i + FROM ( VALUES (1), (2), (NULL::integer), (4)) foo(i)) q(a)) AS "json_array" +DROP VIEW json_array_subquery_view; diff --git a/src/test/regress/parallel_schedule b/src/test/regress/parallel_schedule index 026ea880cd..e3e9da0c8c 100644 --- a/src/test/regress/parallel_schedule +++ b/src/test/regress/parallel_schedule @@ -100,7 +100,7 @@ test: select_views portals_p2 foreign_key cluster dependency guc bitmapops combo # ---------- # Another group of parallel tests (JSON related) # ---------- -test: json jsonb json_encoding jsonpath jsonpath_encoding jsonb_jsonpath +test: json jsonb json_encoding jsonpath jsonpath_encoding jsonb_jsonpath sqljson # ---------- # Another group of parallel tests diff --git a/src/test/regress/serial_schedule b/src/test/regress/serial_schedule index 979d926119..d25bf870bd 100644 --- a/src/test/regress/serial_schedule +++ b/src/test/regress/serial_schedule @@ -171,6 +171,7 @@ test: json_encoding test: jsonpath test: jsonpath_encoding test: jsonb_jsonpath +test: sqljson test: plancache test: limit test: plpgsql diff --git a/src/test/regress/sql/opr_sanity.sql b/src/test/regress/sql/opr_sanity.sql index 7a9180b081..4f8f48e54e 100644 --- a/src/test/regress/sql/opr_sanity.sql +++ b/src/test/regress/sql/opr_sanity.sql @@ -903,8 +903,10 @@ WHERE a.aggfnoid = p.oid AND NOT binary_coercible(p.proargtypes[1], ptr.proargtypes[2])) OR (p.pronargs > 2 AND NOT binary_coercible(p.proargtypes[2], ptr.proargtypes[3])) - -- we could carry the check further, but 3 args is enough for now - OR (p.pronargs > 3) + OR (p.pronargs > 3 AND + NOT binary_coercible(p.proargtypes[3], ptr.proargtypes[4])) + -- we could carry the check further, but 4 args is enough for now + OR (p.pronargs > 4) ); -- Cross-check finalfn (if present) against its entry in pg_proc. 
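[Reviewer note, not part of the patch] The json_array_subquery_view deparsing above makes the parser's handling of the subquery form visible: JSON_ARRAY(<subquery>) is rewritten into a scalar subquery that feeds the subquery's single output column through JSON_ARRAYAGG. A hand-written sketch of that rewrite, assuming this patch is applied (the aliases q and a simply mirror the ones the deparser itself chooses), is:

-- Equivalent of JSON_ARRAY(SELECT i FROM (VALUES (1), (2), (NULL), (4)) foo(i) RETURNING jsonb).
-- NULL rows are dropped because ABSENT ON NULL is the default for JSON_ARRAY
-- and JSON_ARRAYAGG, so this should yield [1, 2, 4].
SELECT (SELECT JSON_ARRAYAGG(q.a RETURNING jsonb)
        FROM (SELECT i FROM (VALUES (1), (2), (NULL), (4)) foo(i)) q(a));
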
diff --git a/src/test/regress/sql/sqljson.sql b/src/test/regress/sql/sqljson.sql new file mode 100644 index 0000000000..aaef2d8aab --- /dev/null +++ b/src/test/regress/sql/sqljson.sql @@ -0,0 +1,282 @@ +-- JSON_OBJECT() +SELECT JSON_OBJECT(); +SELECT JSON_OBJECT(RETURNING json); +SELECT JSON_OBJECT(RETURNING json FORMAT JSON); +SELECT JSON_OBJECT(RETURNING jsonb); +SELECT JSON_OBJECT(RETURNING jsonb FORMAT JSON); +SELECT JSON_OBJECT(RETURNING text); +SELECT JSON_OBJECT(RETURNING text FORMAT JSON); +SELECT JSON_OBJECT(RETURNING text FORMAT JSON ENCODING UTF8); +SELECT JSON_OBJECT(RETURNING text FORMAT JSON ENCODING INVALID_ENCODING); +SELECT JSON_OBJECT(RETURNING bytea); +SELECT JSON_OBJECT(RETURNING bytea FORMAT JSON); +SELECT JSON_OBJECT(RETURNING bytea FORMAT JSON ENCODING UTF8); +SELECT JSON_OBJECT(RETURNING bytea FORMAT JSON ENCODING UTF16); +SELECT JSON_OBJECT(RETURNING bytea FORMAT JSON ENCODING UTF32); + +SELECT JSON_OBJECT('foo': NULL::int FORMAT JSON); +SELECT JSON_OBJECT('foo': NULL::int FORMAT JSON ENCODING UTF8); +SELECT JSON_OBJECT('foo': NULL::json FORMAT JSON); +SELECT JSON_OBJECT('foo': NULL::json FORMAT JSON ENCODING UTF8); +SELECT JSON_OBJECT('foo': NULL::jsonb FORMAT JSON); +SELECT JSON_OBJECT('foo': NULL::jsonb FORMAT JSON ENCODING UTF8); + +SELECT JSON_OBJECT(NULL: 1); +SELECT JSON_OBJECT('a': 2 + 3); +SELECT JSON_OBJECT('a' VALUE 2 + 3); +--SELECT JSON_OBJECT(KEY 'a' VALUE 2 + 3); +SELECT JSON_OBJECT('a' || 2: 1); +SELECT JSON_OBJECT(('a' || 2) VALUE 1); +--SELECT JSON_OBJECT('a' || 2 VALUE 1); +--SELECT JSON_OBJECT(KEY 'a' || 2 VALUE 1); +SELECT JSON_OBJECT('a': 2::text); +SELECT JSON_OBJECT('a' VALUE 2::text); +--SELECT JSON_OBJECT(KEY 'a' VALUE 2::text); +SELECT JSON_OBJECT(1::text: 2); +SELECT JSON_OBJECT((1::text) VALUE 2); +--SELECT JSON_OBJECT(1::text VALUE 2); +--SELECT JSON_OBJECT(KEY 1::text VALUE 2); +SELECT JSON_OBJECT(json '[1]': 123); +SELECT JSON_OBJECT(ARRAY[1,2,3]: 'aaa'); + +SELECT JSON_OBJECT( + 'a': '123', + 1.23: 123, + 'c': json '[ 1,true,{ } ]', + 'd': jsonb '{ "x" : 123.45 }' +); + +SELECT JSON_OBJECT( + 'a': '123', + 1.23: 123, + 'c': json '[ 1,true,{ } ]', + 'd': jsonb '{ "x" : 123.45 }' + RETURNING jsonb +); + +/* +SELECT JSON_OBJECT( + 'a': '123', + KEY 1.23 VALUE 123, + 'c' VALUE json '[1, true, {}]' +); +*/ + +SELECT JSON_OBJECT('a': '123', 'b': JSON_OBJECT('a': 111, 'b': 'aaa')); +SELECT JSON_OBJECT('a': '123', 'b': JSON_OBJECT('a': 111, 'b': 'aaa' RETURNING jsonb)); + +SELECT JSON_OBJECT('a': JSON_OBJECT('b': 1 RETURNING text)); +SELECT JSON_OBJECT('a': JSON_OBJECT('b': 1 RETURNING text) FORMAT JSON); +SELECT JSON_OBJECT('a': JSON_OBJECT('b': 1 RETURNING bytea)); +SELECT JSON_OBJECT('a': JSON_OBJECT('b': 1 RETURNING bytea) FORMAT JSON); + +SELECT JSON_OBJECT('a': '1', 'b': NULL, 'c': 2); +SELECT JSON_OBJECT('a': '1', 'b': NULL, 'c': 2 NULL ON NULL); +SELECT JSON_OBJECT('a': '1', 'b': NULL, 'c': 2 ABSENT ON NULL); + +SELECT JSON_OBJECT(1: 1, '1': NULL WITH UNIQUE); +SELECT JSON_OBJECT(1: 1, '1': NULL ABSENT ON NULL WITH UNIQUE); +SELECT JSON_OBJECT(1: 1, '1': NULL NULL ON NULL WITH UNIQUE RETURNING jsonb); +SELECT JSON_OBJECT(1: 1, '1': NULL ABSENT ON NULL WITH UNIQUE RETURNING jsonb); + +SELECT JSON_OBJECT(1: 1, '2': NULL, '1': 1 NULL ON NULL WITH UNIQUE); +SELECT JSON_OBJECT(1: 1, '2': NULL, '1': 1 ABSENT ON NULL WITH UNIQUE); +SELECT JSON_OBJECT(1: 1, '2': NULL, '1': 1 ABSENT ON NULL WITHOUT UNIQUE); +SELECT JSON_OBJECT(1: 1, '2': NULL, '1': 1 ABSENT ON NULL WITH UNIQUE RETURNING jsonb); +SELECT JSON_OBJECT(1: 1, '2': NULL, '1': 1 
ABSENT ON NULL WITHOUT UNIQUE RETURNING jsonb); +SELECT JSON_OBJECT(1: 1, '2': NULL, '3': 1, 4: NULL, '5': 'a' ABSENT ON NULL WITH UNIQUE RETURNING jsonb); + + +-- JSON_ARRAY() +SELECT JSON_ARRAY(); +SELECT JSON_ARRAY(RETURNING json); +SELECT JSON_ARRAY(RETURNING json FORMAT JSON); +SELECT JSON_ARRAY(RETURNING jsonb); +SELECT JSON_ARRAY(RETURNING jsonb FORMAT JSON); +SELECT JSON_ARRAY(RETURNING text); +SELECT JSON_ARRAY(RETURNING text FORMAT JSON); +SELECT JSON_ARRAY(RETURNING text FORMAT JSON ENCODING UTF8); +SELECT JSON_ARRAY(RETURNING text FORMAT JSON ENCODING INVALID_ENCODING); +SELECT JSON_ARRAY(RETURNING bytea); +SELECT JSON_ARRAY(RETURNING bytea FORMAT JSON); +SELECT JSON_ARRAY(RETURNING bytea FORMAT JSON ENCODING UTF8); +SELECT JSON_ARRAY(RETURNING bytea FORMAT JSON ENCODING UTF16); +SELECT JSON_ARRAY(RETURNING bytea FORMAT JSON ENCODING UTF32); + +SELECT JSON_ARRAY('aaa', 111, true, array[1,2,3], NULL, json '{"a": [1]}', jsonb '["a",3]'); + +SELECT JSON_ARRAY('a', NULL, 'b' NULL ON NULL); +SELECT JSON_ARRAY('a', NULL, 'b' ABSENT ON NULL); +SELECT JSON_ARRAY(NULL, NULL, 'b' ABSENT ON NULL); +SELECT JSON_ARRAY('a', NULL, 'b' NULL ON NULL RETURNING jsonb); +SELECT JSON_ARRAY('a', NULL, 'b' ABSENT ON NULL RETURNING jsonb); +SELECT JSON_ARRAY(NULL, NULL, 'b' ABSENT ON NULL RETURNING jsonb); + +SELECT JSON_ARRAY(JSON_ARRAY('{ "a" : 123 }' RETURNING text)); +SELECT JSON_ARRAY(JSON_ARRAY('{ "a" : 123 }' FORMAT JSON RETURNING text)); +SELECT JSON_ARRAY(JSON_ARRAY('{ "a" : 123 }' FORMAT JSON RETURNING text) FORMAT JSON); + +SELECT JSON_ARRAY(SELECT i FROM (VALUES (1), (2), (NULL), (4)) foo(i)); +SELECT JSON_ARRAY(SELECT i FROM (VALUES (NULL::int[]), ('{1,2}'), (NULL), (NULL), ('{3,4}'), (NULL)) foo(i)); +SELECT JSON_ARRAY(SELECT i FROM (VALUES (NULL::int[]), ('{1,2}'), (NULL), (NULL), ('{3,4}'), (NULL)) foo(i) RETURNING jsonb); +--SELECT JSON_ARRAY(SELECT i FROM (VALUES (NULL::int[]), ('{1,2}'), (NULL), (NULL), ('{3,4}'), (NULL)) foo(i) NULL ON NULL); +--SELECT JSON_ARRAY(SELECT i FROM (VALUES (NULL::int[]), ('{1,2}'), (NULL), (NULL), ('{3,4}'), (NULL)) foo(i) NULL ON NULL RETURNING jsonb); +SELECT JSON_ARRAY(SELECT i FROM (VALUES (3), (1), (NULL), (2)) foo(i) ORDER BY i); +-- Should fail +SELECT JSON_ARRAY(SELECT FROM (VALUES (1)) foo(i)); +SELECT JSON_ARRAY(SELECT i, i FROM (VALUES (1)) foo(i)); +SELECT JSON_ARRAY(SELECT * FROM (VALUES (1, 2)) foo(i, j)); + +-- JSON_ARRAYAGG() +SELECT JSON_ARRAYAGG(i) IS NULL, + JSON_ARRAYAGG(i RETURNING jsonb) IS NULL +FROM generate_series(1, 0) i; + +SELECT JSON_ARRAYAGG(i), + JSON_ARRAYAGG(i RETURNING jsonb) +FROM generate_series(1, 5) i; + +SELECT JSON_ARRAYAGG(i ORDER BY i DESC) +FROM generate_series(1, 5) i; + +SELECT JSON_ARRAYAGG(i::text::json) +FROM generate_series(1, 5) i; + +SELECT JSON_ARRAYAGG(JSON_ARRAY(i, i + 1 RETURNING text) FORMAT JSON) +FROM generate_series(1, 5) i; + +SELECT JSON_ARRAYAGG(NULL), + JSON_ARRAYAGG(NULL RETURNING jsonb) +FROM generate_series(1, 5); + +SELECT JSON_ARRAYAGG(NULL NULL ON NULL), + JSON_ARRAYAGG(NULL NULL ON NULL RETURNING jsonb) +FROM generate_series(1, 5); + +SELECT + JSON_ARRAYAGG(bar), + JSON_ARRAYAGG(bar RETURNING jsonb), + JSON_ARRAYAGG(bar ABSENT ON NULL), + JSON_ARRAYAGG(bar ABSENT ON NULL RETURNING jsonb), + JSON_ARRAYAGG(bar NULL ON NULL), + JSON_ARRAYAGG(bar NULL ON NULL RETURNING jsonb), + JSON_ARRAYAGG(foo), + JSON_ARRAYAGG(foo RETURNING jsonb), + JSON_ARRAYAGG(foo ORDER BY bar) FILTER (WHERE bar > 2), + JSON_ARRAYAGG(foo ORDER BY bar RETURNING jsonb) FILTER (WHERE bar > 2) +FROM + (VALUES (NULL), 
(3), (1), (NULL), (NULL), (5), (2), (4), (NULL)) foo(bar); + +SELECT + bar, JSON_ARRAYAGG(bar) FILTER (WHERE bar > 2) OVER (PARTITION BY foo.bar % 2) +FROM + (VALUES (NULL), (3), (1), (NULL), (NULL), (5), (2), (4), (NULL), (5), (4)) foo(bar); + +-- JSON_OBJECTAGG() +SELECT JSON_OBJECTAGG('key': 1) IS NULL, + JSON_OBJECTAGG('key': 1 RETURNING jsonb) IS NULL +WHERE FALSE; + +SELECT JSON_OBJECTAGG(NULL: 1); + +SELECT JSON_OBJECTAGG(NULL: 1 RETURNING jsonb); + +SELECT + JSON_OBJECTAGG(i: i), +-- JSON_OBJECTAGG(i VALUE i), +-- JSON_OBJECTAGG(KEY i VALUE i), + JSON_OBJECTAGG(i: i RETURNING jsonb) +FROM + generate_series(1, 5) i; + +SELECT + JSON_OBJECTAGG(k: v), + JSON_OBJECTAGG(k: v NULL ON NULL), + JSON_OBJECTAGG(k: v ABSENT ON NULL), + JSON_OBJECTAGG(k: v RETURNING jsonb), + JSON_OBJECTAGG(k: v NULL ON NULL RETURNING jsonb), + JSON_OBJECTAGG(k: v ABSENT ON NULL RETURNING jsonb) +FROM + (VALUES (1, 1), (1, NULL), (2, NULL), (3, 3)) foo(k, v); + +SELECT JSON_OBJECTAGG(k: v WITH UNIQUE KEYS) +FROM (VALUES (1, 1), (1, NULL), (2, 2)) foo(k, v); + +SELECT JSON_OBJECTAGG(k: v ABSENT ON NULL WITH UNIQUE KEYS) +FROM (VALUES (1, 1), (1, NULL), (2, 2)) foo(k, v); + +SELECT JSON_OBJECTAGG(k: v ABSENT ON NULL WITH UNIQUE KEYS) +FROM (VALUES (1, 1), (0, NULL), (3, NULL), (2, 2), (4, NULL)) foo(k, v); + +SELECT JSON_OBJECTAGG(k: v WITH UNIQUE KEYS RETURNING jsonb) +FROM (VALUES (1, 1), (1, NULL), (2, 2)) foo(k, v); + +SELECT JSON_OBJECTAGG(k: v ABSENT ON NULL WITH UNIQUE KEYS RETURNING jsonb) +FROM (VALUES (1, 1), (1, NULL), (2, 2)) foo(k, v); + +-- Test JSON_OBJECT deparsing +EXPLAIN (VERBOSE, COSTS OFF) +SELECT JSON_OBJECT('foo' : '1' FORMAT JSON, 'bar' : 'baz' RETURNING json); + +CREATE VIEW json_object_view AS +SELECT JSON_OBJECT('foo' : '1' FORMAT JSON, 'bar' : 'baz' RETURNING json); + +\sv json_object_view + +DROP VIEW json_object_view; + +-- Test JSON_ARRAY deparsing +EXPLAIN (VERBOSE, COSTS OFF) +SELECT JSON_ARRAY('1' FORMAT JSON, 2 RETURNING json); + +CREATE VIEW json_array_view AS +SELECT JSON_ARRAY('1' FORMAT JSON, 2 RETURNING json); + +\sv json_array_view + +DROP VIEW json_array_view; + +-- Test JSON_OBJECTAGG deparsing +EXPLAIN (VERBOSE, COSTS OFF) +SELECT JSON_OBJECTAGG(i: ('111' || i)::bytea FORMAT JSON WITH UNIQUE RETURNING text) FILTER (WHERE i > 3) +FROM generate_series(1,5) i; + +EXPLAIN (VERBOSE, COSTS OFF) +SELECT JSON_OBJECTAGG(i: ('111' || i)::bytea FORMAT JSON WITH UNIQUE RETURNING text) OVER (PARTITION BY i % 2) +FROM generate_series(1,5) i; + +CREATE VIEW json_objectagg_view AS +SELECT JSON_OBJECTAGG(i: ('111' || i)::bytea FORMAT JSON WITH UNIQUE RETURNING text) FILTER (WHERE i > 3) +FROM generate_series(1,5) i; + +\sv json_objectagg_view + +DROP VIEW json_objectagg_view; + +-- Test JSON_ARRAYAGG deparsing +EXPLAIN (VERBOSE, COSTS OFF) +SELECT JSON_ARRAYAGG(('111' || i)::bytea FORMAT JSON NULL ON NULL RETURNING text) FILTER (WHERE i > 3) +FROM generate_series(1,5) i; + +EXPLAIN (VERBOSE, COSTS OFF) +SELECT JSON_ARRAYAGG(('111' || i)::bytea FORMAT JSON NULL ON NULL RETURNING text) OVER (PARTITION BY i % 2) +FROM generate_series(1,5) i; + +CREATE VIEW json_arrayagg_view AS +SELECT JSON_ARRAYAGG(('111' || i)::bytea FORMAT JSON NULL ON NULL RETURNING text) FILTER (WHERE i > 3) +FROM generate_series(1,5) i; + +\sv json_arrayagg_view + +DROP VIEW json_arrayagg_view; + +-- Test JSON_ARRAY(subquery) deparsing +EXPLAIN (VERBOSE, COSTS OFF) +SELECT JSON_ARRAY(SELECT i FROM (VALUES (1), (2), (NULL), (4)) foo(i) RETURNING jsonb); + +CREATE VIEW json_array_subquery_view AS +SELECT 
JSON_ARRAY(SELECT i FROM (VALUES (1), (2), (NULL), (4)) foo(i) RETURNING jsonb); + +\sv json_array_subquery_view + +DROP VIEW json_array_subquery_view; -- 2.25.4
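[Reviewer note, not part of the patch] For quick reference while reading the tests above: the expected outputs pin down the constructors' defaults. JSON_OBJECT and JSON_OBJECTAGG keep NULL values (NULL ON NULL), JSON_ARRAY and JSON_ARRAYAGG drop them (ABSENT ON NULL), and duplicate keys are accepted unless WITH UNIQUE [KEYS] is requested. The condensed sketch below assumes the patch is installed; the queries are illustrative, and the commented results are taken from, or inferred from, the regression output above.

-- NULL handling defaults:
SELECT JSON_OBJECT('a': 1, 'b': NULL);                          -- {"a" : 1, "b" : null}
SELECT JSON_ARRAY(1, NULL, 2);                                  -- [1, 2]
SELECT JSON_OBJECTAGG(k: v) FROM (VALUES ('a', NULL)) t(k, v);  -- { "a" : null }
SELECT JSON_ARRAYAGG(v) FROM (VALUES (1), (NULL), (2)) t(v);    -- [1, 2]
-- Key uniqueness is only enforced on request (WITHOUT UNIQUE is the default):
SELECT JSON_OBJECT('k': 1, 'k': 2);                             -- {"k" : 1, "k" : 2}
SELECT JSON_OBJECT('k': 1, 'k': 2 WITH UNIQUE KEYS);            -- ERROR: duplicate JSON key "k"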