I have a table that has 2 columns _key (string)
and data (json)
, I am trying to upsert data into the table and getting error
missing FROM-clause entry for table "updates"
The error goes away if I remove the DO UPDATE SET
and replace it with DO NOTHING
.
I have also tried using EXCLUDED.field
& EXCLUDED.value
in the UPDATE
section, but that results in another error
column excluded.field does not exist
Here is the query I am trying to fix, if there is a conflict the query is supposed to update the json object so that the column is set to the oldValue + newValue.
-- Batch upsert attempt: unnest three parallel arrays into (_key, field, value)
-- rows, then insert {field: value}; on a _key conflict, add value to the
-- numeric already stored under data->field.
WITH updates AS
(
SELECT UNNEST($1::TEXT[]) AS _key,
UNNEST($2::TEXT[]) AS field,
UNNEST($3::NUMERIC[]) AS value
)
INSERT INTO "legacy_hash" ("_key", "data")
SELECT _key, jsonb_build_object(field, value) FROM updates
ON CONFLICT ("_key")
-- NOTE(review): this is the part that fails. The DO UPDATE SET expression may
-- only reference the target table (legacy_hash) and the special EXCLUDED row;
-- the CTE "updates" is out of scope here, hence the error
-- 'missing FROM-clause entry for table "updates"'. EXCLUDED.field does not
-- work either because "field" is not a column of legacy_hash.
DO UPDATE SET "data" = jsonb_set(
"legacy_hash"."data",
ARRAY[updates.field],
to_jsonb(COALESCE(("legacy_hash"."data"->>updates.field)::NUMERIC, 0) + updates.value)
)
I have a version that works on a single (key, field, value)
below
-- Single-row upsert: insert {field: value}; when _key already exists, add the
-- new value to the numeric currently stored under data->field (treating a
-- missing field as 0).
INSERT INTO legacy_hash (_key, data)
VALUES (
    $1::TEXT,
    jsonb_build_object($2::TEXT, $3::NUMERIC)
)
ON CONFLICT (_key) DO UPDATE
SET data = jsonb_set(
    legacy_hash.data,
    ARRAY[$2::TEXT],
    to_jsonb(
        COALESCE((legacy_hash.data ->> $2::TEXT)::NUMERIC, 0) + $3::NUMERIC
    )
)
I have a table that has 2 columns _key (string)
and data (json)
, I am trying to upsert data into the table and getting error
missing FROM-clause entry for table "updates"
The error goes away if I remove the DO UPDATE SET
and replace it with DO NOTHING
.
I have also tried using EXCLUDED.field
& EXCLUDED.value
in the UPDATE
section, but that results in another error
column excluded.field does not exist
Here is the query I am trying to fix, if there is a conflict the query is supposed to update the json object so that the column is set to the oldValue + newValue.
-- Batch upsert attempt: unnest three parallel arrays into (_key, field, value)
-- rows, then insert {field: value}; on a _key conflict, add value to the
-- numeric already stored under data->field.
WITH updates AS
(
SELECT UNNEST($1::TEXT[]) AS _key,
UNNEST($2::TEXT[]) AS field,
UNNEST($3::NUMERIC[]) AS value
)
INSERT INTO "legacy_hash" ("_key", "data")
SELECT _key, jsonb_build_object(field, value) FROM updates
ON CONFLICT ("_key")
-- NOTE(review): this is the part that fails. The DO UPDATE SET expression may
-- only reference the target table (legacy_hash) and the special EXCLUDED row;
-- the CTE "updates" is out of scope here, hence the error
-- 'missing FROM-clause entry for table "updates"'. EXCLUDED.field does not
-- work either because "field" is not a column of legacy_hash.
DO UPDATE SET "data" = jsonb_set(
"legacy_hash"."data",
ARRAY[updates.field],
to_jsonb(COALESCE(("legacy_hash"."data"->>updates.field)::NUMERIC, 0) + updates.value)
)
I have a version that works on a single (key, field, value)
below
-- Single-row upsert: insert {field: value}; when _key already exists, add the
-- new value to the numeric currently stored under data->field (treating a
-- missing field as 0).
INSERT INTO legacy_hash (_key, data)
VALUES (
    $1::TEXT,
    jsonb_build_object($2::TEXT, $3::NUMERIC)
)
ON CONFLICT (_key) DO UPDATE
SET data = jsonb_set(
    legacy_hash.data,
    ARRAY[$2::TEXT],
    to_jsonb(
        COALESCE((legacy_hash.data ->> $2::TEXT)::NUMERIC, 0) + $3::NUMERIC
    )
)
Share
Improve this question
edited Feb 2 at 8:57
marc_s
755k184 gold badges1.4k silver badges1.5k bronze badges
asked Feb 2 at 2:41
Barış UşaklıBarış Uşaklı
13.5k7 gold badges44 silver badges69 bronze badges
2
|
2 Answers
Unfortunately, the only tables you can access in the ON CONFLICT DO UPDATE
clause are the existing row (referenced via the table name, or its alias if any) and the proposed-but-failing row (referenced via the special excluded
table).
The SET and WHERE clauses in ON CONFLICT DO UPDATE have access to the existing row using the table's name (or an alias), and to the row proposed for insertion using the special excluded table.
But in your case, you can rebuild what you need in a more convoluted way, by breaking down the jsonb
to get your values out. You can use the JSONPath function keyvalue()
to get the keys and values of an object as a JSONB array.
-- Batch upsert that stays within ON CONFLICT's visibility rules: the CTE is
-- referenced only in the INSERT's SELECT. The DO UPDATE branch recovers the
-- (field, value) pair from the proposed row via EXCLUDED.data instead of the
-- (out-of-scope) CTE, using the JSONPath keyvalue() method to unpack the
-- single-pair object built by jsonb_build_object.
WITH updates AS (
SELECT UNNEST($1::TEXT[]) AS _key,
UNNEST($2::TEXT[]) AS field,
UNNEST($3::NUMERIC[]) AS value
)
INSERT INTO legacy_hash (_key, data)
SELECT _key, jsonb_build_object(field, value)
FROM updates
ON CONFLICT (_key)
-- keyvalue() yields {"key": ..., "value": ...} objects; [0] takes the only
-- pair, ->>'key' / ->>'value' extract the field name and the numeric addend.
DO UPDATE SET data = jsonb_set(
legacy_hash.data,
ARRAY[jsonb_path_query_first(excluded.data, '$.keyvalue()[0]')->>'key'],
to_jsonb(
-- Missing field in the stored object is treated as 0 before adding.
COALESCE((legacy_hash.data->>(jsonb_path_query_first(excluded.data, '$.keyvalue()[0]')->>'key'::text))::NUMERIC, 0)
+ (jsonb_path_query_first(excluded.data, '$.keyvalue()[0]')->>'value')::NUMERIC
)
);
Alternatively, just use a MERGE
-- MERGE-based alternative: unlike ON CONFLICT, MERGE's WHEN clauses can
-- reference the source relation (u), so the CTE's field/value are directly
-- usable in the UPDATE branch.
-- NOTE(review): MERGE requires PostgreSQL 15+, and attaching a WITH clause to
-- MERGE requires a newer release still — confirm the target server version.
WITH updates AS (
SELECT UNNEST($1::TEXT[]) AS _key,
UNNEST($2::TEXT[]) AS field,
UNNEST($3::NUMERIC[]) AS value
)
MERGE INTO legacy_hash AS lh
USING updates AS u
ON u._key = lh._key
-- No existing row for this _key: insert a fresh single-pair object.
WHEN NOT MATCHED THEN
INSERT (_key, data)
VALUES (u._key, jsonb_build_object(u.field, u.value))
-- Existing row: add u.value to the numeric stored under data->u.field,
-- treating a missing field as 0.
WHEN MATCHED THEN
UPDATE SET
data = jsonb_set(
lh.data,
ARRAY[u.field],
to_jsonb(COALESCE((lh.data->>u.field)::NUMERIC, 0) + u.value)
)
;
db<>fiddle
Another one that works, instead of passing the fields/values as an array I've turned them into JSON strings.
-- Variant that passes each update as a pre-built JSONB object ($2) instead of
-- parallel field/value arrays. On conflict, the stored object and the proposed
-- object (EXCLUDED.data) are merged key by key.
INSERT INTO "legacy_hash" ("_key", "data")
SELECT k, d
FROM UNNEST($1::TEXT[], $2::JSONB[]) vs(k, d)
ON CONFLICT ("_key")
-- Rebuild the whole data object: iterate over the union of keys
-- (legacy_hash.data || EXCLUDED.data supplies every key from both sides).
DO UPDATE SET "data" = (
SELECT jsonb_object_agg(
key,
CASE
-- Both sides numeric for this key: store their sum.
WHEN jsonb_typeof(legacy_hash.data -> key) = 'number'
AND jsonb_typeof(EXCLUDED.data -> key) = 'number'
THEN to_jsonb((legacy_hash.data ->> key)::NUMERIC + (EXCLUDED.data ->> key)::NUMERIC)
-- Otherwise the incoming value wins; fall back to the stored one when
-- the key only exists in the stored object.
ELSE COALESCE(EXCLUDED.data -> key, legacy_hash.data -> key)
END
)
FROM jsonb_each(legacy_hash.data || EXCLUDED.data) AS merged(key, value)
);
"
Doing so introduces visual noise that makes queries more difficult to read. – JohnH Commented Feb 2 at 3:36
Consider merge, which is much more flexible than INSERT ... ON CONFLICT DO UPDATE. – JohnH Commented Feb 2 at 4:14