How to replace object in JSONB array with matching value(s)? - postgresql

I have some nested JSONB objects/arrays, and I want to replace (or insert) an entire object in the array when a given set of values matches.
I've tried the command UPDATE asset SET detail = jsonb_set(detail, '{"date": "01/01/2019"}', '{"name": "ABC", "date": "01/01/2019"}');, but it doesn't work, since jsonb_set expects an index in the path, and using || simply appends the new object to the array instead of replacing the matching one.
Schemas
Table
CREATE TABLE asset (
id uuid NOT NULL DEFAULT uuid_generate_v1(),
detail jsonb NULL,
events_connected jsonb NULL
);
JSONB Schemas
[{
"name": "Stackoverflow",
"date": "01/01/2019"
}]
A little more in depth
[{
"id": "12345",
"events": [{"type": 0, "date": "01/01/01", "ratio": 1.0}]
}]
EDIT:
I've played around with this code, which almost works, but it updates all rows with the new object instead of only the rows where the event id and keys match. The subquery that finds the matching params seems to work.
update asset
set events_connected =
    jsonb_set(
        events_connected,
        array[elem_index::text, 'events', nested_elem_index::text],
        '{"type": 2, "ratio": 5.0, "tax": 1, "date": "01/02/2019"}'::jsonb,
        true)
from (
    select
        (pos - 1) as "elem_index",
        (pos2 - 1) as "nested_elem_index",
        elem->>'id' as "id"
    from
        asset,
        jsonb_array_elements(events_connected) with ordinality arr(elem, pos),
        jsonb_array_elements(elem->'events') with ordinality arr2(elem2, pos2)
    WHERE
        elem->>'id' = '12345' AND
        elem2->>'type' = '1' AND
        elem2->>'date' = '01/02/2019'
) sub
WHERE
    sub."elem_index" IS NOT NULL AND sub."nested_elem_index" IS NOT NULL;
Fiddle: https://dbfiddle.uk/?rdbms=postgres_11&fiddle=1b3f3b0c7a9f0adda50c54011880b61d
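For what it's worth, one way to make the query above touch only the matching row (a sketch built from the schema shown above, not tested against real data): carry the asset's id through the subquery and join on it in the outer WHERE. Otherwise UPDATE ... FROM cross-joins the subquery result with every row of asset, so every row gets updated.
update asset
set events_connected = jsonb_set(
        asset.events_connected,
        array[sub.elem_index::text, 'events', sub.nested_elem_index::text],
        '{"type": 2, "ratio": 5.0, "tax": 1, "date": "01/02/2019"}'::jsonb,
        true)
from (
    select
        a.id as asset_id,              -- keep the row's key so the outer query can join back
        (pos - 1)  as elem_index,
        (pos2 - 1) as nested_elem_index
    from
        asset a,
        jsonb_array_elements(a.events_connected) with ordinality arr(elem, pos),
        jsonb_array_elements(elem->'events') with ordinality arr2(elem2, pos2)
    where
        elem->>'id' = '12345' and
        elem2->>'type' = '1' and
        elem2->>'date' = '01/02/2019'
) sub
where asset.id = sub.asset_id;         -- update only the row that actually contains the match
-- note: if one row has several matching nested events, a single statement updates only one of them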

Related

Update property of object in jsonb array and keep other properties

I have a postgres table with a jsonb column like this:
create table if not exists doc
(
id uuid not null
constraint pkey_doc_id
primary key,
data jsonb not null
);
INSERT INTO doc (id, data) VALUES ('3cf40366-ea58-402d-b63b-c9d6fdf99ec8', '{"Id": "3cf40366-ea58-402d-b63b-c9d6fdf99ec8", "Tags": [{"Key": "inoivce", "Value": "70086"},{"Key": "customer", "Value": "100233"}] }' );
INSERT INTO doc (id, data) VALUES ('ae2d1119-adb9-41d2-96e9-53445eaf97ab', '{"Id": "ae2d1119-adb9-41d2-96e9-53445eaf97ab", "Tags": [{"Key": "project", "Value": "12345"},{"Key": "customer", "Value": "100233"}]}' );
Tags.Key in the first row contains a typo inoivce which I want to fix to invoice:
{
"Id": "3cf40366-ea58-402d-b63b-c9d6fdf99ec8",
"Tags": [{
"Key": "inoivce",
"Value": "70086"
},{
"Key": "customer",
"Value": "100233"
}]
}
I tried this:
update doc set data = jsonb_set(
data,
'{"Tags"}',
$${"Key":"invoice"}$$
) where data #> '{"Tags": [{ "Key":"inoivce"}]}';
The typo gets fixed but I'm losing the other Tags elements in the array:
{
"Id": "3cf40366-ea58-402d-b63b-c9d6fdf99ec8",
"Tags": [{"Key": "invoice"}]
}
How can I fix the typo without removing the other elements of the Tags array?
Dbfiddle for repro.
One possible solution, not so obvious: we need a CTE here because the idea is to loop over the 'Tags' jsonb array elements and rebuild the array with the jsonb_agg aggregate function, but the SET clause of an UPDATE doesn't accept aggregate functions ...
WITH list AS
( SELECT d.id,
         (d.data - 'Tags') || jsonb_build_object(
             'Tags',
             jsonb_agg(
                 jsonb_set(e.content, '{Key}' :: text[],
                           to_jsonb(replace(e.content->>'Key', 'inoivce', 'invoice')))
                 ORDER BY e.id)) AS data
  FROM doc AS d
  CROSS JOIN LATERAL jsonb_array_elements(d.data->'Tags') WITH ORDINALITY AS e(content, id)
  WHERE d.data @? '$.Tags[*] ? (exists(@ ? (@.Key == "inoivce")))'
  GROUP BY d.id, d.data
)
UPDATE doc AS d
SET data = l.data
FROM list AS l
WHERE d.id = l.id
see the result in dbfiddle
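A variant of the same idea (a sketch using the same table, keys and typo fix): the aggregate can also live in a correlated scalar subquery inside the SET expression, which works around the same restriction on aggregates in SET without needing the CTE.
UPDATE doc AS d
SET data = (d.data - 'Tags') || jsonb_build_object(
        'Tags',
        (SELECT jsonb_agg(
                    jsonb_set(e.content, '{Key}'::text[],
                              to_jsonb(replace(e.content->>'Key', 'inoivce', 'invoice')))
                    ORDER BY e.id)
         FROM jsonb_array_elements(d.data->'Tags') WITH ORDINALITY AS e(content, id)))
WHERE d.data @? '$.Tags[*] ? (exists(@ ? (@.Key == "inoivce")))';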

How to add new property in an object nested in 2 arrays (JSONB postgresql)

I'm looking for help with adding a property to a JSON object nested in 2 arrays.
Table Example :
CREATE TABLE events (
seq_id BIGINT PRIMARY KEY,
data JSONB NOT NULL,
evt_type TEXT NOT NULL
);
example of my JSONB data column in my table:
{
"Id":"1",
"Calendar":{
"Entries":[
{
"Id": 1,
"SubEntries":[
{
"ExtId":{
"Id":"10",
"System": "SampleSys"
},
"Country":"FR",
"Details":[
{
"ItemId":"1",
"Quantity":10,
},
{
"ItemId":"2",
"Quantity":3,
}
],
"RequestId":"222",
"TypeId":1,
}
],
"OrderResult":null
}
],
"OtherThingsArray":[
]
}
}
So I need to add new properties to a SubEntries object based on the Id value of the ExtId object (that's the WHERE clause).
How can I do that please ?
Thanks a lot
You can use jsonb_set() for this, which takes the path to assign to as a text[] (array of text values), as follows:
SELECT jsonb_set(
    input_jsonb,
        -- the starting jsonb document
    path_array,
        -- '{i,j,k[, ...]}'::text[]; the series {i, j, k} progresses at each level with either the
        -- (string) key or (integer) index (starting at zero) denoting the new key (or index) to populate
    new_jsonb_value,
        -- if adding a key-value pair, you can use something like to_jsonb('new_value_string'::text)
        -- here to force things to format correctly
    create_if_not_exists_boolean
        -- if adding new keys/indexes, give this as true so they'll be appended;
        -- otherwise you'll be limited to overwriting existing keys
)
Example
json
{
"array1": [
{
"id": 1,
"detail": "test"
}
]
}
SQL
SELECT
jsonb_set('{"array1": [{"id": 1, "detail": "test"}]}'::jsonb,
'{array1,0,upd}'::TEXT[],
to_jsonb('new'::text),
true
)
Output
{
"array1": [
{
"id": 1,
"upd": "new",
"detail": "test"
}
]
}
Note that you can only add 1 nominal level of depth at a time (i.e. either a new key or a new index entry), but you can circumvent this by providing the full depth in the assignment value, or by using jsonb_set() iteratively:
select
jsonb_set(
jsonb_set('{"array1": [{"id": 1, "detail": "test"}]}'::jsonb, '{array1,0,upd}'::TEXT[], '[{"new": "val"}]'::jsonb, true),
'{array1,0,upd,0,check}'::TEXT[],
'"test_val"',
true)
would be required to produce
{
"array1": [
{
"id": 1,
"upd": [
{
"new": "val",
"check": "test_val"
}
],
"detail": "test"
}
]
}
If you need other, more complex logic to evaluate which values need to be added to which objects, you can try:
dynamically creating a set of jsonb_set() statements for execution
using the outputs from queries of jsonb_each() and jsonb_array_elements() to evaluate the row logic down at the SubEntries level, and then using jsonb_object_agg() and jsonb_agg() as appropriate to build the document back up to the root level from the resultant object-arrays and key-value collections (a sketch of this approach follows below)
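For the second approach, here is a rough sketch (untested; it uses the events table and key names from the example above, while the match on ExtId.Id = '10' and the added "NewProp" key are placeholders): explode Calendar.Entries and each entry's SubEntries, patch the matching sub-entry, re-aggregate each Entries array, and write it back with jsonb_set().
WITH new_entries AS (
    SELECT e.seq_id,
           jsonb_agg(
               jsonb_set(entry.content, '{SubEntries}', patched.subentries)
               ORDER BY entry.idx
           ) AS entries
    FROM events e
    CROSS JOIN LATERAL jsonb_array_elements(e.data #> '{Calendar,Entries}')
         WITH ORDINALITY AS entry(content, idx)
    CROSS JOIN LATERAL (
        -- rebuild this entry's SubEntries, adding the new property where ExtId.Id matches
        SELECT jsonb_agg(
                   CASE WHEN sub.content #>> '{ExtId,Id}' = '10'
                        THEN sub.content || '{"NewProp": "NewValue"}'::jsonb
                        ELSE sub.content
                   END
                   ORDER BY sub.idx
               ) AS subentries
        FROM jsonb_array_elements(entry.content -> 'SubEntries')
             WITH ORDINALITY AS sub(content, idx)
    ) AS patched
    GROUP BY e.seq_id
)
UPDATE events e
SET data = jsonb_set(e.data, '{Calendar,Entries}', n.entries)
FROM new_entries n
WHERE e.seq_id = n.seq_id;
-- assumes every entry has a non-empty SubEntries array; entries without one would need extra handling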

Postgres JSONB Editing columns

I was going through the Postgres Jsonb documentation but was unable to find a solution for a small issue I'm having.
I've got a table: MY_TABLE
that has the following columns:
User, Name, Data and Purchased
One thing to note is that "Data" is a jsonb with multiple fields. One of the fields inside "Data" is "Attributes", but the values it can hold are not consistent: it could be a string, a list of strings, an empty list, or just an empty string. I want to change this.
The only values that I want to allow are a list of strings and an empty list. I want to convert all the empty strings to empty lists and regular strings to a list of strings.
I have tried using json_build_array but have not had any luck
So for example, I'd want my final jsonb to look like :
[{
"Id": 1,
"Attributes": ["Test"]
},
{
"Id": 2,
"Attributes": []
},
{
"Id": 3,
"Attributes": []
}]
when converted from
[{
"Id": 1,
"Attributes": "Test"
},
{
"Id": 2,
"Attributes": ""
},
{
"Id": 3,
"Attributes": []
}
]
I only care about the "Attributes" field inside of the Json, not any other fields.
I also want to ensure that Attributes holding an empty string ("Attributes": "") get mapped to an empty list and not to a list containing an empty string ([] not [""]).
I also want to preserve the empty array values ([]) for the Attributes that already hold an empty array value.
This is what I have so far:
jsonb_set(
mycol,
'{Attributes}',
case when js ->> 'Attributes' <> ''
then jsonb_build_array(js ->> 'Attributes')
else '[]'::jsonb
end
)
However, Attributes: [] is getting mapped to ["[]"]
Use jsonb_array_elements() to iterate over the elements of each 'data' cell and jsonb_agg() to regroup the transformed values into an array:
WITH test_data(js) AS (
VALUES ($$ [
{
"Id": 1,
"Attributes": "Test"
},
{
"Id": 2,
"Attributes": ""
},
{
"Id": 3,
"Attributes": []
}
]
$$::JSONB)
)
SELECT transformed_elem
FROM test_data
JOIN LATERAL (
SELECT jsonb_agg(jsonb_set(
elem,
'{Attributes}',
CASE
WHEN elem -> 'Attributes' IN ('""', '[]') THEN '[]'::JSONB
WHEN jsonb_typeof(elem -> 'Attributes') = 'string' THEN jsonb_build_array(elem -> 'Attributes')
ELSE elem -> 'Attributes'
END
)) AS transformed_elem
FROM jsonb_array_elements(test_data.js) AS f(elem) -- iterate over every element in the array
) s
ON TRUE
returns
[{"Id": 1, "Attributes": ["Test"]}, {"Id": 2, "Attributes": []}, {"Id": 3, "Attributes": []}]

Postgres: Delete object from anonymous jsonb array element

I have a table with 2 fields:
table documents
docu_id uuid
attachments jsonb
A sample data for the attachments jsonb column would be:
[
{
"size": 10,
"attach_id": "d3a21f904068"
},{
"Size": 0.143,
"attach_id": "5ba4b285565b"
}
]
I have seen many examples of how to update/delete a jsonb based on field name, but is it possible to delete an anonymous object from an anonymous array where "attach_id" = "X" and "docu_id"="Y":
delete from documents
where docu_id = 'Y'
and attachments @> '[{"attach_id": "X"}]'
OK, found the solution, so I'm sharing it here (rextester link http://rextester.com/YICZ86369):
Inserting the data
create table documents(docu_id text, attachments jsonb);
insert into documents values
('001',
'[
{
"name": "uno",
"id":"1"
},
{
"name": "dos",
"id":"2"
},
{
"name": "tres",
"id":"3"
}
]'
),
('002',
'[
{
"name": "eins",
"id":"1"
},
{
"name": "zwei",
"id":"2"
}
]'
);
select * from documents;
The solution
UPDATE documents
SET attachments = attachments #-
array(
SELECT i
FROM generate_series(0, jsonb_array_length(attachments) - 1) AS i
WHERE (attachments->i->'id' = '"2"')
)::text[] /* cast as text */
where docu_id = '002';
select * from documents;
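An alternative sketch (same table and data as above) that avoids index arithmetic: keep every element whose id is not '2' and write the rebuilt array back. COALESCE covers the case where every element is removed, since jsonb_agg over zero rows yields NULL.
UPDATE documents d
SET attachments = COALESCE(
        (SELECT jsonb_agg(a.elem)
         FROM jsonb_array_elements(d.attachments) AS a(elem)
         WHERE a.elem ->> 'id' <> '2'),
        '[]'::jsonb)
WHERE d.docu_id = '002';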

Postgres query to fetch records from table having a column of type Array of Objects

I have a table with two properties called id and categoryArray, where id is of type number and categoryArray is a jsonb[] column. The schema is as shown below:
"id": 1,
"categoryArray": [{
"Field1": "A",
"Field2": "A1"
},
{
"Field1": "B",
"Field2": "A2"
},
{
"Field1": "C",
"Field2": "A3"
}
]
}
The table will have multiple records with distinct ids, each with objects inside categoryArray.
I want to query, in PostgreSQL, all the ids where Field1 = A inside the categoryArray objects.
I have tried using jsonb_array_elements but was not able to achieve the expected result.
Is there a way to query on the basis of Field1, which is inside the categoryArray objects?
Since you are already using jsonb[], you can use the following query to get records having Field1 = A, using the subscript-generating function generate_subscripts().
select
*
from
table
where
id in
(
select
id
from
(
select
id,
categoryArray,
generate_subscripts(categoryArray, 1) as s
FROM
table
) as i
where
categoryArray[s]->>'Field1' = 'A'
);
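For comparison, an equivalent formulation (a sketch using a placeholder table name my_table in place of the table above) that relies on unnest() over the jsonb[] column instead of generate_subscripts():
select t.id
from my_table t
where exists
(
    select 1
    from unnest(t.categoryArray) as u(elem)
    where u.elem ->> 'Field1' = 'A'
);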