I have a Postgres table with a jsonb column like this:
create table if not exists doc
(
id uuid not null
constraint pkey_doc_id
primary key,
data jsonb not null
);
INSERT INTO doc (id, data) VALUES ('3cf40366-ea58-402d-b63b-c9d6fdf99ec8', '{"Id": "3cf40366-ea58-402d-b63b-c9d6fdf99ec8", "Tags": [{"Key": "inoivce", "Value": "70086"},{"Key": "customer", "Value": "100233"}] }' );
INSERT INTO doc (id, data) VALUES ('ae2d1119-adb9-41d2-96e9-53445eaf97ab', '{"Id": "ae2d1119-adb9-41d2-96e9-53445eaf97ab", "Tags": [{"Key": "project", "Value": "12345"},{"Key": "customer", "Value": "100233"}]}' );
Tags.Key in the first row contains a typo, "inoivce", which I want to fix to "invoice":
{
"Id": "3cf40366-ea58-402d-b63b-c9d6fdf99ec8",
"Tags": [{
"Key": "inoivce",
"Value": "70086"
},{
"Key": "customer",
"Value": "100233"
}]
}
I tried this:
update doc set data = jsonb_set(
data,
'{"Tags"}',
$${"Key":"invoice"}$$
) where data @> '{"Tags": [{ "Key":"inoivce"}]}';
The typo gets fixed but I'm losing the other Tags elements in the array:
{
"Id": "3cf40366-ea58-402d-b63b-c9d6fdf99ec8",
"Tags": [{"Key": "invoice"}]
}
How can I fix the typo without removing the other elements of the Tags array?
Dbfiddle for repro.
One possible solution, not so obvious: we need a CTE because the idea is to loop over the 'Tags' jsonb array elements and rebuild the array with the jsonb_agg aggregate function, but the SET clause of an UPDATE doesn't accept aggregate functions.
WITH list AS
( -- rebuild the whole Tags array for every row that contains the typo
  SELECT d.id,
         (d.data - 'Tags') || jsonb_build_object(
             'Tags',
             jsonb_agg(
                 jsonb_set(e.content, '{Key}' :: text[],
                           to_jsonb(replace(e.content->>'Key', 'inoivce', 'invoice')))
                 ORDER BY e.id)) AS data
  FROM doc AS d
  CROSS JOIN LATERAL jsonb_array_elements(d.data->'Tags') WITH ORDINALITY AS e(content, id)
  WHERE d.data @? '$.Tags[*] ? (exists(@ ? (@.Key == "inoivce")))'
  GROUP BY d.id, d.data
)
UPDATE doc AS d
SET data = l.data
FROM list AS l
WHERE d.id = l.id
See the result in dbfiddle.
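To confirm that the other array elements survive, a quick check against the first sample row (using the data inserted above) could look like this:
select data->'Tags' as tags
from doc
where id = '3cf40366-ea58-402d-b63b-c9d6fdf99ec8';
It should return both the corrected invoice tag and the untouched customer tag.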
Related
I have JSON like this, and based on the value of the enabled variable I want to insert items into a table.
[
{
"Name": "Shop1",
"Id": "1",
"enabled": false
},
{
"Name": "Shop2",
"Id": "2",
"enabled": true
}
]
In a query I would like to select any items where enabled is true and insert them into a table. The table I want to insert into has the columns
id, enabled
How do I query the JSON and push the matching items into the rows of the other table?
You can use jsonb_array_elements() for that:
insert into the_table (id, enabled)
select (e ->> 'Id')::int,
(e ->> 'enabled')::boolean
from jsonb_array_elements('[
{"Name": "Shop1","Id": "1","enabled": false},
{"Name": "Shop2","Id": "2","enabled": true}
]') as t(e)
where e ->> 'enabled' = 'true';
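If the JSON array lives in a table column rather than in a literal, the same pattern applies. Here is a minimal sketch, assuming a hypothetical staging table shops_raw with a single jsonb column payload that holds the array above:
-- shops_raw(payload jsonb) is a hypothetical staging table holding the raw array
insert into the_table (id, enabled)
select (e ->> 'Id')::int,
       (e ->> 'enabled')::boolean
from shops_raw r
cross join lateral jsonb_array_elements(r.payload) as t(e)
where e ->> 'enabled' = 'true';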
I have a field extra of type jsonb in my table product, and I need to get each unique key together with its unique values across all product rows. Example data in the extra field:
{"SIZE": "110/116", "COLOUR": "Vit", "GENDER": "female", "AGE_GROUP": "Kids", "ALTERNATIVE_IMAGE": "some_path"}
For now I use a query like this:
select DISTINCT e.key, array_agg(DISTINCT e.value) as fields
from products AS p
join jsonb_each_text(p.extras) e on true
GROUP BY e.key
And I get a response like this (a small part with some keys; in the full response all keys are present):
[
{
"key": "AGE_GROUP",
"fields": "{Adult,children,Kids}"
},
{
"key": "GENDER",
"fields": "{female,male,man}"
}
]
How can I change the fields alias to an array?
Like this:
[
{
"AGE_GROUP": ["Adult","children","Kids"]
},
{
"GENDER": ["female","male","man"]
}
]
Or maybe it would be great like this:
[
"some_alias": [{"AGE_GROUP": "Adult", "AGE_GROUP": "children", "AGE_GROUP": "Kids"}],
"some_alias": [{"GENDER": "female", "GENDER": "male", "GENDER": "man"}]
]
This will get you the former form I think:
select jsonb_agg(jsonb_build_object(k,v)) from (
select DISTINCT e.key, jsonb_agg(DISTINCT e.value) as fields
from products AS p
join jsonb_each_text(p.extras) e on true
GROUP BY e.key
) b(k,v);
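With the sample keys from the question, the output should look roughly like the first desired form, e.g.:
[
  {"AGE_GROUP": ["Adult", "children", "Kids"]},
  {"GENDER": ["female", "male", "man"]}
]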
Best regards,
Bjarni
I have some nested JSONB objects/arrays where I want to update/insert an array element completely when a given set of values matches.
I've tried this command UPDATE asset SET detail = jsonb_set(detail, '{"date": "01/01/2019"}', '{"name": "ABC", "date": "01/01/2019"}');, but it doesn't work since jsonb_set expects an array index in the path, and using || simply appends the new object to the array instead of replacing it.
Schemas
Table
CREATE TABLE asset (
id uuid NOT NULL DEFAULT uuid_generate_v1(),
detail jsonb NULL,
events_connected jsonb NULL
);
JSONB Schemas
[{
"name": "Stackoverflow",
"date": "01/01/2019"
}]
A little more in depth
[{
"id": "12345",
"events": [{"type": 0, "date": "01/01/01", "ratio": 1.0}]
}]
EDIT:
I've played around with this code, which almost works, but it updates all rows with the new object instead of only updating the row whose event id and keys match. The subquery that finds the matching params seems to work.
update
asset
set
events_connected =
jsonb_set(
events_connected,
array[elem_index::text, 'events', nested_elem_index::text],
'{"type": 2, "ratio": 5.0, "tax": 1, "date": "01/02/2019"}'::jsonb,
true)
from (
select
(pos - 1) as "elem_index",
(pos2 - 1) as "nested_elem_index",
elem->>'id' as "id"
from
asset,
jsonb_array_elements(events_connected) with ordinality arr(elem, pos),
jsonb_array_elements(elem->'events') with ordinality arr2(elem2, pos2)
WHERE
elem->>'id' = '12345' AND
elem2->>'type' = '1' AND
elem2->>'date' = '01/02/2019'
) sub
WHERE
sub."elem_index" IS NOT NULL AND sub."nested_elem_index" IS NOT NULL;
Fiddle: https://dbfiddle.uk/?rdbms=postgres_11&fiddle=1b3f3b0c7a9f0adda50c54011880b61d
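One way to avoid updating every row is to carry the matching row's id out of the subquery and correlate on it in the outer WHERE clause. A sketch along those lines, assuming the schema above (not taken from the fiddle, so treat it as a starting point):
update asset
set events_connected = jsonb_set(
        events_connected,
        array[sub.elem_index::text, 'events', sub.nested_elem_index::text],
        '{"type": 2, "ratio": 5.0, "tax": 1, "date": "01/02/2019"}'::jsonb,
        true)
from (
    select a.id as asset_id,            -- keep the row id so the outer UPDATE can correlate
           (pos - 1)  as elem_index,
           (pos2 - 1) as nested_elem_index
    from asset a,
         jsonb_array_elements(a.events_connected) with ordinality arr(elem, pos),
         jsonb_array_elements(elem->'events') with ordinality arr2(elem2, pos2)
    where elem->>'id' = '12345'
      and elem2->>'type' = '1'
      and elem2->>'date' = '01/02/2019'
) sub
where asset.id = sub.asset_id;          -- only rows that actually contain a match are touched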
The JSON below is in a column of type JSONB in my table 'logic'. I want to query how many rows there are with type: QUESTION (any entry within conditions).
{
"name": null,
"conditions": [
{
"type": "QUESTION",
"question": {
}
},
{
"type": "QUESTION",
"question": {
}
},
{
"type": "FIELD",
"question": {
}
}
],
"expression": "A"
}
If you want to count the number of times a "type": "QUESTION" entry appears within conditions of the jsonb column throughout the table:
select count(*) FROM logic CROSS JOIN LATERAL
jsonb_array_elements(jsonb_col->'conditions') AS j(typ)
WHERE j.typ->>'type' = 'QUESTION'
If you want to count the number of times a "type": "QUESTION" entry appears within conditions for each row:
select jsonb_col, count(*) FROM logic CROSS JOIN LATERAL
jsonb_array_elements(jsonb_col->'conditions') AS j(typ)
WHERE j.typ->>'type' = 'QUESTION'
group by jsonb_col
If you want to check how many rows have at least one entry within conditions with 'type' = 'QUESTION':
select count(*) FROM
(
select DISTINCT jsonb_col FROM logic CROSS JOIN LATERAL
jsonb_array_elements(jsonb_col->'conditions') AS j(typ)
WHERE j.typ->>'type' = 'QUESTION'
) s;
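For the last case, a simple containment check can also answer the "at least one matching entry per row" question without expanding the array. A sketch, assuming the column is called jsonb_col as above:
select count(*)
FROM logic
WHERE jsonb_col->'conditions' @> '[{"type": "QUESTION"}]';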
Use whichever query is appropriate for your case.
Demo
I have a table with 2 fields:
table documents
docu_id uuid
attachments jsonb
Sample data for the attachments jsonb column would be:
[
{
"size": 10,
"attach_id": "d3a21f904068"
},{
"Size": 0.143,
"attach_id": "5ba4b285565b"
}
]
I have seen many examples of how to update/delete a jsonb value based on a field name, but is it possible to delete an anonymous object from an anonymous array where "attach_id" = "X" and "docu_id" = "Y"? Something like:
delete from documents
where docu_id = 'Y'
and attachments @> '[{"attach_id": "X"}]'
OK, I found the solution, so I'm sharing it here (rextester link: http://rextester.com/YICZ86369):
Inserting the data
create table documents(docu_id text, attachments jsonb);
insert into documents values
('001',
'[
{
"name": "uno",
"id":"1"
},
{
"name": "dos",
"id":"2"
},
{
"name": "tres",
"id":"3"
}
]'
),
('002',
'[
{
"name": "eins",
"id":"1"
},
{
"name": "zwei",
"id":"2"
}
]'
);
select * from documents;
The solution
UPDATE documents
SET attachments = attachments #-
array(
SELECT i
FROM generate_series(0, jsonb_array_length(attachments) - 1) AS i
WHERE (attachments->i->'id' = '"2"')
)::text[] /* cast the matching index to text[] so it can be used as a #- path */
where docu_id = '002';
select * from documents;
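An alternative is to rebuild the array without the matching element, which also works when more than one element matches. A sketch under the same schema (not part of the original solution):
UPDATE documents d
SET attachments = (
    SELECT coalesce(jsonb_agg(elem), '[]'::jsonb)   -- keep every element except id = "2"
    FROM jsonb_array_elements(d.attachments) AS elem
    WHERE elem->>'id' <> '2'
)
where docu_id = '002';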