How to return data in JSON hierarchy? - postgresql

Given the following tables and function in PostgreSQL 12:
create table orders
(
    orderid integer,
    grandtotal numeric(10, 2)
);
create table odetails
(
    orderid integer,
    detailid integer,
    description text
);
create function jorder() returns json
language plpgsql as $$
begin
    return query select od.orderid, od.grandtotal, ds.detailid, ds.description
    from orders od
    join odetails ds on od.orderid = ds.orderid;
end;
$$;
How do I get the function to return data in a JSON hierarchy like the one below?
[{
"orderid": 1,
"grandtotal": 100.00,
"details": [{
"detailid": 11,
"description": "pen"
},
{
"detailid": 12,
"description": "orange"
}
]
}, {
"orderid": 2,
"grandtotal": 200.00,
"details": [{
"detailid": 21,
"description": "book"
},
{
"detailid": 22,
"description": "coffee"
},
{
"detailid": 23,
"description": "tea"
}
]
}]

You should look into the JSON functions. You need json_build_object to form the objects and json_agg to aggregate them into a JSON array:
CREATE FUNCTION jorder()
RETURNS json
LANGUAGE sql
AS $$
    SELECT
        json_agg(orders.order)
    FROM (
        SELECT
            json_build_object(
                'orderid', od.orderid,
                'grandtotal', od.grandtotal,
                'details', array_agg(
                    json_build_object(
                        'detailid', ds.detailid,
                        'description', ds.description
                    )
                )
            ) as order
        FROM
            orders od
            JOIN odetails ds on od.orderid = ds.orderid
        GROUP BY
            od.orderid, od.grandtotal
    ) as orders
$$;
Example at db<>fiddle
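To try it out, you can load sample data matching the desired output from the question and call the function directly (the INSERT statements below are illustrative, not part of the original answer):
-- illustrative sample data taken from the question's expected output
INSERT INTO orders VALUES (1, 100.00), (2, 200.00);
INSERT INTO odetails VALUES
    (1, 11, 'pen'), (1, 12, 'orange'),
    (2, 21, 'book'), (2, 22, 'coffee'), (2, 23, 'tea');
-- returns the nested JSON array shown above
SELECT jorder();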

Related

How to reverse query for a record, returning all of its parent and their parents?

Suppose the following,
create schema if not exists my_schema;
CREATE TABLE IF NOT EXISTS my_schema.category (
id serial PRIMARY KEY,
category_name VARCHAR (255) NOT NULL,
subcategories BIGINT[] DEFAULT ARRAY[]::BIGINT[]
);
INSERT INTO my_schema.category VALUES
(
1,
'Pickup/dropoff',
'{}'
),
(
2,
'Electrician',
'{}'
),
(
3,
'Around the house',
'{2}'
),
(
4,
'Personal',
'{3}'
);
I'm trying to create breadcrumbs on the frontend. For instance, if Electrician is the selected category, I'd like to render something like Personal > Around the house > Electrician.
Ideally, I'd like the query to return something like,
{
"id": 2,
"breadcrumbs": "Personal.Around the house.Electrician"
}
This way, I could just do breadcrumbs.split('.').map(. . .).
Another result that would work is something like,
{
"id": 2,
"category_name": "Electrician"
"breadcrumbs": [
{
"id": 4,
"category_name": "Personal"
},
{
"id": 3,
"category_name": "Around the house"
},
]
}
Or even,
{
"breadcrumbs": [
{
"id": 4,
"category_name": "Personal"
},
{
"id": 3,
"category_name": "Around the house"
},
{
"id": 2,
"category_name": "Electrician"
}
]
}
How can I attain such a result?
You can do it using WITH RECURSIVE as follows. To get the breadcrumbs for any category, just put its id in the WHERE clause of the first SELECT (here id = 2):
WITH RECURSIVE cte(n, id, selectedCat) AS
(
    SELECT 1, id, id::text
    FROM category
    WHERE id = 2
  UNION ALL
    SELECT n + 1, e.id, ep.selectedCat
    FROM cte AS ep
    JOIN category AS e ON ep.id = ANY (e.subcategories)
)
SELECT selectedCat, string_agg(category_name, ',') AS breadcrumbs
FROM (
    SELECT selectedCat, category_name
    FROM cte
    INNER JOIN category c ON c.id = cte.id
    ORDER BY n DESC
) AS s
GROUP BY selectedCat
demo here
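If you prefer one of the JSON shapes from the question rather than a delimited string, the same recursive CTE can feed json_agg / json_build_object. A minimal sketch, assuming the my_schema.category table from the question:
-- sketch: JSON breadcrumb array for category id = 2, oldest ancestor first
WITH RECURSIVE cte(n, id) AS
(
    SELECT 1, id
    FROM my_schema.category
    WHERE id = 2
  UNION ALL
    SELECT n + 1, e.id
    FROM cte AS ep
    JOIN my_schema.category AS e ON ep.id = ANY (e.subcategories)
)
SELECT json_build_object(
           'breadcrumbs',
           json_agg(json_build_object('id', c.id, 'category_name', c.category_name)
                    ORDER BY cte.n DESC)
       ) AS result
FROM cte
JOIN my_schema.category AS c ON c.id = cte.id;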

Update property of object in jsonb array and keep other properties

I have a postgres table with a jsonb column like this:
create table if not exists doc
(
id uuid not null
constraint pkey_doc_id
primary key,
data jsonb not null
);
INSERT INTO doc (id, data) VALUES ('3cf40366-ea58-402d-b63b-c9d6fdf99ec8', '{"Id": "3cf40366-ea58-402d-b63b-c9d6fdf99ec8", "Tags": [{"Key": "inoivce", "Value": "70086"},{"Key": "customer", "Value": "100233"}] }' );
INSERT INTO doc (id, data) VALUES ('ae2d1119-adb9-41d2-96e9-53445eaf97ab', '{"Id": "ae2d1119-adb9-41d2-96e9-53445eaf97ab", "Tags": [{"Key": "project", "Value": "12345"},{"Key": "customer", "Value": "100233"}]}' );
Tags.Key in the first row contains a typo, inoivce, which I want to fix to invoice:
{
"Id": "3cf40366-ea58-402d-b63b-c9d6fdf99ec8",
"Tags": [{
"Key": "inoivce",
"Value": "70086"
},{
"Key": "customer",
"Value": "100233"
}]
}
I tried this:
update doc set data = jsonb_set(
    data,
    '{"Tags"}',
    $${"Key":"invoice"}$$
) where data @> '{"Tags": [{ "Key":"inoivce"}]}';
The typo gets fixed but I'm losing the other Tags elements in the array:
{
"Id": "3cf40366-ea58-402d-b63b-c9d6fdf99ec8",
"Tags": [{"Key": "invoice"}]
}
How can I fix the typo without removing the other elements of the Tags array?
Dbfiddle for repro.
One possible solution, not so obvious: we need a CTE because the idea is to loop over the 'Tags' jsonb array elements and rebuild the array with the jsonb_agg aggregate function, but the SET clause of an UPDATE doesn't accept aggregate functions.
WITH list AS
(
    SELECT d.id,
           (d.data - 'Tags') || jsonb_build_object(
               'Tags',
               jsonb_agg(
                   jsonb_set(
                       e.content,
                       '{Key}' :: text[],
                       to_jsonb(replace(e.content->>'Key', 'inoivce', 'invoice'))
                   )
                   ORDER BY e.id
               )
           ) AS data
    FROM doc AS d
    CROSS JOIN LATERAL jsonb_array_elements(d.data->'Tags') WITH ORDINALITY AS e(content, id)
    WHERE d.data @? '$.Tags[*] ? (exists(@ ? (@.Key == "inoivce")))'
    GROUP BY d.id, d.data
)
UPDATE doc AS d
SET data = l.data
FROM list AS l
WHERE d.id = l.id
see the result in dbfiddle
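A quick way to check that only the misspelled key changed and that the other array elements were kept (a sketch, not part of the original answer):
-- pretty-print the Tags array of each document after the update
SELECT id, jsonb_pretty(data->'Tags') AS tags
FROM doc
ORDER BY id;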

PostgreSQL how to merge jsonb keys and count values

I have a table that looks something like the following:
id | name     | category | specs (jsonb)
---+----------+----------+------------------------------------------------------------------------
 1 | product1 | phones   | { "brand": "brand1", "color": "red", "size": 5, "memory": "8GB"}
 2 | product2 | phones   | { "brand": "brand1", "color": "white", "size": 7, "memory": "8GB"}
 3 | product3 | laptops  | { "brand": "brand20", "storage": "SSD", "os": "os1", "memory": "32GB"}
My desired output, given a specific category:
{
"brand": {
"brand1": 1,
"brand2": 1
},
"color": {
"red": 1,
"white": 5,
},
"memory": {
"8gb": 2,
}
}
Blow out the specs column with jsonb_each_text(), calculate the counts, and then reassemble with jsonb_object_agg() (the first two CTEs could be combined into one, but I left them verbose for illustration):
with blowout as (
    select s.category, j.key, j.value
    from somthing s
    cross join lateral jsonb_each_text(s.specs) as j(key, value)
), counts as (
    select category, key, value, count(1) as cnt
    from blowout
    group by category, key, value
), agg_specs as (
    select category, key, jsonb_object_agg(value, cnt) as counts
    from counts
    group by category, key
)
select category, jsonb_object_agg(key, counts) as output
from agg_specs
group by category;
db<>fiddle here
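If you only want the object for a single category, as the question asks, the same aggregation can be collapsed and filtered. A sketch only, keeping the answer's table name somthing and hard-coding 'phones' as the category:
-- sketch: counts for one category, same technique as above but without named CTEs
SELECT jsonb_object_agg(key, counts) AS output
FROM (
    SELECT key, jsonb_object_agg(value, cnt) AS counts
    FROM (
        SELECT j.key, j.value, count(*) AS cnt
        FROM somthing s
        CROSS JOIN LATERAL jsonb_each_text(s.specs) AS j(key, value)
        WHERE s.category = 'phones'
        GROUP BY j.key, j.value
    ) AS c
    GROUP BY key
) AS k;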

Is there a magic function which can extract all selected keys/nested keys, including arrays, from jsonb?

Given a jsonb value and a set of keys, how can I get a new jsonb containing only the required keys?
I've tried extracting the key-values, assigning them to a text[], and then using jsonb_object(text[]). It works well, but the problem comes when a key holds an array of JSON objects.
create table my_jsonb_table
(
data_col jsonb
);
insert into my_jsonb_table (data_col) Values ('{
"schemaVersion": "1",
"Id": "20180601550002",
"Domains": [
{
"UID": "29aa2923",
"quantity": 1,
"item": "book",
"DepartmentDomain": {
"type": "paper",
"departId": "10"
},
"PriceDomain": {
"Price": 79.00,
"taxA": 6.500,
"discount": 0
}
},
{
"UID": "bbaa2923",
"quantity": 2,
"item": "pencil",
"DepartmentDomain": {
"type": "wood",
"departId": "11"
},
"PriceDomain": {
"Price": 7.00,
"taxA": 1.5175,
"discount": 1
}
}
],
"finalPrice": {
"totalTax": 13.50,
"total": 85.0
},
"MetaData": {
"shopId": "1405596346",
"locId": "95014",
"countryId": "USA",
"regId": "255",
"Date": "20180601"
}
}
');
This is what I am trying to achieve:
SELECT some_magic_fun(data_col,'Id,Domains.UID,Domains.DepartmentDomain.departId,finalPrice.total')::jsonb FROM my_jsonb_table;
I am trying to create that magic function which extracts the given keys as jsonb. As of now I am able to extract scalar items, put them in a text[], and use jsonb_object, but I don't know how to extract all the elements of an array.
expected output :
{
"Id": "20180601550002",
"Domains": [
{
"UID": "29aa2923",
"DepartmentDomain": {
"departId": "10"
}
},
{
"UID": "bbaa2923",
"DepartmentDomain": {
"departId": "11"
}
}
],
"finalPrice": {
"total": 85.0
}
}
I don't know of any magic. You have to rebuild it yourself.
select jsonb_build_object(
    -- Straight forward
    'Id', data_col->'Id',
    'Domains', (
        -- Aggregate all the "rows" back together into an array.
        select jsonb_agg(
            -- Turn each array element into a new object
            jsonb_build_object(
                'UID', domain->'UID',
                'DepartmentDomain', jsonb_build_object(
                    'departId', domain#>'{DepartmentDomain,departId}'
                )
            )
        )
        -- Turn each element of the Domains array into a row
        from jsonb_array_elements( data_col->'Domains' ) d(domain)
    ),
    -- Also pretty straightforward
    'finalPrice', jsonb_build_object(
        'total', data_col#>'{finalPrice,total}'
    )
) from my_jsonb_table;
This probably is not a good use of a JSON column. Your data is relational and would better fit traditional relational tables.
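That said, if you want it packaged as a callable function like some_magic_fun in the question, one option is to wrap the query above in a SQL function. This is only a sketch: the name extract_subset is made up here, and the key list is hard-coded rather than parsed from a path string:
-- Hypothetical helper, not from the answer: the keys to keep are hard-coded.
CREATE FUNCTION extract_subset(data_col jsonb) RETURNS jsonb
LANGUAGE sql AS $$
    SELECT jsonb_build_object(
        'Id', data_col->'Id',
        'Domains', (
            SELECT jsonb_agg(
                jsonb_build_object(
                    'UID', domain->'UID',
                    'DepartmentDomain', jsonb_build_object(
                        'departId', domain#>'{DepartmentDomain,departId}'
                    )
                )
            )
            FROM jsonb_array_elements(data_col->'Domains') d(domain)
        ),
        'finalPrice', jsonb_build_object(
            'total', data_col#>'{finalPrice,total}'
        )
    )
$$;
-- Usage:
SELECT extract_subset(data_col) FROM my_jsonb_table;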

Postgres: Delete object from anonymous jsonb array element

I have a table with 2 fields:
table documents
docu_id uuid
attachments jsonb
A sample data for the attachments jsonb column would be:
[
{
"size": 10,
"attach_id": "d3a21f904068"
},{
"Size": 0.143,
"attach_id": "5ba4b285565b"
}
]
I have seen many examples of how to update/delete a jsonb based on field name, but is it possible to delete an anonymous object from an anonymous array where "attach_id" = "X" and "docu_id"="Y":
delete from documents
where docu_id = "Y"
and
where attachments #> '[{"attach_id": "X"}]'
OK, found the solution, so I'm sharing it here (rextester link: http://rextester.com/YICZ86369):
Inserting the data
create table documents(docu_id text, attachments jsonb);
insert into documents values
('001',
'[
{
"name": "uno",
"id":"1"
},
{
"name": "dos",
"id":"2"
},
{
"name": "tres",
"id":"3"
}
]'
),
('002',
'[
{
"name": "eins",
"id":"1"
},
{
"name": "zwei",
"id":"2"
}
]'
);
select * from documents;
The solution
UPDATE documents
SET attachments = attachments #-
    array(
        SELECT i
        FROM generate_series(0, jsonb_array_length(attachments) - 1) AS i
        WHERE (attachments->i->'id' = '"2"')
    )::text[] /* cast as text */
WHERE docu_id = '002';
select * from documents;
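An alternative that avoids positional indexes entirely is to rebuild the array without the matching element using jsonb_array_elements and jsonb_agg. A sketch against the same sample data (not part of the original self-answer):
-- rebuild the attachments array, dropping the element whose id is "2"
UPDATE documents d
SET attachments = coalesce(
        (SELECT jsonb_agg(elem)
         FROM jsonb_array_elements(d.attachments) AS elem
         WHERE elem->>'id' <> '2'),
        '[]'::jsonb)
WHERE d.docu_id = '002'
  AND d.attachments @> '[{"id": "2"}]';
select * from documents;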