Postgres - Remove NULL values from a json array - postgresql

I'm building a JSON ARRAY from a table which has a JSON column and non-JSON columns.
Here is my sample data and query:
create table check_details(SHORT_NAME VARCHAR, UNIQUE_NO JSON,STATUS VARCHAR);
insert into check_details values('Anu','{"ID":"1e699-76af2"}','REJECTED');
select json_agg(json_strip_nulls(
    json_build_object(
        'Name', SHORT_NAME,
        'IDS', jsonb_build_array(
            case SIGN(position('ACCEPTED' in STATUS))
                when 1 then UNIQUE_NO::jsonb->>'ID'
                else json_typeof(NULL::json)
            end
        )
    )
))
from check_details;
I am getting this result:
[{"Name":"Anu","IDS":[null]}]
But I do not want the "IDS":[null] part in my result when the value of the key IDS is NULL.
How can I achieve this result:
[{"Name":"Anu"}]
When IDS has a valid value, it has to be an array. Hence using jsonb_build_array.

This is because you are placing the result of your CASE expression inside a JSON array, so you get a non-empty array containing a JSON null rather than a null JSON value.
So you would need to stop it being an array if you want the NULL to be stripped:
SELECT
    json_strip_nulls(
        json_build_object(
            'Name', short_name,
            'IDS',
            CASE SIGN(position('ACCEPTED' IN status))
                WHEN 1 THEN (unique_no::jsonb->>'ID')::text
                ELSE NULL
            END
        )
    )
FROM
    check_details;
json_strip_nulls
------------------
{"Name":"Anu"}
(1 row)
Note that json_strip_nulls() doesn't remove null values from JSON arrays.
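For illustration, a quick way to see that behaviour (object keys with null values are removed, but array elements are left alone):
select json_strip_nulls('{"a":null,"b":[1,null,2]}'::json);
-- "a" is stripped from the object, but the null inside the array "b" stays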
Edit:
But as you require non-null values to show up as an array, move the jsonb_build_array() call into the CASE expression:
SELECT
    json_strip_nulls(
        json_build_object(
            'Name', short_name,
            'IDS',
            CASE SIGN(position('ACCEPTED' IN status))
                WHEN 1 THEN jsonb_build_array((unique_no::jsonb->>'ID')::text)
                ELSE NULL
            END
        )
    )
FROM
    check_details;
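For completeness, a hypothetical accepted row (the name and ID below are made up) shows that the array survives the stripping:
insert into check_details values('Bob','{"ID":"2f700-88bc3"}','ACCEPTED');
-- the query above would then return one JSON value per row, roughly:
--   {"Name":"Anu"}
--   {"Name":"Bob","IDS":["2f700-88bc3"]}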

Related

Update table with newly added column containing data from the same table's old column, but modified (flattened) jsonb

So I've come across an issue where I have to migrate data from one column to a "clone" of itself with a different jsonb schema -> I need to parse the JSON from
["keynamed": [...{"type": "type_info", "value": "value_in_here"}]] into a plain object with key:value pairs, dictionary-like: {"type_info": "value_in_here" ,...}
So far I've tried subqueries with JSON functions plus a switch case to map "type" to "type_info" and then jsonb_build_object(), but this takes data from the whole table and I need it to run on update with data from the row. Is there anything simpler than doing N subqueries? The closest I've come up with is:
select
    jsonb_object_agg(t.k, t.v)::jsonb as _json
from
    (
        select
            jsonb_build_object(type_, _value) as _json
        from
            (
                select
                    _value,
                    CASE _type
                        ...
                    END type_
                from
                    (
                        select
                            (datasets ->> 'type') as _type,
                            datasets -> 'value' as _value
                        from
                            (
                                select
                                    jsonb_array_elements(values -> 'keynamed') as datasets
                                from
                                    table
                            ) s
                    ) s
            ) s
    ) s,
    jsonb_each(_json) as t(k, v);
But I have no idea how to make it row-specific and apply it to a simple update like:
UPDATE table
SET table.new_field = (subquery with parsed dict in json)
Any ideas/tips on how to solve this with plain SQL in Postgres, without any external support?
The expected output of the table would be:
id | old_value | new_value
----------------+-------------------------------------+------------------------------------
1 | ["keynamed": [...{"type": "type_info", "value": "value_in_here"}]] | {"type_info": "value_in_here" ,...}
According to the Postgres documentation, you can use UPDATE with a FROM clause to join against another table (see the UPDATE documentation).
Sample:
UPDATE accounts SET contact_first_name = first_name,
contact_last_name = last_name
FROM salesmen WHERE salesmen.id = accounts.sales_id;
If I understand correctly, the query below can help you, but I can't test it because I don't have sample data, so I don't know whether it has syntax errors or not.
update table t
set new_value = tmp._json
from (
    select
        id,
        jsonb_object_agg(t.k, t.v)::jsonb as _json
    from
        (
            select
                id,
                jsonb_build_object(type_, _value) as _json
            from
                (
                    select
                        id,
                        _value,
                        CASE _type
                            ...
                        END type_
                    from
                        (
                            select
                                id,
                                (datasets ->> 'type') as _type,
                                datasets -> 'value' as _value
                            from
                                (
                                    select
                                        id,
                                        jsonb_array_elements(values -> 'keynamed') as datasets
                                    from
                                        table
                                ) s
                        ) s
                ) s
        ) s,
        jsonb_each(_json) as t(k, v)
    group by id
) tmp
where tmp.id = t.id;
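If the nesting becomes unwieldy, the same idea can be written more compactly with a lateral join. This is only a minimal sketch: my_table, old_value and new_value are placeholder names, and the comment marks where the CASE ... END mapping elided in the question would go.
update my_table t
set new_value = tmp._json
from (
    select
        s.id,
        -- the elided CASE ... END mapping would wrap elem ->> 'type' here
        jsonb_object_agg(elem ->> 'type', elem -> 'value') as _json
    from my_table s
    cross join lateral jsonb_array_elements(s.old_value -> 'keynamed') as elem
    group by s.id
) tmp
where tmp.id = t.id;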

Postgres - Query Specific Value in an Array of Arrays

I have a column returned from my query that is an array of arrays. It looks like this.
array_vals: {{NULL,NULL,147.89,87.27,82.65,83.41,93.69,101.90,NULL,NULL}}
When I try to query the first array inside my array, I always get NULL values.
select array_vals[0] from table
This returns null, but my desired result would be {NULL,NULL,147.89,87.27,82.65,83.41,93.69,101.90,NULL,NULL}
with test as (
    select 1 as value, array_agg(array[1, 3, 4]) as data
    group by value
)
select data[1] from test
This returns null for me.
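For context: Postgres arrays are 1-based, and a value displayed as {{...}} is a single two-dimensional array rather than an array of arrays, so a lone subscript like data[1] (or data[0]) yields NULL. A minimal sketch of two ways to get at the first row, reusing the test data above:
with test as (
    select array_agg(array[1, 3, 4]) as data          -- builds the 2-D array {{1,3,4}}
)
select
    data[1:1] as first_slice,                         -- {{1,3,4}}, still two-dimensional
    (select array_agg(x)
     from unnest(data[1:1]) as t(x)) as flattened     -- {1,3,4}, flattened to one dimension
from test;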

How to lower-case all the elements of a JSONB array of strings of each row in a table

I have a table with a field called "data" which is of JSONB type. The content of "data" is an object with one of the fields called "associated_emails", which is an array of strings.
I need to update the existing table so that the content of "associated_emails" is all lower-case. How can I achieve that? This is my attempt so far (it triggers the error: ERROR: cannot extract elements from a scalar):
update mytable my
set
    "data" = safe_jsonb_set(
        my."data",
        '{associated_emails}',
        to_jsonb(
            lower(
                (
                    SELECT array_agg(x)
                    FROM jsonb_array_elements_text(
                        coalesce(
                            my."data"->'associated_emails',
                            '{}'::jsonb
                        )
                    ) t(x)
                )::text[]::text
            )::text[]
        )
    )
where
my.mytype = 'something';
You would like to use JSONB_SET and UPDATE the column with something like what is given below:
UPDATE jsonb_test
SET data = JSONB_SET(data, '{0,associated_emails}',
JSONB(LOWER(data ->> 'associated_emails'::TEXT)));
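If safe_jsonb_set isn't available, a minimal sketch of an alternative using the standard jsonb_set(): rebuild the array by lowercasing each element and aggregating it back (mytable, data and mytype are the names from the question; the jsonb_typeof guard is an added safety check):
update mytable my
set "data" = jsonb_set(
    my."data",
    '{associated_emails}',
    (
        select coalesce(jsonb_agg(lower(x)), '[]'::jsonb)
        from jsonb_array_elements_text(my."data" -> 'associated_emails') as t(x)
    )
)
where my.mytype = 'something'
  and jsonb_typeof(my."data" -> 'associated_emails') = 'array';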

How to check if json inner field is EMPTY?

One of the columns (called details) in my table is of jsonb data type and has a data format something like this:
{"media_height":"350", "media_height":"450", "media_alt":"", "file_name":"myfile.jpeg"}
I am using this field in a CASE WHEN because I want to mark the records that are missing alt text.
SELECT
distinct ON ( p.property_name )
p.id, p.property_name,
CASE
WHEN mma.id IS NULL THEN 'Z'
WHEN mma.details->'media_alt'::TEXT IS NULL THEN 'NO'
ELSE 'YES' END as has_media_alt
FROM properties p
LEFT JOIN marketing_media_associations mma ON ( mma.reference_id = p.id )
GROUP BY p.id, p.property_name , mma.details->'media_alt', mma.id
ORDER BY p.property_name, has_media_alt ASC
The above query gives me accurate results for Z, but it never goes into the NO branch. What am I missing here?
An empty string is not the same as NULL, you probably want:
WHEN nullif(mma.details->>'media_alt', '') IS NULL THEN 'NO'
You don't need to cast to text if you use ->>, which returns the value as text directly.
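A quick check of the empty-string vs NULL distinction (the literals below are only illustrative):
select
    nullif('{"media_alt":""}'::jsonb ->> 'media_alt', '') is null        as empty_alt,    -- true
    nullif('{"media_alt":"a house"}'::jsonb ->> 'media_alt', '') is null as filled_alt,   -- false
    nullif('{"file_name":"x.jpeg"}'::jsonb ->> 'media_alt', '') is null  as missing_alt;  -- true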

Postgresql order by case when {someCase} then json type column

I need to order the result of a select in a few different ways.
It works when the sort column is a regular column from the TenderItem table.
But it does NOT work when it is a key from the json-type column TenderItem.ItemInfo, e.g.
select * from "TenderItem" order by "ItemInfo" ->> 'Name'; -- working in simple select
with sortingParams (columnName, isAsc) AS (VALUES ('ItemId', true))
select *
FROM "TenderItem" i, sortingParams
WHERE i."TenderId" = 1
AND i."ItemInfo" ->> 'Name' like '%Transcend%'
ORDER BY
case
WHEN columnName like '%ItemId%' THEN i."ItemId" --*work
WHEN columnName like '%ABCSegment%' THEN i."ItemInfo" ->> 'ABCSegment' --**
end desc;
** on this line I get the message "ERROR: CASE types bigint and text cannot be matched"
It's not clear how you'd sort the ItemId against the ItemInfo segment (unless that points to an item id), since they are not all text values (and even if they were all text, strings like '12345' should not be text-sorted, since then '100' would come before '99'). You probably want them to be separate sort conditions to give more flexibility in ordering:
with sortingParams (columnName, isAsc) AS (VALUES ('ItemId', true))
select *
FROM "TenderItem" i, sortingParams
WHERE i."TenderId" = 1
AND i."ItemInfo" ->> 'Name' like '%Transcend%'
ORDER BY
case
WHEN columnName like '%ItemId%' THEN i."ItemId"::bigint end asc nulls last --puts things with an itemID ahead of those without, or could use nulls first
--if two items have same item id, then sort by segment
, case
WHEN columnName like '%ABCSegment%' THEN i."ItemInfo" ->> 'ABCSegment'
end desc;
Note that each sort condition must give the same datatype for every row being evaluated! This is what causes the error you described, where the CASE expression gives a bigint for ItemId and a text value for ItemInfo ->> 'ABCSegment'.
ItemId is BIGINT and i."ItemInfo" ->> 'ABCSegment' is text, which are incompatible types to sort on within one CASE.
Try casting the value explicitly to BIGINT, i.e.
..WHEN columnName like '%ABCSegment%' THEN (i."ItemInfo" ->> 'ABCSegment')::BIGINT
or make i."ItemId" a text if the above fails due to invalid bigint values.
i."ItemId"::TEXT