PostgreSQL using COALESCE or other Conditional Expressions to SET field - postgresql

I have a PostgreSQL v10 DB with the following values:
CREATE TABLE test (
id INT,
custom_fields jsonb not null default '{}'::jsonb,
guest_profile_id character varying(100)
);
INSERT INTO test (id, custom_fields) VALUES (1, '[{"protelSurname": "Smith", "servicio_tags": ["protel-info"], "protelUniqueID": "[{\"ID\":\"Test1-ID\",\"Type\":\"21\",\"ID_Context\":\"GHA\"}{\"ID\":\"4842148\",\"Type\":\"1\",\"ID_Context\":\"protelIO\"}]", "protelGivenName": "Seth"}, {"value": "Test", "display_name": "Traces", "servicio_tags": ["trace"]}, {...}]');
INSERT INTO test (id, custom_fields) VALUES (2, '[{"protelSurname": "Smith", "servicio_tags": ["protel-info"], "protelUniqueID": "[{\"ID\":\"Test2-ID\",\"Type\":\"21\",\"ID_Context\":\"GHA\"},{\"ID\":\"4842148\",\"Type\":\"1\",\"ID_Context\":\"protelIO\"}]", "protelGivenName": "Seth"}, {"value": "Test2", "display_name": "Traces", "servicio_tags": ["trace"]}, {...}]');
INSERT INTO test (id, custom_fields) VALUES (3, '[{"value": "Test3-ID", "display_name": "Test", "servicio_tags": ["profile-id"]}, {...}]');
INSERT INTO test (id, custom_fields) VALUES (4, '[{"value": "Test4-ID", "display_name": "Test", "servicio_tags": ["person-name"]}, {...}]');
I have a query which works and saves values from the custom_fields column to the guest_profile_id column in the same row:
UPDATE guest_group
SET guest_profile_id = (
SELECT x ->> 'ID'
FROM jsonb_array_elements(custom_fields) AS field,
jsonb_array_elements((field ->> 'protelUniqueID') :: jsonb) AS dd(x)
WHERE value #> '{"servicio_tags": ["protel-info"]}'::jsonb
AND x->>'ID_Context' = 'protelIO'
);
But this only works for the first two rows. Therefore I want to use the next query snippets in order to copy Test3-ID in row 3 to the guest_profile_id column and Test4-ID in row 4 to the guest_profile_id column.
1.
SELECT field ->>'value'
cross join lateral jsonb_array_elements(custom_fields) AS field
WHERE value #> '{"servicio_tags": ["profile-id"]}'::jsonb
2.
SELECT field ->>'value'
cross join lateral jsonb_array_elements(custom_fields) AS field
WHERE value #> '{"servicio_tags": ["person-name"]}'::jsonb
My problem: I do not know how to use COALESCE or other conditional expressions in order to chain those small queries. This should be possible, because if the first query returns NULL for the field, COALESCE should ignore that value and move on to the next query snippet.
Desired result: I want all the TestX-ID values from the table above to be copied to the guest_profile_id column in the same row.
My try:
UPDATE test
SET guest_profile_id = COALESCE((
SELECT x ->> 'ID'
FROM jsonb_array_elements(custom_fields) AS field,
jsonb_array_elements((field ->> 'protelUniqueID') :: jsonb) AS dd(x)
WHERE value #> '{"servicio_tags": ["protel-info"]}'::jsonb
AND x->>'ID_Context' = 'protelIO'),(
SELECT field ->>'value'
cross join lateral jsonb_array_elements(custom_fields) AS field
WHERE value #> '{"servicio_tags": ["profile-id"]}'::jsonb),(
SELECT field ->>'value'
cross join lateral jsonb_array_elements(custom_fields) AS field
WHERE value #> '{"servicio_tags": ["person-name"]}'::jsonb));
Gives me:
ERROR: syntax error at or near "cross"
LINE 9: cross join lateral jsonb_array_elements(custom_fields) ...
Thanks a lot for the help!

Adding some more brackets around the select queries did the job:
guest_profile_id = COALESCE((first_select_query),((second_select_query)), ((…)))
UPDATE test
SET guest_profile_id = COALESCE((
SELECT x ->> 'ID'
FROM jsonb_array_elements(custom_fields) AS field,
jsonb_array_elements((field ->> 'protelUniqueID') :: jsonb) AS dd(x)
WHERE value #> '{"servicio_tags": ["protel-info"]}'::jsonb
AND x->>'ID_Context' = 'protelIO' LIMIT 1),((
SELECT field ->>'value'
FROM jsonb_array_elements(custom_fields) AS field
WHERE value #> '{"servicio_tags": ["profile-id"]}'::jsonb LIMIT 1)), ((
SELECT field ->>'value'
FROM jsonb_array_elements(custom_fields) AS field
WHERE value #> '{"servicio_tags": ["person-name"]}'::jsonb LIMIT 1
)));
Here is the link to a fiddle: Query which works

Related

Possible to set explicit type in CTE?

I can create a CTE that contains named columns of inferred type with:
WITH employees(name, salary) AS (
SELECT 'Paul', 10000
) SELECT * FROM employees
Is there a way to specify the type in the CTE itself, for example doing:
WITH employees(name , salary DECIMAL(8,2)) AS (
SELECT 'Paul', 10000
) SELECT * FROM employees
Or do I have to always do it within the SELECT statement, such as:
WITH employees(name, salary) AS (
SELECT 'Paul', DECIMAL(8,2) '10000'
) SELECT * FROM employees
You can cast the values in the CTE:
WITH employees(name, salary) AS (
select 'Paul', cast(10000 as decimal(8,2))
)
SELECT *
FROM employees
Or using a VALUES clause which removes the need for a UNION if multiple rows are needed.
WITH employees(name, salary) AS (
values
('Paul', cast(10000 as decimal(8,2))),
('Peter', 5000)
)
SELECT *
FROM employees
The cast is only needed in the first row, because that defines the data type of that column for all other rows from the VALUES clause (or a SELECT with a UNION).
I've run your SQL in SQL Fiddle and the only way to coerce the type in the result set I've found is below:
WITH employees(name , salary ) AS
(
SELECT 'Paul', cast( 10000 as DECIMAL(8,2))
)
SELECT * FROM employees

Query matching property in another table given a comma-separated string in JSONB

I would like to look up a property in another table B, where the source is part of a comma-separated string inside a JSONB column of table A.
create table option
(
optionid bigint not null primary key,
attributevalues jsonb default '{}'::jsonb
);
create table district
(
districtid bigint not null primary key,
uid varchar(11) not null,
name varchar(230) not null unique
);
INSERT into option values (1, '{"value": "N8UXIAycxy3,uVwyu3R4nZG,fuja8k8PCFO,y0eUmlYp7ey", "attribute": {"id": "K54wAf6EX0s"}}'::jsonb);
INSERT INTO district (districtid, uid, name) VALUES
(1, 'N8UXIAycxy3', 'district1'),
(2, 'uVwyu3R4nZG', 'district2'),
(3, 'fuja8k8PCFO', 'district3'),
(4, 'y0eUmlYp7ey', 'district4');
I can get all the items split by , but how do I "join" to look up the name (e.g. N8UXIAycxy3 --> district1)?
I tried to "join" in a traditional sense but this will not work as the district_uid is not accessible for the query as such:
SELECT UNNEST(STRING_TO_ARRAY(co.attributevalues #>> '{"K54wAf6EX0s", "value"}', ',')) AS district_uid
FROM option o
JOIN district d on district_uid = d.uid;
I would like to have the query result: district1,district2,district3,district4. Is this possible or do I need a loop?
DB Fiddle
You need to convert to array the comma separated string, i.e. attributevalues->>'value':
select name
from option
cross join unnest(string_to_array(attributevalues->>'value', ',')) as district_uid
join district on uid = district_uid
DB fiddle.

PostgreSQL transform value from jsonb column to other column

I have a PostgreSQL database v10 with the following data:
CREATE TABLE test (
id INT,
custom_fields jsonb not null default '{}'::jsonb,
guest_profile_id character varying(100)
);
INSERT INTO test (id, custom_fields) VALUES (1, '[{"protelSurname": "Smith", "servicio_tags": ["protel-info"], "protelUniqueID": "[{\"ID\":\"Test1-ID\",\"Type\":\"21\",\"ID_Context\":\"GHA\"}{\"ID\":\"4842148\",\"Type\":\"1\",\"ID_Context\":\"protelIO\"}]", "protelGivenName": "Seth"}, {"value": "Test", "display_name": "Traces", "servicio_tags": ["trace"]}, {...}]');
INSERT INTO test (id, custom_fields) VALUES (2, '[{"protelSurname": "Smith", "servicio_tags": ["protel-info"], "protelUniqueID": "[{\"ID\":\"Test2-ID\",\"Type\":\"21\",\"ID_Context\":\"GHA\"},{\"ID\":\"4842148\",\"Type\":\"1\",\"ID_Context\":\"protelIO\"}]", "protelGivenName": "Seth"}, {"value": "Test2", "display_name": "Traces", "servicio_tags": ["trace"]}, {...}]');
INSERT INTO test (id, custom_fields) VALUES (3, '[{"value": "Test3-ID", "display_name": "Test", "servicio_tags": ["person-name"]}, {...}]');
INSERT INTO test (id, custom_fields) VALUES (4, '[{"value": "Test4-ID", "display_name": "Test", "servicio_tags": ["profile-id"]}, {...}]');
There are way more records in the real table.
Goal: I want to transfer the TestX-ID values into the column guest_profile_id in the same row. And only those values not the other JSONB objects or values etc.
My try:
do $$
declare
colvar varchar;
begin
select x ->> 'ID' from (select jsonb_array_elements(f) from (
select (field ->>'protelUniqueID')::jsonb f
FROM guest_group gg,
lateral jsonb_array_elements(custom_fields) AS field
WHERE value #> '{"servicio_tags": ["protel-info"]}'::jsonb
) d(f)) dd(x)
where x->>'ID_Context'='protelIO'
into colvar;
raise notice 'colvar: %', colvar;
end
$$;
execute format('UPDATE guest_group SET guest_profile_id = %s', colvar);
My Result: It only takes Test1-ID and stores it in all rows in the guest_profile_id column.
My Problem: I want to store each TestX-ID in the custom_fields column into the guest_profile_id column in the same row.
My assumption: I need to add a loop to this query. If the query up there does not find any value, the loop should try the next query: e.g.:
SELECT field ->>'value'
FROM guest_group gg
cross join lateral jsonb_array_elements(custom_fields) AS field
WHERE value #> '{"servicio_tags": ["profile-id"]}'::jsonb
And then the next:
SELECT field ->>'value'
FROM guest_group gg
cross join lateral jsonb_array_elements(custom_fields) AS field
WHERE value #> '{"servicio_tags": ["person-name"]}'::jsonb
When all TestX-ID values are copied into the guest_profile_id column in the same row, the goal is reached.
How can I put all this together? Thanks a lot for the help.
I want to store each TestX-ID in the custom_fields column into the guest_profile_id column in the same row.
No need for PL/PGSQL, loops or dynamic sql. Just use a single query of the form
UPDATE guest_group
SET guest_profile_id = (/* complex expression */);
In your case, with that complex expression it amounts to
UPDATE guest_group
SET guest_profile_id = (
SELECT x ->> 'ID'
FROM jsonb_array_elements(custom_fields) AS field,
jsonb_array_elements(field ->> 'protelUniqueID') AS dd(x)
WHERE value #> '{"servicio_tags": ["protel-info"]}'::jsonb
AND x->>'ID_Context' = 'protelIO'
);
If the query up there does not find any value, it should try the next query
You can use the COALESCE function for that, or add some OR conditions to your query, or even use a UNION. Alternatively, add a WHERE guest_profile_id IS NULL to the update statement to exclude those rows that already have a value, and do multiple successive updates.

How to insert a single row in the parent table and then multiple rows in the child table in single SQL in PostgreSQL?

Please, find below my schema:
CREATE TABLE reps (
id SERIAL PRIMARY KEY,
rep TEXT NOT NULL UNIQUE
);
CREATE TABLE terms (
id SERIAL PRIMARY KEY,
terms TEXT NOT NULL UNIQUE
);
CREATE TABLE shipVia (
id SERIAL PRIMARY KEY,
ship_via TEXT NOT NULL UNIQUE
);
CREATE TABLE invoices (
id SERIAL PRIMARY KEY,
customer TEXT NOT NULL CONSTRAINT customerNotEmpty CHECK(customer <> ''),
term_id INT REFERENCES terms,
rep_id INT NOT NULL REFERENCES reps,
ship_via_id INT REFERENCES shipVia,
...
item_count INT NOT NULL,
modified TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
created TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
version INT NOT NULL DEFAULT 0
);
CREATE TABLE invoiceItems (
id SERIAL PRIMARY KEY,
invoice_id INT NOT NULL REFERENCES invoices ON DELETE CASCADE,
name TEXT NOT NULL CONSTRAINT nameNotEmpty CHECK(name <> ''),
description TEXT,
qty INT NOT NULL CONSTRAINT validQty CHECK (qty > 0),
price DOUBLE PRECISION NOT NULL
);
I am trying to insert an invoice along with its invoice items in one SQL using writable CTE. I am currently stuck with the following SQL statement:
WITH new_invoice AS (
INSERT INTO invoices (id, customer, term_id, ship_via_id, rep_id, ..., item_count)
SELECT $1, $2, t.id, s.id, r.id, ..., $26
FROM reps r
JOIN terms t ON t.terms = $3
JOIN shipVia s ON s.ship_via = $4
WHERE r.rep = $5
RETURNING id
) INSERT INTO invoiceItems (invoice_id, name, qty, price, description) VALUES
(new_invoice.id,$27,$28,$29,$30)
,(new_invoice.id,$31,$32,$33,$34)
,(new_invoice.id,$35,$36,$37,$38);
Of course, this SQL is wrong, here is what PostgreSQL 9.2 has to say about it:
ERROR: missing FROM-clause entry for table "new_invoice"
LINE 13: (new_invoice.id,$27,$28,$29,$30)
^
********** Error **********
ERROR: missing FROM-clause entry for table "new_invoice"
SQL state: 42P01
Character: 704
Is it possible at all?
EDIT 1
I am trying the following version:
PREPARE insert_invoice_3 AS WITH
new_invoice AS (
INSERT INTO invoices (id, customer, term_id, ship_via_id, rep_id, ..., item_count)
SELECT $1, $2, t.id, s.id, r.id, ..., $26
FROM reps r
JOIN terms t ON t.terms = $3
JOIN shipVia s ON s.ship_via = $4
WHERE r.rep = $5
RETURNING id
),
v (name, qty, price, description) AS (
VALUES ($27,$28,$29,$30)
,($31,$32,$33,$34)
,($35,$36,$37,$38)
)
INSERT INTO invoiceItems (invoice_id, name, qty, price, description)
SELECT new_invoice.id, v.name, v.qty, v.price, v.description
FROM v, new_invoice;
And here is what I get in return:
ERROR: column "qty" is of type integer but expression is of type text
LINE 19: SELECT new_invoice.id, v.name, v.qty, v.price, v.descriptio...
^
HINT: You will need to rewrite or cast the expression.
********** Error **********
ERROR: column "qty" is of type integer but expression is of type text
SQL state: 42804
Hint: You will need to rewrite or cast the expression.
Character: 899
I guess v (name, qty, price, description) is not enough; the data types must be specified as well. However, v (name, qty INT, price, description) does not work — syntax error.
EDIT 2
Next, I have just tried the second version:
PREPARE insert_invoice_3 AS WITH
new_invoice AS (
INSERT INTO invoices (id, customer, term_id, ship_via_id, rep_id, ..., item_count)
SELECT $1, $2, t.id, s.id, r.id, ..., $26
FROM reps r
JOIN terms t ON t.terms = $3
JOIN shipVia s ON s.ship_via = $4
WHERE r.rep = $5
RETURNING id
)
INSERT INTO invoiceItems (invoice_id, name, qty, price, description)
(
SELECT i.id, $27, $28, $29, $30 FROM new_invoice i
UNION ALL
SELECT i.id, $31, $32, $33, $34 FROM new_invoice i
UNION ALL
SELECT i.id, $35, $36, $37, $38 FROM new_invoice i
);
Here is what I get:
ERROR: column "qty" is of type integer but expression is of type text
LINE 15: SELECT i.id, $27, $28, $29, $30 FROM new_invoice i
^
HINT: You will need to rewrite or cast the expression.
********** Error **********
ERROR: column "qty" is of type integer but expression is of type text
SQL state: 42804
Hint: You will need to rewrite or cast the expression.
Character: 759
Seems like the same error. It is interesting that if I remove all the UNION ALL and leave just one SELECT statement - it works!
EDIT 3
Why do I have to cast the parameters? Is it possible to specify the type of columns in the CTE?
PostgreSQL has such an extended interpretation of the VALUES clause that it may be used as a subquery by itself.
So you may express your query in this form:
WITH new_invoice AS (
INSERT INTO ...
RETURNING id
),
v(a,b,c,d) AS (values
($27,$28,$29,$30),
($31,$32,$33,$34),
...
)
INSERT INTO invoiceItems (invoice_id, name, qty, price, description)
SELECT new_invoice.id, a,b,c,d FROM v, new_invoice;
That assumes you want to insert the cartesian product of new_invoice and the values, which mostly makes sense if new_invoice is actually a single-row value.
WITH new_invoice AS (
INSERT INTO invoices ...
RETURNING id
)
INSERT INTO invoiceItems (invoice_id, name, qty, price, description)
VALUES ((select id from new_invoice), $27 , $28, $29, $30),
((select id from new_invoice), $31 , $32, $33, $34),
((select id from new_invoice), $35 , $36, $37, $38);
Instead of insert ... values ...., use insert ... select ...:
) INSERT INTO invoiceItems (invoice_id, name, qty, price, description)
SELECT new_invoice.id,$27,$28,$29,$30 FROM new_invoice
UNION ALL
...

Select value from an enumerated list in PostgreSQL

I want to select from an enumeration that is not in the database.
E.g. SELECT id FROM my_table returns values like 1, 2, 3
I want to display 1 -> 'chocolate', 2 -> 'coconut', 3 -> 'pizza' etc. SELECT CASE works but is too complicated and hard to keep an overview of for many values. I think of something like
SELECT id, array['chocolate','coconut','pizza'][id] FROM my_table
But I couldn't succeed with arrays. Is there an easy solution? So this is a simple query, not a plpgsql script or something like that.
with food (fid, name) as (
values
(1, 'chocolate'),
(2, 'coconut'),
(3, 'pizza')
)
select t.id, f.name
from my_table t
join food f on f.fid = t.id;
or without a CTE (but using the same idea):
select t.id, f.name
from my_table t
join (
values
(1, 'chocolate'),
(2, 'coconut'),
(3, 'pizza')
) f (fid, name) on f.fid = t.id;
This is the correct syntax:
SELECT id, (array['chocolate','coconut','pizza'])[id] FROM my_table
But you should create a referenced table with those values.
What about creating another table that enumerates all the cases, and doing a join?
CREATE TABLE table_case
(
case_id bigserial NOT NULL,
case_name character varying,
CONSTRAINT table_case_pkey PRIMARY KEY (case_id)
)
WITH (
OIDS=FALSE
);
and when you select from your table:
SELECT id, case_name FROM my_table
inner join table_case on case_id=my_table_id;