How to update foreign keys in a different table after the foreign key element was created in Postgres with SQLAlchemy

I have 2 tables with a one-to-many relationship: a parent can have many children, and a child can only have one parent.
import uuid

from sqlalchemy import Column, ForeignKey, Text
from sqlalchemy.dialects.postgresql import ARRAY, UUID
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import backref, relationship

Base = declarative_base()

class Parent(Base):
    __tablename__ = "parent"
    id = Column(UUID(as_uuid=True), primary_key=True, index=True, nullable=False, default=uuid.uuid4)
    children_ids = Column(ARRAY(Text), server_default="{}")

class Child(Base):
    __tablename__ = "child"
    id = Column(UUID(as_uuid=True), primary_key=True, index=True, default=uuid.uuid4)
    parent_id = Column(UUID(as_uuid=True), ForeignKey('parent.id', ondelete='CASCADE'), nullable=False)
    parent = relationship("Parent", backref=backref("child", passive_deletes=True, passive_updates=True))
I currently create a child entity with the sessionmaker and call add() and commit(). I then have to manually update the parent entity and append the Child.id to the children_ids column. Is there a way for SQLAlchemy to trigger that update automatically, or do I always have to do it manually?
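For reference, a minimal self-contained sketch of the usual alternative, where the one-to-many collection is derived from Child.parent_id by relationship() instead of being stored in a children_ids array, so nothing has to be appended manually (this restructuring is an assumption, not the original models):
import uuid

from sqlalchemy import Column, ForeignKey
from sqlalchemy.dialects.postgresql import UUID
from sqlalchemy.orm import declarative_base, relationship

Base = declarative_base()

class Parent(Base):
    __tablename__ = "parent"
    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    # Computed from Child.parent_id at query time; no array column to maintain.
    children = relationship("Child", back_populates="parent", passive_deletes=True)

class Child(Base):
    __tablename__ = "child"
    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    parent_id = Column(UUID(as_uuid=True), ForeignKey("parent.id", ondelete="CASCADE"), nullable=False)
    parent = relationship("Parent", back_populates="children")

# parent.children.append(Child()) sets child.parent_id automatically on flush,
# so there is no separate UPDATE of the parent row to run.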

SQLAlchemy Joined Table Inheritance: Problem adding objects

I'm having problems adding inherited objects with SQLAlchemy, following the instructions in https://docs.sqlalchemy.org/en/14/orm/inheritance.html to implement joined table inheritance.
I'm using PostgreSQL 14 as the database engine.
Here is my Base configuration:
import os
from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session, sessionmaker
from sqlalchemy.ext.automap import automap_base
db_string = os.environ['DB_STRING']
engine = create_engine(db_string)
db_session = scoped_session(sessionmaker(autocommit=False,
                                         autoflush=False,
                                         bind=engine))
Base = automap_base()
Base.query = db_session.query_property()
Base.prepare(engine, reflect=True)
Here is the Instrument (parent class) definition:
from sqlalchemy import Column, String
from context_model.database import Base

class Instrument(Base):
    __tablename__ = "instrument"
    _id = Column(String, primary_key=True)
    discriminator = Column(String)
    __mapper_args__ = {
        'polymorphic_identity': 'instrument',
        'polymorphic_on': discriminator
    }
Here is the Bond (child class) definition:
import datetime as dt
from sqlalchemy import Column, Integer, Float, String, DateTime, ForeignKey
from sqlalchemy.orm import relationship
from context_model.instrument.instrument import Instrument

class Bond(Instrument):
    __tablename__ = "bond"
    _id = Column(String, ForeignKey("instrument._id"), primary_key=True)
    _provider_bbg_id = Column(String)
    __mapper_args__ = {
        'polymorphic_identity': 'bond'
    }
When I try to add a Bond instance and persist it to the database:
Bond = Base.classes.bond
bond = Bond()
bond._id = "0"
bond._provider_bbg_id = "XXX"
db_session.add(bond)
db_session.commit()
the following error message appears:
sqlalchemy.exc.IntegrityError: (psycopg2.errors.ForeignKeyViolation) insert or update on table "bond" violates foreign key constraint "bond__id_fkey"
DETAIL: Key (_id)=(0) is not present in table "instrument".
It seems to me that the inheritance is not working for some reason. Did I define the Instrument (parent) and Bond (child) classes correctly? Do I need to use another type of Base? (I'm using automap_base.)
Thanks in advance for your help!
I don't know the exact reason, but I changed from declarative_base() to automap_base(), as @snakecharmerb suggested in the comments, and now it works :)
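For reference, a minimal self-contained sketch of joined table inheritance against a plain declarative_base(), following the pattern described in the linked docs (the in-memory SQLite engine is only a stand-in for the real database URL):
from sqlalchemy import Column, ForeignKey, String, create_engine
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()

class Instrument(Base):
    __tablename__ = "instrument"
    _id = Column(String, primary_key=True)
    discriminator = Column(String)
    __mapper_args__ = {
        "polymorphic_identity": "instrument",
        "polymorphic_on": discriminator,
    }

class Bond(Instrument):
    __tablename__ = "bond"
    _id = Column(String, ForeignKey("instrument._id"), primary_key=True)
    _provider_bbg_id = Column(String)
    __mapper_args__ = {"polymorphic_identity": "bond"}

engine = create_engine("sqlite://")  # stand-in; the question uses PostgreSQL 14
Base.metadata.create_all(engine)

with Session(engine) as session:
    # Instantiating the declared Bond class (rather than a reflected
    # Base.classes.bond) inserts into "instrument" and "bond" in one commit.
    session.add(Bond(_id="0", _provider_bbg_id="XXX"))
    session.commit()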

Conflicts with relationship between tables

I keep getting a warning on the console, and no matter how much I've read I haven't been able to resolve it:
SAWarning: relationship 'Book.users' will copy column user.uid to column user_book.uid, which conflicts with relationship(s): 'User.books' (copies user.uid to user_book.uid). If this is not the intention, consider if these relationships should be linked with back_populates, or if viewonly=True should be applied to one or more if they are read-only. For the less common case that foreign key constraints are partially overlapping, the orm.foreign() annotation can be used to isolate the columns that should be written towards. The 'overlaps' parameter may be used to remove this warning.
The tables the warning refers to are these:
user_book = db.Table('user_book',
    db.Column('uid', db.Integer, db.ForeignKey('user.uid'), primary_key=True),
    db.Column('bid', db.Text, db.ForeignKey('book.bid'), primary_key=True),
    db.Column('date_added', db.DateTime(timezone=True), server_default=db.func.now())
)

class User(db.Model):
    __tablename__ = 'user'
    uid = db.Column(db.Integer, primary_key=True)
    email = db.Column(db.String(25), nullable=False)
    hash = db.Column(db.String(), nullable=False)
    first_name = db.Column(db.String(30), nullable=True)
    last_name = db.Column(db.String(80), nullable=True)
    books = db.relationship('Book', secondary=user_book)

class Book(db.Model):
    __tablename__ = 'book'
    bid = db.Column(db.Text, primary_key=True)
    title = db.Column(db.Text, nullable=False)
    authors = db.Column(db.Text, nullable=False)
    thumbnail = db.Column(db.Text, nullable=True)
    users = db.relationship('User', secondary=user_book)
I use the user_book table to show the user the books they have added.
What am I missing? While I'm at it: semantically, is the relationship between the tables and the foreign keys set up correctly?
As the warning message suggests, you are missing the back_populates= attributes in your relationships:
class User(db.Model):
    # …
    books = db.relationship('Book', secondary=user_book, back_populates="users")
    # …

class Book(db.Model):
    # …
    users = db.relationship('User', secondary=user_book, back_populates="books")
    # …
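A short usage sketch of what back_populates buys you with these models (the objects below are hypothetical):
user = User(email="a@example.com", hash="x")
book = Book(bid="b1", title="Some Title", authors="Some Author")
user.books.append(book)
# With back_populates, the reverse collection is kept in sync in memory
# before anything is flushed to the database:
assert book.users == [user]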
I kind of figured this out.
Here is the code from the official tutorial:
from sqlalchemy import Column, ForeignKey, Integer, String, Table
from sqlalchemy.orm import declarative_base, relationship

Base = declarative_base()

class User(Base):
    __tablename__ = "user"
    id = Column(Integer, primary_key=True)
    name = Column(String(64))
    kw = relationship("Keyword", secondary=lambda: user_keyword_table)

    def __init__(self, name):
        self.name = name

class Keyword(Base):
    __tablename__ = "keyword"
    id = Column(Integer, primary_key=True)
    keyword = Column("keyword", String(64))

    def __init__(self, keyword):
        self.keyword = keyword

user_keyword_table = Table(
    "user_keyword",
    Base.metadata,
    Column("user_id", Integer, ForeignKey("user.id"), primary_key=True),
    Column("keyword_id", Integer, ForeignKey("keyword.id"), primary_key=True),
)
Doesn't it make you wonder why the relationship only exists on the User class rather than on both classes?
The thing is, the reverse relationship on the Keyword class can be created automatically (a backref='users'-style parameter is required for that, I suppose?).
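A minimal self-contained sketch of that idea, with a backref added to the tutorial's User.kw relationship (the 'users' name is my assumption):
from sqlalchemy import Column, ForeignKey, Integer, String, Table
from sqlalchemy.orm import declarative_base, relationship

Base = declarative_base()

class User(Base):
    __tablename__ = "user"
    id = Column(Integer, primary_key=True)
    name = Column(String(64))
    # backref="users" makes SQLAlchemy create the reverse Keyword.users
    # collection automatically; back_populates would require declaring both sides.
    kw = relationship("Keyword", secondary=lambda: user_keyword_table, backref="users")

class Keyword(Base):
    __tablename__ = "keyword"
    id = Column(Integer, primary_key=True)
    keyword = Column(String(64))

user_keyword_table = Table(
    "user_keyword",
    Base.metadata,
    Column("user_id", Integer, ForeignKey("user.id"), primary_key=True),
    Column("keyword_id", Integer, ForeignKey("keyword.id"), primary_key=True),
)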

Two-level join Sqlalchemy

I have three model classes, representing three tables in my PostgreSQL database: Project, Label, and ProjectLabel. A project can have multiple labels:
class Project(db.Model):
    __tablename__ = 'projects'
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String())
    labels = db.relationship('ProjectLabel')

class Label(db.Model):
    __tablename__ = 'labels'
    label_id = db.Column(db.Integer, primary_key=True)
    label_name = db.Column(db.String())

class ProjectLabel(db.Model):
    __tablename__ = 'projects_labels'
    projectlabel_id = db.Column(db.Integer, primary_key=True)
    projectlabel_projectid = db.Column(db.Integer, db.ForeignKey('projects.id'))
    projectlabel_labelid = db.Column(db.Integer, db.ForeignKey('labels.label_id'))
How can I query the Project model so that I get objects from the labels table?
Specifically, how can I get the label_name of the labels assigned to a Project? I somehow need to connect the Project (labels) -> ProjectLabel -> Label classes.
This will get the related labels in long form:
db.session.query(Project.id,
                 Label.label_name)\
    .filter(ProjectLabel.projectlabel_projectid == Project.id)\
    .filter(Label.label_id == ProjectLabel.projectlabel_labelid)\
    .order_by(Project.id.asc()).all()
If you want the labels in comma-delimited lists, aggregate them with func.string_agg() (group_concat() is the MySQL equivalent; on PostgreSQL use string_agg()):
from sqlalchemy import func

db.session.query(Project.id,
                 func.string_agg(Label.label_name, ', ').label('related_labels'))\
    .filter(ProjectLabel.projectlabel_projectid == Project.id)\
    .filter(Label.label_id == ProjectLabel.projectlabel_labelid)\
    .group_by(Project.id)\
    .order_by(Project.id.asc()).all()
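Alternatively, a sketch of the same two-level traversal written with explicit join()s against the models above (some_project_id is a hypothetical value, not from the question):
labels = (
    db.session.query(Label.label_name)
    .join(ProjectLabel, ProjectLabel.projectlabel_labelid == Label.label_id)
    .join(Project, Project.id == ProjectLabel.projectlabel_projectid)
    .filter(Project.id == some_project_id)  # hypothetical project id
    .all()
)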

Is there a way of viewing the columns for relationships within pgAdmin?

I've begun populating the following tables inside my database:
from sqlalchemy import Column, ForeignKey, String
from sqlalchemy.orm import declarative_base, relationship

Base = declarative_base()

class ModelItem(Base):
    __tablename__ = 'item'
    name = Column('name', String, primary_key=True)
    set_id = Column(String, ForeignKey('set.name'))

class ModelSet(Base):
    __tablename__ = 'set'
    name = Column('name', String, primary_key=True)
    items = relationship('ModelItem', backref='set')
Everything seems to be working fine, since I can query the children of the parent record and get the expected data in my code. I'm just wondering if there's a way to see that same items column in pgAdmin, like I can with all the other columns of the parent table.
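For what it's worth, a relationship() such as items exists only at the ORM level; in PostgreSQL there is just the item.set_id foreign key column, so pgAdmin has no items column to display. The related rows can still be inspected by joining the two tables, as in the sketch below (the engine object is assumed to exist already); running the SELECT it emits in pgAdmin's Query Tool shows the same data:
from sqlalchemy.orm import Session

with Session(engine) as session:  # 'engine' is assumed to be configured elsewhere
    # Pairs each set with its items via the item.set_id foreign key.
    rows = (
        session.query(ModelSet.name, ModelItem.name)
        .outerjoin(ModelItem, ModelItem.set_id == ModelSet.name)
        .all()
    )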

Return set of unique values from multiple rows of arrays in sqlalchemy

I have 2 tables, one called Tasks and another called TaskUpdates (a one-to-many relationship). TaskUpdates has a column called tags, which is an array. I am trying to get back an array that contains only the unique values from TaskUpdates.tags.
from sqlalchemy import ARRAY, Column, ForeignKey, Integer, String
from sqlalchemy.orm import declarative_base, relationship

BASE = declarative_base()

class Task(BASE):
    __tablename__ = 'tasks'
    id = Column(Integer, primary_key=True)
    # One Task to Many Updates (back_populates has to be set on both sides)
    updates = relationship("TaskUpdate", back_populates="task")

class TaskUpdate(BASE):
    __tablename__ = 'task_updates'
    # Columns
    id = Column(Integer, primary_key=True)
    tags = Column(ARRAY(String(255)))
    task_id = Column(Integer, ForeignKey('tasks.id'))
    task = relationship('Task', back_populates="updates")
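A sketch of one way to get the distinct tags on PostgreSQL, flattening the arrays with unnest(); the session object and the task_id filter below are assumptions:
from sqlalchemy import func

# unnest() expands each tags array into one row per element;
# DISTINCT then removes duplicates across all matching updates.
tag_rows = (
    session.query(func.unnest(TaskUpdate.tags).label("tag"))
    .filter(TaskUpdate.task_id == task_id)  # hypothetical task id
    .distinct()
    .all()
)
unique_tags = [row.tag for row in tag_rows]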