SQLAlchemy Core CREATE TEMPORARY TABLE AS - postgresql

I'm trying to use the PostgreSQL CREATE TEMPORARY TABLE foo AS SELECT... query in SQLAlchemy Core. I've looked through the docs but don't see a way to do this.
I have a SQLA statement object. How do I create a temporary table from its results?

This is what I came up with. Please tell me if this is the wrong way to do it.
from sqlalchemy.sql import Select
from sqlalchemy.ext.compiler import compiles

class CreateTableAs(Select):
    """Create a CREATE TABLE AS SELECT ... statement."""

    def __init__(self, columns, new_table_name, is_temporary=False,
                 on_commit_delete_rows=False, on_commit_drop=False, *arg, **kw):
        """By default the table sticks around after the transaction. You can
        change this behavior using the `on_commit_delete_rows` or
        `on_commit_drop` arguments.

        :param on_commit_delete_rows: All rows in the temporary table will be
            deleted at the end of each transaction block.
        :param on_commit_drop: The temporary table will be dropped at the end
            of the transaction block.
        """
        super(CreateTableAs, self).__init__(columns, *arg, **kw)
        self.is_temporary = is_temporary
        self.new_table_name = new_table_name
        self.on_commit_delete_rows = on_commit_delete_rows
        self.on_commit_drop = on_commit_drop
@compiles(CreateTableAs)
def s_create_table_as(element, compiler, **kw):
    """Compile the statement."""
    text = compiler.visit_select(element)
    spec = ['CREATE', 'TABLE', element.new_table_name, 'AS SELECT']
    if element.is_temporary:
        spec.insert(1, 'TEMPORARY')

    on_commit = None
    if element.on_commit_delete_rows:
        on_commit = 'ON COMMIT DELETE ROWS'
    elif element.on_commit_drop:
        on_commit = 'ON COMMIT DROP'
    if on_commit:
        spec.insert(len(spec) - 1, on_commit)

    # Replace only the leading SELECT; a bare .replace() would also rewrite
    # any SELECT inside subqueries.
    text = text.replace('SELECT', ' '.join(spec), 1)
    return text
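For reference, a minimal usage sketch (untested; the users table and engine URL are hypothetical, and this relies on the 1.x-style Select constructor used above):

import sqlalchemy as sa

engine = sa.create_engine('postgresql:///test')
meta = sa.MetaData()
users = sa.Table('users', meta,
                 sa.Column('id', sa.Integer, primary_key=True),
                 sa.Column('name', sa.String))

stmt = CreateTableAs([users.c.id, users.c.name], 'tmp_users',
                     is_temporary=True, on_commit_drop=True)
with engine.begin() as conn:
    # should emit: CREATE TEMPORARY TABLE tmp_users ON COMMIT DROP AS SELECT ...
    conn.execute(stmt)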

Related

Retrieve data when clicking a button

Trying to receive input from WCT_Control into WCT_DataPull
Can't figure out how to get the data into WCT_DataPull to perform an action with it. I think I am going about this backwards, but I also think I have been staring at it too long.
Essentially, the user enters the information necessary into a GUI to connect to a specific (predetermined) SQL table, then saves the data in the table and outputs it as a CSV file backup.
I want clicking the submit button to create the backup. At this point, when I click the button it stores all the data in the variables (if I put a print statement in, I see the correct values), but I can't figure out how to get the variables to WCT_DataPull, where the backup creation is performed.
WCT_Control
from PyQt5.QtWidgets import *
from WCT_View import Ui_MainWindow

class Controller(QMainWindow, Ui_MainWindow):
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.setupUi(self)
        self.run.clicked.connect(lambda: self.submit())

    def submit(self):
        self.run.clicked.connect()
        server = self.server_entry.text()
        database = self.data_entry.text()
        station = self.station_entry.text()
        app = self.app_entry.text()
        backup_name = self.filename_entry.text()
        self.server_entry.setText('')
        self.data_entry.setText('')
        self.station_entry.setText('')
        self.app_entry.setText('')
        self.filename_entry.setText('')
        return server, database, station, app, backup_name
WCT_DataPull
from WCT_Control import *
import pyodbc
import csv

pull_data = Controller()

def write_bak():
    driver = 'ODBC Driver 17 for SQL Server'
    serv, data, stat, app, bak_name = pull_data.submit()
    conn = pyodbc.connect('DRIVER={0};SERVER={1};DATABASE={2};Trusted_Connection=yes'.format(driver, serv, data))
    cursor = conn.cursor()
    rows = cursor.execute("""
        select DnsName, PackageName, Code, Value from WorkstationApplicationSettings
        where DnsName=? and PackageName=?
        """, stat, app).fetchall()
    for row in rows:
        print(row.PackageName, ':', row.Code, ':', row.Value)
    with open(bak_name, 'w', newline='') as f:
        writer = csv.writer(f)
        writer.writerows(rows)
You just have to do things the opposite way: instead of using the PyQt5 controller inside the WCT_DataPull script, call the WCT_DataPull function from the PyQt5 script.
WCT_Control
from PyQt5.QtWidgets import *
from WCT_View import Ui_MainWindow
from WCT_DataPull import write_bak

class Controller(QMainWindow, Ui_MainWindow):
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.setupUi(self)
        self.run.clicked.connect(lambda: self.submit())

    def submit(self):
        server = self.server_entry.text()
        database = self.data_entry.text()
        station = self.station_entry.text()
        app = self.app_entry.text()
        backup_name = self.filename_entry.text()
        self.server_entry.setText('')
        self.data_entry.setText('')
        self.station_entry.setText('')
        self.app_entry.setText('')
        self.filename_entry.setText('')
        write_bak(server, database, station, app, backup_name)
WCT_DataPull
import pyodbc
import csv

def write_bak(serv, data, stat, app, bak_name):
    driver = 'ODBC Driver 17 for SQL Server'
    conn = pyodbc.connect('DRIVER={0};SERVER={1};DATABASE={2};Trusted_Connection=yes'.format(driver, serv, data))
    cursor = conn.cursor()
    rows = cursor.execute("""
        select DnsName, PackageName, Code, Value from WorkstationApplicationSettings
        where DnsName=? and PackageName=?
        """, stat, app).fetchall()
    for row in rows:
        print(row.PackageName, ':', row.Code, ':', row.Value)
    with open(bak_name, 'w', newline='') as f:
        writer = csv.writer(f)
        writer.writerows(rows)
Also, make sure to write the code that runs WCT_Control using a QApplication.
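A minimal entry point might look like this (a sketch; the module and class names are taken from the posts above):

import sys
from PyQt5.QtWidgets import QApplication
from WCT_Control import Controller

if __name__ == '__main__':
    app = QApplication(sys.argv)
    window = Controller()
    window.show()
    sys.exit(app.exec_())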

SQLAlchemy Asyncio ORM Unable to Query Database When Retrieving Tables and Columns from MetaData

Using SQLAlchemy async ORM 1.4, Postgres backend, Python 3.7
I am using an augmented Declarative Base with the SA ORM. The tables are not held in models.py but are committed directly to the database by parsing a JSON script that contains all the table schemas. Because of this, I can't import the models at the top of the script like from models import ThisTable.
So to work with CRUD operations on the tables, I first retrieve them by reflecting the metadata.
In the 'usual' way, when importing all the tables at the top of the script, a query like this works:
result = await s.execute(select(func.sum(TableName.column)))
curr = result.all()
When I try to reflect the table and column objects from the MetaData in order to query them, this doesn't work. There are lots of AttributeError: 'Table' object has no attribute 'func' or TypeError: 'Table' object is not callable errors.
def retrieve_table_obj(table):
    meta = MetaData()
    meta.reflect(bind=sync_engine)
    return meta.tables[table]

def retrieve_table_cols(self, table):
    table = retrieve_table_obj('users')
    return table.columns.keys()

async def reading(collection, modifications):
    table = db.retrieve_table_obj(collection)
    columns = db.retrieve_table_cols(collection)
    for c in columns:
        for f in modifications['fields']:
            if c in f:
                q = select(func.sum(table.c))
                result = await s.execute(q)
                curr = result.all()

asyncio.run(reading("users", {'fields': ["usage", "allowance"]}))
How can I query tables and columns in the database when they first have to be explicitly retrieved?
The automap extension can be used to automatically reflect database tables to SQLAlchemy models. However automap uses inspect on the engine, and this isn't supported on async engines; we can work around this by doing the automapping using a synchronous engine. Once the models have been mapped they can be used by the async engine.
For example:
import asyncio

import sqlalchemy as sa
from sqlalchemy import orm
from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession
from sqlalchemy.ext.automap import automap_base

sync_engine = sa.create_engine('postgresql:///test', echo=True, future=True)
Base = automap_base()
Base.prepare(sync_engine, reflect=True)

async def async_main(collection, modifications):
    engine = create_async_engine(
        "postgresql+asyncpg:///test",
        echo=True,
        future=True,
        connect_args={'ssl': False},
    )
    async_session = orm.sessionmaker(
        engine, class_=AsyncSession, future=True
    )

    async with async_session() as session:
        model = Base.classes[collection]
        matches = set(model.__mapper__.columns.keys()) & set(modifications['fields'])
        for m in matches:
            q = sa.select(sa.func.sum(getattr(model, m)))
            result = await session.execute(q)
            curr = result.all()
            for row in curr:
                print(row)
            print()

    # for AsyncEngine created in function scope, close and
    # clean-up pooled connections
    await engine.dispose()

asyncio.run(async_main("users", {'fields': ["usage", "allowance"]}))
If you don't need models, you could make the existing code more efficient by caching the MetaData object rather than recreating it on every call to retrieve_table_obj, and fix the query errors by replacing select(func.sum(table.c)) with select(sa.func.sum(getattr(table.c, c))).
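A sketch of that table-level variant (untested; it assumes sync_engine and an async_session factory like the ones above are available at module level):

meta = sa.MetaData()
meta.reflect(bind=sync_engine)  # reflect once and cache, instead of per call

def retrieve_table_obj(table):
    return meta.tables[table]

async def reading(collection, modifications):
    table = retrieve_table_obj(collection)
    matches = set(table.columns.keys()) & set(modifications['fields'])
    async with async_session() as session:
        for c in matches:
            q = sa.select(sa.func.sum(getattr(table.c, c)))
            result = await session.execute(q)
            print(result.all())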

TabularAdapter customization/notifications?

Thanks to another user here on SO (Warren Weckesser), I found a nice way to format my TabularAdapter columns. There are some other customizations I'd like to accomplish, so I thought I'd put this out to SO to see if I can get more help.
The following code puts up a couple of TabularAdapter tables in the format that I want to use. What I'd like to be able to do are 2 things:
I'd like to set the first column as non-editable. I've found how to set a row to non-editable, but not a column -- is this possible?
What I'd really like (even more than #1 above) is to get a notification if one of the values in any of my columns changes! I've heard that there are some 'tweaks' that can be done with numpy arrays to accomplish this, but I'm way too inexperienced yet to pull this off. Are there any TabularAdapter methods that might be used to accomplish this feat?
Here's my code so far (thanks to Warren's modifications):
from traits.api import HasTraits, Array, Str
from traitsui.api import View, Item, TabularEditor
from traitsui.tabular_adapter import TabularAdapter
from numpy import dtype

test_dtype = dtype([('Integer#1', 'int'),
                    ('Integer#2', 'int'),
                    ('Float', 'float')])

class TestArrayAdapter1(TabularAdapter):
    columns = [('Col1 #', 0), ('Col2', 1), ('Col3', 2)]
    even_bg_color = 0xf4f4f4  # very light gray
    width = 125

    def get_format(self, object, name, row, column):
        formats = ['%d', '%d', '%.4f']
        return formats[column]

class TestArrayAdapter2(TabularAdapter):
    columns = [('Col1 #', 0), ('Col2', 1), ('Col3', 2)]
    even_bg_color = 0xf4f4f4  # very light gray
    width = 125
    object_0_format = Str("%d")
    object_1_format = Str("%d")
    object_2_format = Str("%.4f")

class Test(HasTraits):
    test_array = Array(dtype=test_dtype)
    view = \
        View(
            Item(name='test_array', show_label=False,
                 editor=TabularEditor(adapter=TestArrayAdapter1())),
            Item(name='test_array', show_label=False,
                 editor=TabularEditor(adapter=TestArrayAdapter2())),
        )

test = Test()
test.test_array.resize(5, refcheck=False)
test.configure_traits()
For your item #2, after talking to Enthought folks, I confirmed there isn't an official way to do this yet, but:
I created a ticket for it: https://github.com/enthought/traitsui/issues/387
I worked around the issue by keeping a handle on the ArrayAdapter, subclassing it, and overriding the set_text method like so:
from traits.api import Event

class NotifyingArrayAdapter(ArrayAdapter):
    value_changed = Event

    def set_text(self, object, trait, row, column, text):
        super(NotifyingArrayAdapter, self).set_text(object, trait, row,
                                                    column, text)
        self.value_changed = True
That way, I can just listen to the value_changed event, and do what I need with it.
You can get fancier, and make the event be a more complex object, for example storing information about the old/new values, and the row and column changed:
from traits.api import Event, HasStrictTraits, Instance, Int, Str

class ArrayAdapterEvent(HasStrictTraits):
    row = Int
    column = Int
    old = Str
    new = Str

class NotifyingArrayAdapter(ArrayAdapter):
    value_changed = Event(Instance(ArrayAdapterEvent))

    def set_text(self, object, trait, row, column, text):
        old = self.get_text(object, trait, row, column)
        super(NotifyingArrayAdapter, self).set_text(object, trait, row,
                                                    column, text)
        event = ArrayAdapterEvent(old=old, new=text, row=row, column=column)
        self.value_changed = event
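To consume the event, you can attach a listener to the adapter instance you pass to the TabularEditor, for example (a sketch, assuming the classes above):

adapter = NotifyingArrayAdapter()

def report_change(event):
    # event is an ArrayAdapterEvent instance
    print('(%d, %d) changed from %r to %r'
          % (event.row, event.column, event.old, event.new))

adapter.on_trait_change(report_change, 'value_changed')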

VB Script in Access Variable not found

So I was advised that I could add some copy/replace functionality to this form.
Here is my coding attempt in VBA:
First I connect to the DB using DAO. Then I use a SELECT statement that has been verified to pull the last record inserted into the DB. Then I try to refill the controls with the values from the query, but I am getting reference errors.
Private Sub AutoFill_Click()
    Dim db As DAO.Database, rs As DAO.Recordset
    Dim strSQL As String

    Set db = CurrentDb()
    strSQL = "SELECT DISTINCTROW TOP 1 CPOrders.Cust, Customer.NAME, CPOrders.CP_Ref, CPOrders.Slsman, CPOrders.Date_opn, CPOrders.CPSmall, CPOrders.InvIssu, CPOrders.InvNo, CPOrders.InvDate, CPOrders.DueDate, CPOrders.ETADate, CPOrders.Closed, CPOrders.BuyerRef, CPOrders.ToCity, CPOrders.ToState, CPOrders.ToCtry, CPOrders.ToPort, CPOrders.Supplier, CPOrders.Origin, CPOrders.Product, CPOrders.GradeType, CPOrders.NoUnits, CPOrders.Pkg, CPOrders.Qty, CPOrders.TotSale, CPOrders.TotCost, CPOrders.GrMargin, CPOrders.[Sale$/Unit], CPOrders.[Cost$/Unit], CPOrders.OceanCost, CPOrders.OceanNotes, CPOrders.BLadingDate, CPOrders.USAPort, CPOrders.FOBCost, CPOrders.FASExportVal, CPOrders.InlandFrt, CPOrders.CommodCode, CPOrders.Notes FROM Customer INNER JOIN CPOrders ON Customer.[CUST_#] = CPOrders.Cust ORDER BY CPOrders.CP_Ref desc;"
    Set rs = db.OpenRecordset(strSQL, dbOpenDynaset, dbReadOnly)
    rs.MoveFirst

    CP_Ref.ControlSource = rs!CP_Ref
    Slsman.ControlSource = rs!Slsman
    CPSmall.ControlSource = rs!CPSmall
    InvIssu.ControlSource = rs!InvIssu
    InvDate.ControlSource = rs!InvDate
    DueDate.ControlSource = rs!DueDate
    Closed.ControlSource = rs!Closed

    rs.Close
    db.Close
End Sub
The control source references pick up and autocomplete in the editor.
I would think that, as it stands, even though I'm not filling all the controls with values from my SELECT statement, the ones I do set would populate; instead I get things like #NAME? where the values should be. I also get a break in my code that says "Invalid use of null".
Why? I appreciate your input, and I can provide screenshots if necessary. I think this involves the reference tie, but I'm not sure. Any help is much appreciated.
You are using the field names from the SELECT statement as if they were variables.
CP_Ref.ControlSource = rs("CP_Ref")
Slsman.ControlSource = rs("Slsman")
CPSmall.ControlSource = rs("CPSmall")
InvIssu.ControlSource = rs("InvIssu")
InvDate.ControlSource = rs("InvDate")
DueDate.ControlSource = rs("DueDate")
Closed.ControlSource = rs("Closed")
When you have that worked out, tackle the "Invalid use of null" problem by first identifying any fields that could potentially be NULL and using something like
SELECT Iif(IsNull([InvDate]), '', [InvDate]) As [InvDate], ...
in the SELECT statement to pass across a minimum of an empty string rather than a NULL value.

SQLAlchemy "after_insert" doesn't update target object fields

I have a model (see code below) on which I want to execute a function after an object is inserted that will update one of the object's fields. I'm using the after_insert Mapper Event to do this.
I've confirmed that after_insert properly calls the event_extract_audio_text() handler, and the target is updated with the correct audio_text value. However, once the event handler finishes executing, the value is not set for the object in the database.
Code
# Event handler
def event_extract_audio_text(mapper, connect, target):
    # Extract text from audio file
    audio_text = compute_text_from_audio_file(target.filename)
    # Update the 'text' field with extracted text
    target.audio_text = audio_text

# Model
class SoundsRaw(db.Model):
    __tablename__ = 'soundsraw'
    id = db.Column(db.BigInteger(), primary_key=True, autoincrement=True)
    filename = db.Column(db.String(255))
    audio_text = db.Column(db.Text())

# Event listener
event.listen(SoundsRaw, 'after_insert', event_extract_audio_text)
I've also tried calling db.session.commit() to try to update the object with the text value, but then I get the following stack trace:
File "/Users/alexmarse/.virtualenvs/techmuseum/lib/python2.7/site-packages/sqlalchemy/orm/session.py", line 219, in _assert_active
raise sa_exc.ResourceClosedError(closed_msg)
ResourceClosedError: This transaction is closed
Any ideas?
Software versions
SQLAlchemy 0.9.4
Flask 0.10.1
Flask-SQLAlchemy 1.0
The trick with 'after_insert'-style handlers is to use the connection directly. Here's how I did it:
class Link(db.Model):
    "News link data."
    __tablename__ = 'news_links'
    id = db.Column(db.BigInteger, primary_key=True)
    slug = db.Column(db.String, unique=True)  #, nullable=False
    url = db.Column(db.String, nullable=False, unique=True)
    title = db.Column(db.String)
    image_url = db.Column(db.String)
    description = db.Column(db.String)

@db.event.listens_for(Link, "after_insert")
def after_insert(mapper, connection, target):
    link_table = Link.__table__
    if target.slug is None:
        connection.execute(
            link_table.update().
            where(link_table.c.id == target.id).
            values(slug=slugify(target.id))
        )
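Applied to the SoundsRaw model from the question, the same pattern would look something like this (a sketch, untested; event and compute_text_from_audio_file are taken from the question's code):

@event.listens_for(SoundsRaw, 'after_insert')
def event_extract_audio_text(mapper, connection, target):
    audio_text = compute_text_from_audio_file(target.filename)
    # Issue an UPDATE on the connection instead of mutating the target,
    # so the change is actually written as part of the flush.
    connection.execute(
        SoundsRaw.__table__.update().
        where(SoundsRaw.__table__.c.id == target.id).
        values(audio_text=audio_text)
    )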
I ended up solving this by ditching the Mapper Event approach and using Flask's Signalling Support instead.
Basically, you can register "signals" on your model, which are essentially callback functions that are called whenever a specific kind of event happens. In my case, the event is an "update" on my model.
To configure the signals, I added this method to my app.py file:
def on_models_committed(sender, changes):
    """Handler for model change signals"""
    for model, change in changes:
        if change == 'insert' and hasattr(model, '__commit_insert__'):
            model.__commit_insert__()
        if change == 'update' and hasattr(model, '__commit_update__'):
            model.__commit_update__()
        if change == 'delete' and hasattr(model, '__commit_delete__'):
            model.__commit_delete__()
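For this handler to fire, it has to be connected to Flask-SQLAlchemy's models_committed signal, e.g. (assuming app is the Flask instance; in newer versions the import path is flask_sqlalchemy):

from flask.ext.sqlalchemy import models_committed

# Fire on_models_committed whenever this app commits model changes
models_committed.connect(on_models_committed, sender=app)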
Then, on my model, I added this function to handle the update event:
# Event methods
def __commit_update__(self):
    # create a new db session, which avoids the ResourceClosedError
    session = create_db_session()
    from techmuseum.modules.sensors.models import SoundsRaw
    # Get the SoundsRaw record by uuid (self contains the object being updated,
    # but we can't just update/commit self -- we'd get a ResourceClosedError)
    sound = session.query(SoundsRaw).filter_by(uuid=self.uuid).first()
    # Extract text from audio file
    audio_text = compute_text_from_audio_file(sound)
    # Update the 'text' field of the sound
    sound.text = audio_text
    # Commit the update to the sound
    session.add(sound)
    session.commit()

def create_db_session():
    # create a new Session
    from sqlalchemy import create_engine
    from sqlalchemy.orm import sessionmaker
    psql_url = app.config['SQLALCHEMY_DATABASE_URI']
    some_engine = create_engine(psql_url)
    # create a configured "Session" class and return an instance of it
    Session = sessionmaker(bind=some_engine)
    return Session()