AsyncGraphMachine used with async on_enter callbacks - pytransitions

My question is actually pretty simple, but I am not well versed enough in the pytransitions code to find the answer by myself:
Is it possible to use AsyncGraphMachine with async on_enter callbacks only?
In the documentation: https://github.com/pytransitions/transitions#-using-async-callbacks
it is stated that:
If you are using Python 3.7 or later, you can use AsyncMachine to work
with asynchronous callbacks. You can mix synchronous and asynchronous
callbacks if you like but this may have undesired side effects. Note
that events need to be awaited and the event loop must also be handled
by you.
I am using Python > 3.7, and I cannot make my sample work:
from transitions.extensions.asyncio import AsyncMachine
import asyncio
import time
from transitions import State

class AsyncModel:
    async def do_async(self):
        print("Do async start")
        await asyncio.sleep(5)
        print("Do async end")

    def do_sync(self):
        print("Do sync start")
        time.sleep(5)
        print("Do sync end")

transitions = [dict(trigger="start", source="*", dest="initialization"),
               dict(trigger="finish", source="initialization", dest="final")]
states = [State(name="initialization", on_enter=["do_async", "do_sync"]),
          State(name="final", on_enter=["do_async", "do_sync"])]

model = AsyncModel()
machine = AsyncMachine(model, states=states, transitions=transitions)
asyncio.get_event_loop().run_until_complete(model.start())
The actual error is:
/home/user/.local/lib/python3.8/site-packages/transitions/core.py:128: RuntimeWarning: coroutine 'AsyncMachine.callbacks' was never awaited
  event_data.machine.callbacks(self.on_enter, event_data)
RuntimeWarning: Enable tracemalloc to get the object allocation traceback
Traceback (most recent call last):
  File "./test.py", line 27, in <module>
    asyncio.get_event_loop().run_until_complete(model.start())
  File "/usr/lib/python3.8/asyncio/base_events.py", line 616, in run_until_complete
    return future.result()
  File "/home/user/.local/lib/python3.8/site-packages/transitions/extensions/asyncio.py", line 177, in trigger
    return await self.machine.process_context(func, _model)
  File "/home/user/.local/lib/python3.8/site-packages/transitions/extensions/asyncio.py", line 409, in process_context
    res = await self._process(func, model)
  File "/home/user/.local/lib/python3.8/site-packages/transitions/extensions/asyncio.py", line 443, in _process
    return await trigger()
  File "/home/user/.local/lib/python3.8/site-packages/transitions/extensions/asyncio.py", line 192, in _trigger
    return await self._process(event_data)
  File "/home/user/.local/lib/python3.8/site-packages/transitions/extensions/asyncio.py", line 201, in _process
    if await trans.execute(event_data):
  File "/home/user/.local/lib/python3.8/site-packages/transitions/extensions/asyncio.py", line 130, in execute
    await self._change_state(event_data)
  File "/home/user/.local/lib/python3.8/site-packages/transitions/extensions/asyncio.py", line 145, in _change_state
    await event_data.machine.get_state(self.dest).enter(event_data)
TypeError: object NoneType can't be used in 'await' expression
However, on_enter functions are callbacks, according to the documentation itself: https://github.com/pytransitions/transitions#state-callbacks
So I wonder what is going on here.
Thank you in advance for your help
EDIT: OK, the problem might be somewhere I wouldn't even have considered:
from transitions.extensions.asyncio import AsyncMachine
import asyncio
import time
from transitions import State

class AsyncModel:
    async def do_async(self):
        print("Do async start")
        await asyncio.sleep(5)
        print("Do async end")

    def do_sync(self):
        print("Do sync start")
        time.sleep(5)
        print("Do sync end")

transitions = [dict(trigger="start", source="initialization", dest="final", before=["do_async", "do_sync"], after=["do_sync"], prepare=["do_sync"])]
states = [State(name="initialization"),
          State(name="final")]

model = AsyncModel()
machine = AsyncMachine(model, states=["initialization", "final"], transitions=transitions, initial="initialization")
asyncio.get_event_loop().run_until_complete(model.start())
This works; however, the following fails:
from transitions.extensions.asyncio import AsyncMachine
import asyncio
import time
from transitions import State

class AsyncModel:
    async def do_async(self):
        print("Do async start")
        await asyncio.sleep(5)
        print("Do async end")

    def do_sync(self):
        print("Do sync start")
        time.sleep(5)
        print("Do sync end")

transitions = [dict(trigger="start", source="initialization", dest="final", before=["do_async", "do_sync"], after=["do_sync"], prepare=["do_sync"])]
states = [State(name="initialization"),
          State(name="final")]

model = AsyncModel()
machine = AsyncMachine(model, states=states, transitions=transitions, initial="initialization")
asyncio.get_event_loop().run_until_complete(model.start())
It looks like vanilla states from transitions are not compatible with async machines; you should use "from transitions.extensions.asyncio import AsyncState" instead.

OK, so I actually found the answer by trial and error: my issue was that I was using State instead of AsyncState.
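For reference, a minimal sketch of the corrected version (assuming the same transitions release as above): the only change is that the states are built from AsyncState, imported from transitions.extensions.asyncio, instead of the plain State.

from transitions.extensions.asyncio import AsyncMachine, AsyncState
import asyncio

class AsyncModel:
    async def do_async(self):
        print("Do async start")
        await asyncio.sleep(1)
        print("Do async end")

transitions = [dict(trigger="start", source="*", dest="initialization")]
# AsyncState.enter is a coroutine, so async on_enter callbacks are awaited properly.
states = [AsyncState(name="initialization", on_enter=["do_async"]),
          AsyncState(name="final")]

model = AsyncModel()
machine = AsyncMachine(model, states=states, transitions=transitions)
asyncio.get_event_loop().run_until_complete(model.start())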

Related

Pytest asyncio - run multiple tests gives error regarding event loop

I am using pytest with pytest-asyncio to run async tests. What is strange is that these tests only work if there is exactly one of them. For example:
@pytest.mark.asyncio
async def test_foo():
    client = await get_db_connection()
    user = await client.DO_QUERY
    response = await FunctionUnderTest(
        db=client, ...args
    )
    assert response.id is not None

@pytest.mark.asyncio
async def test_error_foo():
    client = await get_db_connection()
    with pytest.raises(MissingRequiredError):
        await FunctionUnderTest(
            db=client, ...args
        )
If I comment out either of those tests, the remaining one will pass, but running both together gives:
RuntimeError: Task <Task pending name='Task-5' coro=<test_error_foo() running at /tests/test_function_under_test.py:44> cb=[_run_until_complete_cb() at /usr/lib/python3.10/asyncio/base_events.py:184]> got Future <Future pending> attached to a different loop
I would have expected pytest-asyncio to create a single event loop and run all the tests sequentially, but this does not seem to work.
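One common cause, assuming get_db_connection() caches a client at module level: pytest-asyncio creates a fresh event loop per test by default, so the cached client stays bound to the first test's loop. A hedged sketch of the usual workaround for older pytest-asyncio releases (where overriding the event_loop fixture is still supported) is a session-scoped loop in conftest.py:

import asyncio
import pytest

@pytest.fixture(scope="session")
def event_loop():
    # One loop for the entire test session, so cached connections stay on the same loop.
    loop = asyncio.new_event_loop()
    yield loop
    loop.close()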

How to create a websocket manager in Solana-py

I am using Python v3.7 + solana v0.21.0 and trying to create a websocket manager that handles several different subscriptions, but the code seems to block whenever websocket.recv() or asyncstdlib.enumerate(websocket) is called. Examples from the docs:
import asyncio
from asyncstdlib import enumerate
from solana.rpc.websocket_api import connect

async def main():
    # First example using websocket.recv()
    async with connect("wss://api.devnet.solana.com") as websocket:
        await websocket.logs_subscribe()
        first_resp = await websocket.recv()
        subscription_id = first_resp.result
        next_resp = await websocket.recv()  # <-- blocks until a message is received
        print(next_resp)
        await websocket.logs_unsubscribe(subscription_id)

    # Second example using the client as an infinite asynchronous iterator:
    async with connect("wss://api.devnet.solana.com") as websocket:
        await websocket.logs_subscribe()
        first_resp = await websocket.recv()
        subscription_id = first_resp.result
        async for idx, msg in enumerate(websocket):  # <-- blocks until a message is received
            if idx == 3:
                break
            print(msg)
        await websocket.logs_unsubscribe(subscription_id)

asyncio.run(main())
The idea is to iterate in an infinite loop so that new subscriptions can be added to the websocket, for instance:
from solana.rpc.request_builder import LogsSubscribeFilter
from solana.rpc.websocket_api import connect
from asgiref.sync import sync_to_async
from solana.publickey import PublicKey
from time import sleep

async def websocket_manager(rpc: str):
    async with connect(rpc) as websocket:
        while True:
            active_pubkeys = await sync_to_async(get_my_active_pubkeys)()
            if active_pubkeys:
                # Add missing pubkeys
                for pubkey in active_pubkeys:
                    if ws.get(pubkey) in websocket.subscriptions:
                        continue
                    print(f"Subscribe to {pubkey}")
                    await websocket.logs_subscribe(LogsSubscribeFilter.mentions(PublicKey(pubkey)))
                    first_resp = await websocket.recv()
                    ws[pubkey] = first_resp.result  # Maps the pubkey to the subscription ID
                # Delete unused subscriptions:
                for non_used_pubkey in set(active_pubkeys) ^ set(ws.keys()):
                    if non_used_pubkey in ws:
                        print(f"Delete subscription for pubkey #{non_used_pubkey}")
                        websocket.account_unsubscribe(ws[non_used_pubkey])
                        ws.pop(non_used_pubkey)
            # <-- HERE: HOW TO ITERATE SUBSCRIPTIONS WITHOUT BLOCKING THE MANAGER????
            sleep(30)  # Sleep for 30 seconds
Would it be safe to use a new thread or a subprocess for reading the websocket messages so they don't block the main function?
I figured it out: I just needed to use task = asyncio.create_task(my_msg_listener_function(websocket, ws)) to have another task running concurrently and taking care of the websocket messages.
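A rough sketch of that pattern, where my_msg_listener_function is the hypothetical listener named above and ws is the pubkey-to-subscription-id dict from the manager snippet:

import asyncio
from solana.rpc.websocket_api import connect

async def my_msg_listener_function(websocket, ws):
    # Drain incoming messages concurrently so recv() never blocks the manager loop.
    async for msg in websocket:
        print(msg)

async def websocket_manager(rpc: str):
    async with connect(rpc) as websocket:
        task = asyncio.create_task(my_msg_listener_function(websocket, ws))
        while True:
            # ... subscribe/unsubscribe bookkeeping as in the snippet above ...
            await asyncio.sleep(30)  # yields control to the listener instead of time.sleep()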

pytest RuntimeError: Event loop is closed FastApi

I receive the error RuntimeError: Event loop is closed every time I try to make more than one async call inside my test. I have already tried all the suggestions on Stack Overflow for rewriting the event_loop fixture, but nothing works. I wonder what I'm missing.
Run test command: python -m pytest tests/ --asyncio-mode=auto
requirements.txt
pytest==7.1.2
pytest-asyncio==0.18.3
pytest-html==3.1.1
pytest-metadata==2.0.1
test.py
async def test_user(test_client_fast_api):
    assert 200 == 200
    request_first = test_client_fast_api.post(  # works fine
        "/first_route",
    )
    request_second = test_client_fast_api.post(  # receives RuntimeError: Event loop is closed
        "/second_route",
    )
conftest.py
@pytest.fixture()
def event_loop():
    try:
        loop = asyncio.get_running_loop()
    except RuntimeError:
        loop = asyncio.new_event_loop()
    yield loop
    loop.close()
It took me all afternoon to solve this problem.
I also tried to build on other people's code; here is mine.
Add a file conftest.py to the directory where the test script is placed, and write the following code:
import pytest
from main import app
from httpx import AsyncClient

@pytest.fixture(scope="session")
def anyio_backend():
    return "asyncio"

@pytest.fixture(scope="session")
async def client():
    async with AsyncClient(app=app, base_url="http://test") as client:
        print("Client is ready")
        yield client
Then write a test script, test_xxx.py:
import pytest
from httpx import AsyncClient

@pytest.mark.anyio
async def test_run_not_exists_schedule(client: AsyncClient):
    response = await client.get("/schedule/list")
    assert response.status_code == 200
    schedules = response.json()["data"]["schedules"]
    schedules_exists = [i["id"] for i in schedules]
    not_exists_id = max(schedules_exists) + 1
    request_body = {"id": not_exists_id}
    response = await client.put("/schedule/run_cycle", data=request_body)
    assert response.status_code != 200

@pytest.mark.anyio
async def test_run_adfasdfw(client: AsyncClient):
    response = await client.get("/schedule/list")
    assert response.status_code == 200
    schedules = response.json()["data"]["schedules"]
    schedules_exists = [i["id"] for i in schedules]
    not_exists_id = max(schedules_exists) + 1
    request_body = {"id": not_exists_id}
    response = await client.put("/schedule/run_cycle", data=request_body)
    assert response.status_code != 200
This is the real test code from my own project; you can adapt it to your own. Finally, run python -m pytest in the project's terminal. If all goes well, it should pass. This may require installing these libraries:
pytest
httpx
Yeah, wow, I had a similar afternoon to your experience, @Bai Jinge.
This is the event loop fixture and TestClient pattern that worked for me:
from asyncio import get_event_loop
from unittest import TestCase

import pytest
from async_asgi_testclient import TestClient
from starlette.status import HTTP_200_OK

@pytest.fixture(scope="module")
def event_loop():
    loop = get_event_loop()
    yield loop

@pytest.mark.asyncio
async def test_example_test_case():
    async with TestClient(app) as async_client:  # app: your ASGI/FastAPI application
        response = await async_client.get(
            "/api/v1/example",
            query_string={"example": "param"},
        )
        assert response.status_code == HTTP_200_OK
Ref to relevant GitHub issue: https://github.com/tiangolo/fastapi/issues/2006#issuecomment-689611040
Please note: I could NOT figure out how to use class-based tests. Neither unittest.TestCase nor asynctest.case.TestCase would work for me. The pytest-asyncio docs (here) state that:
Test classes subclassing the standard unittest library are not supported, users are recommended to use unittest.IsolatedAsyncioTestCase or an async framework such as asynctest.
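For completeness, a minimal sketch of the stdlib route those docs point to, using unittest.IsolatedAsyncioTestCase (Python 3.8+); app and the /api/v1/example route are placeholders taken from the snippets above:

import unittest
from httpx import AsyncClient
from main import app  # assuming the app lives in main.py, as in the earlier answer

class ExampleApiTest(unittest.IsolatedAsyncioTestCase):
    async def test_example(self):
        # Each test method gets its own event loop, managed by unittest itself.
        async with AsyncClient(app=app, base_url="http://test") as client:
            response = await client.get("/api/v1/example", params={"example": "param"})
            self.assertEqual(response.status_code, 200)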

How do I use compute() with async functions?

My app is heavily (and happily) using async functions. However, I have become aware that they are affecting my UI performance because they are executed in the same thread. The accepted solution is to use an isolate.
However, when trying to re-engineer my app to use the compute() function in some places so that the code is run in an isolate, I have been unable to move most of my async code. Here is an example:
// in an async function
storyList = await compute(_isolateLoad, 0);

static Future<List<Story>> _isolateLoad(int dummy) async {
  List<Story> storyList = List<Story>();
  QuerySnapshot querySS = await _db.collection('stories')
      .orderBy('lastedited', descending: true).limit(maxSearch).getDocuments();
  for (DocumentSnapshot doc in querySS.documents) {
    Story story = Story.fromMap(id: doc.documentID, map: doc.data);
    storyList.add(story);
    await story.getOwnerName();
  }
  return storyList;
}
This generates the following error:
exception in StoryRepository.loadStoriesStream: Exception: ServicesBinding.defaultBinaryMessenger was accessed before the binding was initialized.
I could execute the database functions outside the isolate, but that seems pointless as that's what takes the most time in the async function.
How do I execute the database calls inside an isolate when it appears that compute() cannot contain async functions?

aiohttp - How to save a persistent session in class namespace

I am trying to use aiohttp in one of my projects and am struggling to figure out how to create a persistent aiohttp.ClientSession object. I have gone through the official aiohttp documentation but did not find it helpful in this context.
I have looked through other online forums and noticed that a lot has changed since aiohttp was created. In some examples on GitHub, the aiohttp author is shown creating a ClientSession outside a coroutine function (i.e. class Session: def __init__(self): self.session = aiohttp.ClientSession()). I have also read that one should not create a ClientSession outside a coroutine.
I have tried the following:
class Session:
    def __init__(self):
        self._session = None

    async def create_session(self):
        self._session = aiohttp.ClientSession()

    async def fetch(self, url):
        if self._session is None:
            await self.create_session()
        async with self._session.get(url) as resp:
            return await resp.text()
I am getting a lot of warnings about unclosed sessions and connectors. I also frequently get SSLError, and two out of three calls hang, forcing me to Ctrl+C to kill them.
With requests I can simply initialize the session object in __init__, but it's not as simple as this with aiohttp.
I do not see any issues if I use the following (which is what I see as an example all over the place), but unfortunately then I end up creating a ClientSession with every request.
async def fetch(url):
    async with aiohttp.ClientSession() as session:
        async with session.get(url) as resp:
            return await resp.text()
I can wrap aiohttp.ClientSession() in another function and use that as a context manager, but then I would still end up creating a new session object every time I call the wrapper function. I am trying to figure out how to save an aiohttp.ClientSession in a class namespace and reuse it.
Any help would be greatly appreciated.
Here is a working example:
from aiohttp import ClientSession, TCPConnector
import asyncio

class CS:
    _cs: ClientSession

    def __init__(self):
        self._cs = ClientSession(connector=TCPConnector(verify_ssl=False))

    async def get(self, url):
        async with self._cs.get(url) as resp:
            return await resp.text()

    async def close(self):
        await self._cs.close()

async def func():
    cs = CS()
    print(await cs.get('https://google.com'))
    await cs.close()  # you must close the session

loop = asyncio.get_event_loop()
loop.run_until_complete(func())
You can do it.
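Since this example builds the ClientSession in __init__, i.e. outside a coroutine, a hedged variant (not from the original answer) that creates the session lazily inside the first awaited call and keeps it in the class namespace might look like this:

from aiohttp import ClientSession
import asyncio

class LazyCS:
    def __init__(self):
        self._cs = None  # created lazily, once an event loop is running

    async def get(self, url):
        if self._cs is None:
            self._cs = ClientSession()
        async with self._cs.get(url) as resp:
            return await resp.text()

    async def close(self):
        if self._cs is not None:
            await self._cs.close()

async def main():
    cs = LazyCS()
    print(await cs.get("https://google.com"))
    await cs.close()

asyncio.run(main())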
I implemented a way to share a session when writing Django programs (using ASGI). I use the PID to key the session of each process, which makes it convenient for Django to reuse the session across different processes.
After actual testing, I can call the shared session directly.
Django 3.2
uvicorn
aiohttp.py
import os
import asyncio
import aiohttp
import logging

session_list = {}
logger = logging.getLogger(__name__)

class Req:
    @property
    def set_session(self):
        try:
            loop = asyncio.get_running_loop()
        except:
            loop = asyncio.get_event_loop()
            asyncio.set_event_loop(loop)
        session = aiohttp.ClientSession(loop=loop)
        session_list.update({os.getpid(): session})
        return session

    def __init__(self):
        if session_list.get(os.getpid()):
            self.session = session_list.get(os.getpid())
        else:
            self.session = self.set_session

    async def test(self):
        if session_list:
            session = session_list.get(os.getpid())
            if session and session.closed:
                session_list.pop(os.getpid())
                session = self.set_session
        else:
            session = self.set_session
        if not session or session.loop.is_running():
            session = self.set_session
            logger.warning("session abnormal")
        result = await session.get("http://httpbing.org/get")
        print(result.status)

req = Req()
views.py
from django.http import HttpResponse
from django.shortcuts import render  # noqa
from django.views.generic import View
from django.utils.decorators import classonlymethod
import asyncio

class TTT(View):
    @classonlymethod
    def as_view(cls, **initkwargs):
        view = super().as_view(**initkwargs)
        view._is_coroutine = asyncio.coroutines._is_coroutine
        return view

    async def get(self, request):
        # req is the shared Req instance defined in aiohttp.py above
        await req.test()
        return HttpResponse("ok")