Thread + win.Dispatch("SAPI.SpVoice") in a book reader app: resume fails after pausing for a few minutes - python-3.7

My problem is that when I pause the speaker and leave it idle for a few minutes, the next call raises an error, and resuming no longer works.
My guess is that the win.Dispatch("SAPI.SpVoice") object gets released after it has been idle for a while (for example, after a few minutes of doing nothing it automatically releases the underlying resource).
Can anyone help?
The error is:
Traceback (most recent call last):
  File "C:\Users\Administrator\AppData\Local\Programs\Python\Python37\lib\threading.py", line 917, in _bootstrap_inner
    self.run()
  File "C:\Users\Administrator\AppData\Local\Programs\Python\Python37\lib\threading.py", line 865, in run
    self._target(*self._args, **self._kwargs)
  File "C:/Users/Administrator/Desktop/Learn/readBook/mainUI.py", line 158, in say
    self.speaks[self.points['剑仙在此']].Speak(info)
  File "<COMObject SAPI.SpVoice>", line 3, in Speak
pywintypes.com_error: (-2147352567, '发生意外。', (0, None, '没有注册类\r\n', None, 0, -2147201018), None)
(The Chinese messages translate to "An exception occurred." and "Class not registered".)
Here is the relevant part of my code:
def next(self):
    self.speaks[self.points['剑仙在此']].pause()
    self.points['剑仙在此'] += 1
    self.uploadPoint()
    self.start()

def last(self):
    self.speaks[self.points['剑仙在此']].pause()
    self.points['剑仙在此'] -= 1
    self.uploadPoint()
    self.start()

def start(self):
    self.flishPoint()
    for title, value in self.info[str(self.points['剑仙在此'])].items():
        contents = value.split()
        contents.insert(0, title)
        self.readBook(contents)

def paused(self):
    print(self.speaks[self.points['剑仙在此']])
    if self.now:
        self.now = False
        self.speaks[self.points['剑仙在此']].pause()
    else:
        self.now = True
        self.speaks[self.points['剑仙在此']].resume()

def speedAdd(self):
    self.speed += 1
    self.speaks[self.points['剑仙在此']].Rate = self.speed

def speedSub(self):
    self.speed -= 1
    self.speaks[self.points['剑仙在此']].Rate = self.speed

def readBook(self, contents):
    self.ui.plainTextEdit.clear()
    info = ''
    for line in contents:
        info += line + '\n'
    self.thread = Thread(target=self.say, args=(info,), daemon=True)
    self.thread.start()
    self.ui.plainTextEdit.appendPlainText(info)

def say(self, info):
    pythoncom.CoInitialize()
    self.speaks[self.points['剑仙在此']] = win.Dispatch("SAPI.SpVoice")
    self.speaks[self.points['剑仙在此']].Rate = self.speed
    print(self.speaks[self.points['剑仙在此']])
    self.speaks[self.points['剑仙在此']].Speak(info)
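For reference, here is a minimal, hedged sketch of the per-thread COM lifecycle around Dispatch: every thread that touches the voice object calls pythoncom.CoInitialize() before Dispatch and pythoncom.CoUninitialize() when it is done. This is only an illustration of the pywin32/SAPI calls used above (the win alias is assumed to be win32com.client), not a verified fix for the error.

# Minimal sketch: per-thread COM setup/teardown around SAPI.SpVoice.
import pythoncom
import win32com.client as win
from threading import Thread

def say(text, rate=0):
    pythoncom.CoInitialize()            # COM must be initialized in each thread
    try:
        voice = win.Dispatch("SAPI.SpVoice")
        voice.Rate = rate               # SAPI rate range is -10 .. 10
        voice.Speak(text)               # blocks until the text has been spoken
    finally:
        pythoncom.CoUninitialize()      # release COM for this thread

Thread(target=say, args=("Hello, reader",), daemon=True).start()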


Python multiprocessing, can't pickle thread.lock (pymongo.Cursor)

First, let me assure you that I have read all the relevant answers and they don't work for me.
I am using a multiprocessing Pool to parallelize my data creation, with MongoDB 5.0 and the pymongo client.
As you can see, I am initializing the Mongo client in the worker, as suggested by the available answers, but I still get:
TypeError: cannot pickle '_thread.lock' object
Exception ignored in: <function CommandCursor.__del__ at 0x7f96f6fff160>
Is there a way to use multiprocessing with a pymongo.Cursor?
Any help would be appreciated.
This is the function that calls the Pool:
def get_all_valid_events(
    event_criteria: str,
    all_listings: List[str],
    earnings: List[Dict[str, Any]],
    days_around_earnings=0,
    debug=False,
    poolsize=10,
    chunk_size=100,
    lookback=30,
    lookahead=0,
):
    start = time.perf_counter()
    listings = Manager().list(all_listings.copy())
    valid_events = []
    if debug:
        for i in range(ceil(len(listings) / chunk_size)):
            valid_events += get_valid_event_dates_by_listing(event_criteria, listings[i * chunk_size:(i + 1) * chunk_size], earnings, days_around_earnings, debug)
    else:
        payload = list()
        for i in range(ceil(len(listings) / chunk_size)):
            payload.append(
                [
                    event_criteria,
                    listings[i * chunk_size:(i + 1) * chunk_size],
                    earnings,
                    days_around_earnings,
                    debug,
                    lookback,
                    lookahead,
                ]
            )
        with ThreadPool(poolsize) as pool:
            valid_events = pool.starmap(get_valid_event_dates_by_listing, payload)
    print(f"getting all valid true events took {time.perf_counter() - start} sec")
    return valid_events
And this is the worker function:
def get_valid_event_dates_by_listing(
    event_criteria: str,
    listings: List[str],
    earnings_list,
    days_around_earnings=0,
    debug=False,
    lookback=30,
    lookahead=0,
) -> List[Tuple[Tuple[str, datetime], int]]:
    # TODO: generalize event filter
    start = time.perf_counter()
    client = MongoClient()
    db = client['stock_signals']
    cursor_candles_by_listing = db.candles.find(
        {'listing': {'$in': listings}},
        {'_id': 0, 'listing': 1, 'date': 1, 'position': 1, 'PD_BBANDS_6_lower': 1, 'close': 1, 'PD_BBANDS_6_upper': 1}
    )
    candles = list(cursor_candles_by_listing)
    df = pd.DataFrame(candles).dropna()
    minimum_position_dict = dict(df.groupby('listing').min()['position'])  # We need the minimum position by listing to filter only events that have lookback
    # Filter only the dates that satisfy the criteria
    lte_previous_bb_6_lower = df['close'] <= df[f"{event_criteria}_lower"].shift()
    gte_previous_bb_6_upper = df['close'] >= df[f"{event_criteria}_upper"].shift()
    potential_true_events_df = df[lte_previous_bb_6_lower | gte_previous_bb_6_upper]
    potential_false_events_df = df.drop(potential_true_events_df.index)
    potential_true_event_dates = potential_true_events_df[['listing', 'date', 'position']].values
    actual_true_event_dates = earning_helpers.filter_event_dates_by_earnings_and_position(potential_true_event_dates, earnings_list, minimum_position_dict, days_around_earning=days_around_earnings, lookback=lookback)
    true_event_dates = [((event_date[0], event_date[1], event_date[2]), 1) for event_date in actual_true_event_dates]
    potential_false_event_dates = potential_false_events_df[['listing', 'date', 'position']].values
    actual_false_event_dates = _random_false_events_from_listing_df(potential_false_event_dates, len(actual_true_event_dates), earnings_list, minimum_position_dict, days_around_earnings, lookback)
    false_events_dates = [((event_date[0], event_date[1], event_date[2]), 0) for event_date in actual_false_event_dates]
    all_event_dates = true_event_dates + false_events_dates
    shuffle(all_event_dates)
    print(f"getting a true sequence for listing took {time.perf_counter() - start} sec")
    return all_event_dates
And this is my main:
from utils import event_helpers, earning_helpers
from utils.queries import get_candle_listing

if __name__ == "__main__":
    all_listings = get_candle_listing.get_listings()
    earnigns = earning_helpers.get_all_earnings_dates()
    res = event_helpers.get_all_valid_events('PD_BBANDS_6', all_listings, earnigns, 2, chunk_size=100)
Full stack trace:
  File "test_multiprocess.py", line 8, in <module>
    res = event_helpers.get_all_valid_events('PD_BBANDS_6', all_listings, earnigns, 2, chunk_size=100)
  File "/media/data/projects/ml/signal_platform/utils/event_helpers.py", line 53, in get_all_valid_events
    valid_events = pool.starmap(get_valid_event_dates_by_listing, payload)
  File "/home/froy001/.asdf/installs/python/3.8.12/lib/python3.8/multiprocessing/pool.py", line 372, in starmap
    return self._map_async(func, iterable, starmapstar, chunksize).get()
  File "/home/froy001/.asdf/installs/python/3.8.12/lib/python3.8/multiprocessing/pool.py", line 771, in get
    raise self._value
  File "/home/froy001/.asdf/installs/python/3.8.12/lib/python3.8/multiprocessing/pool.py", line 537, in _handle_tasks
    put(task)
  File "/home/froy001/.asdf/installs/python/3.8.12/lib/python3.8/multiprocessing/connection.py", line 206, in send
    self._send_bytes(_ForkingPickler.dumps(obj))
  File "/home/froy001/.asdf/installs/python/3.8.12/lib/python3.8/multiprocessing/reduction.py", line 51, in dumps
    cls(buf, protocol).dump(obj)
TypeError: cannot pickle '_thread.lock' object
Exception ignored in: <function CommandCursor.__del__ at 0x7f46e91e21f0>
Traceback (most recent call last):
  File "/home/froy001/.cache/pypoetry/virtualenvs/signal-platform-31MTNyCe-py3.8/lib/python3.8/site-packages/pymongo/command_cursor.py", line 68, in __del__
  File "/home/froy001/.cache/pypoetry/virtualenvs/signal-platform-31MTNyCe-py3.8/lib/python3.8/site-packages/pymongo/command_cursor.py", line 83, in __die
  File "/home/froy001/.cache/pypoetry/virtualenvs/signal-platform-31MTNyCe-py3.8/lib/python3.8/site-packages/pymongo/mongo_client.py", line 1696, in _cleanup_cursor
  File "/home/froy001/.cache/pypoetry/virtualenvs/signal-platform-31MTNyCe-py3.8/lib/python3.8/site-packages/pymongo/client_session.py", line 466, in _end_session
  File "/home/froy001/.cache/pypoetry/virtualenvs/signal-platform-31MTNyCe-py3.8/lib/python3.8/site-packages/pymongo/client_session.py", line 871, in in_transaction
  File "/home/froy001/.cache/pypoetry/virtualenvs/signal-platform-31MTNyCe-py3.8/lib/python3.8/site-packages/pymongo/client_session.py", line 362, in active
AttributeError: 'NoneType' object has no attribute 'STARTING'
Update (01-23): I tried the multiprocess library (which uses dill instead of pickle), but it didn't help.
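For what it's worth, here is a minimal sketch of the pattern the referenced answers describe: create the MongoClient inside the worker process and make sure only plain Python data (no clients, cursors or Manager proxies) ends up in the pickled payload. The 'stock_signals' database and the chunking idea come from the snippets above; everything else (names, chunk contents) is illustrative.

from multiprocessing import Pool
from pymongo import MongoClient

def worker(listings_chunk):
    # The client and the cursor live and die inside the worker process.
    client = MongoClient()
    db = client["stock_signals"]
    cursor = db.candles.find({"listing": {"$in": listings_chunk}}, {"_id": 0})
    candles = list(cursor)        # materialize, so only plain dicts are returned
    client.close()
    return candles

if __name__ == "__main__":
    chunks = [["AAA", "BBB"], ["CCC"]]   # hypothetical listing chunks
    with Pool(4) as pool:
        results = pool.map(worker, chunks)

The CommandCursor.__del__ in the traceback suggests that something in the payload (possibly earnings) still wraps a live cursor, so it would need the same list(...) treatment before being handed to the pool.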

Chaining an iterable task result to a group, then chaining the results of this group to a chord

Related: "How to chain a Celery task that returns a list into a group?" and "Chain a celery task's results into a distributed group", but they lack an MWE, do not (to my understanding) fully cover my needs, and may be outdated.
I need to dynamically pass the output of a task to a parallel group of tasks, then pass the results of this group to a final task. I would like to make this a single "meta task".
Here's my attempt so far:
# chaintasks.py
import random
from typing import List, Iterable, Callable

from celery import Celery, signature, group, chord

app = Celery(
    "chaintasks",
    backend="redis://localhost:6379",
    broker="pyamqp://guest@localhost//",
)
app.conf.update(task_track_started=True, result_persistent=True)


@app.task
def select_items(items: List[str]):
    print("In select_items, received", items)
    selection = tuple(set(random.choices(items, k=random.randint(1, len(items)))))
    print("In select_items, sending", selection)
    return selection


@app.task
def process_item(item: str):
    print("In process_item, received", item)
    return item + "_processed"


@app.task
def group_items(items: List[str]):
    print("In group_items, received", items)
    return ":".join(items)


@app.task
def dynamic_map_group(iterable: Iterable, callback: Callable):
    # Map a callback over an iterator and return as a group
    # Credit to https://stackoverflow.com/a/13569873/5902284
    callback = signature(callback)
    return group(callback.clone((arg,)) for arg in iterable).delay()


@app.task
def dynamic_map_chord(iterable: Iterable, callback: Callable):
    callback = signature(callback)
    return chord(callback.clone((arg,)) for arg in iterable).delay()
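As an aside, the pattern usually suggested for this kind of dynamic fan-out is to have the mapping task replace itself with the group via Task.replace (available in Celery 4+), so that the rest of the chain receives the subtasks' actual results instead of a GroupResult. A sketch of what dynamic_map_group could look like under that approach; it reuses the imports above and is untested here:

@app.task(bind=True)
def dynamic_map_group(self, iterable: Iterable, callback: Callable):
    # Replace this task with a group; the chain then waits on the group's
    # results rather than receiving a serialized GroupResult.
    sig = signature(callback)
    raise self.replace(group(sig.clone((arg,)) for arg in iterable))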
Now, to try this out, let's spin up a Celery worker and use Docker for RabbitMQ and Redis:
$ docker run -p 5672:5672 rabbitmq
$ docker run -p 6379:6379 redis
$ celery -A chaintasks:app worker -E
Now, let's try to illustrate what I need:
items = ["item1", "item2", "item3", "item4"]
print(select_items.s(items).apply_async().get())
This outputs: ['item3'], great.
meta_task1 = select_items.s(items) | dynamic_map_group.s(process_item.s())
meta_task1_result = meta_task1.apply_async().get()
print(meta_task1_result)
It gets trickier here: the output is [['b916f5d3-4367-46c5-896f-f2d2e2e59a00', None], [[['3f78d31b-e374-484e-b05b-40c7a2038081', None], None], [['bce50d49-466a-43e9-b6ad-1b78b973b50f', None], None]]].
I am not sure what the Nones mean, but I guess the UUIDs represent the subtasks of my meta task. But how do I get their results from here?
I tried:
subtasks_ids = [i[0][0] for i in meta_task1_result[1]]
processed_items = [app.AsyncResult(i).get() for i in subtasks_ids]
but this just hangs. What am I doing wrong here? I'm expecting an output looking like ['processed_item1', 'processed_item3']
What about trying to chord the output of select_items to group_items?
meta_task2 = select_items.s(items) | dynamic_map_chord.s(group_items.s())
print(meta_task2.apply_async().get())
Ouch, this raises an AttributeError that I don't really understand.
Traceback (most recent call last):
  File "chaintasks.py", line 70, in <module>
    print(meta_task2.apply_async().get())
  File "/site-packages/celery/result.py", line 223, in get
    return self.backend.wait_for_pending(
  File "/site-packages/celery/backends/asynchronous.py", line 201, in wait_for_pending
    return result.maybe_throw(callback=callback, propagate=propagate)
  File "/site-packages/celery/result.py", line 335, in maybe_throw
    self.throw(value, self._to_remote_traceback(tb))
  File "/site-packages/celery/result.py", line 328, in throw
    self.on_ready.throw(*args, **kwargs)
  File "/site-packages/vine/promises.py", line 234, in throw
    reraise(type(exc), exc, tb)
  File "/site-packages/vine/utils.py", line 30, in reraise
    raise value
AttributeError: 'NoneType' object has no attribute 'clone'
Process finished with exit code 1
And finally, what I really am trying to do is something like:
ultimate_meta_task = (
    select_items.s(items)
    | dynamic_map_group.s(process_item.s())
    | dynamic_map_chord.s(group_items.s())
)
print(ultimate_meta_task.apply_async().get())
but this leads to:
Traceback (most recent call last):
  File "chaintasks.py", line 77, in <module>
    print(ultimate_meta_task.apply_async().get())
  File "/site-packages/celery/result.py", line 223, in get
    return self.backend.wait_for_pending(
  File "/site-packages/celery/backends/asynchronous.py", line 199, in wait_for_pending
    for _ in self._wait_for_pending(result, **kwargs):
  File "/site-packages/celery/backends/asynchronous.py", line 265, in _wait_for_pending
    for _ in self.drain_events_until(
  File "/site-packages/celery/backends/asynchronous.py", line 58, in drain_events_until
    on_interval()
  File "/site-packages/vine/promises.py", line 160, in __call__
    return self.throw()
  File "/site-packages/vine/promises.py", line 157, in __call__
    retval = fun(*final_args, **final_kwargs)
  File "/site-packages/celery/result.py", line 236, in _maybe_reraise_parent_error
    node.maybe_throw()
  File "/site-packages/celery/result.py", line 335, in maybe_throw
    self.throw(value, self._to_remote_traceback(tb))
  File "/site-packages/celery/result.py", line 328, in throw
    self.on_ready.throw(*args, **kwargs)
  File "/site-packages/vine/promises.py", line 234, in throw
    reraise(type(exc), exc, tb)
  File "/site-packages/vine/utils.py", line 30, in reraise
    raise value
kombu.exceptions.EncodeError: TypeError('Object of type GroupResult is not JSON serializable')
Process finished with exit code 1
What I am trying to achieve here is an output looking like 'processed_item1:processed_item3'. Is this even doable with Celery? Any help is appreciated.

TypeError: unsupported operand type(s) for +: 'int' and 'RowProxy'

The purpose of this view is to read data from goodreads.com and from my database, do some calculation, and return the result in JSON format.
I have tried many ways to get the API to render, but with no luck.
I tried replacing the user input oneIsbn with the literal 'oneIsbn' in each of the listed SQL queries; what I get in my browser is just this:
{
  "Error": "Invalid ISBN 0380795272"
}
My code snippet:
@app.route("/api/<oneIsbn>", methods=["GET", "POST"])
@login_required
def api(oneIsbn):
    """Returns in JSON format for a single Book"""
    if request.method == "GET":
        check = db.execute("SELECT * FROM books WHERE isbn= :isbn",
                           {"isbn": oneIsbn}).fetchone()
        if check is None:
            return jsonify({"Error": f"Invalid ISBN {oneIsbn}"}), 405
        else:
            res = requests.get(
                "https://www.goodreads.com/book/review_counts.json",
                params={
                    "key": "x9fJg",
                    "isbns": oneIsbn})
            if res.status_code != 200:
                raise Exception("ERROR: API request unsuccessful.")
            else:
                data = res.json()
                y = data["books"][0]["work_reviews_count"]
                r = data["books"][0]["average_rating"]
                isbn = db.execute("SELECT isbn FROM books WHERE isbn = :isbn",
                                  {"isbn": oneIsbn}).fetchone()
                title = db.execute("SELECT title FROM books WHERE isbn = :isbn",
                                   {"isbn": oneIsbn}).fetchone()
                author = db.execute("SELECT author FROM books WHERE isbn = :isbn",
                                    {"isbn": oneIsbn}).fetchone()
                year = db.execute("SELECT year FROM books WHERE isbn = :isbn",
                                  {"isbn": oneIsbn}).fetchone()
                x = db.execute("SELECT COUNT(reviews) FROM reviews WHERE isbn = :isbn",
                               {"isbn": 'oneIsbn'}).fetchone()
                z = db.execute("SELECT rating FROM reviews WHERE isbn = :isbn",
                               {"isbn": oneIsbn}).fetchone()
                rev = int(y)
                revCount = int(x.count)
                bothReviewValue = sum((revCount, rev))
                # listRate = float(z)
                rat = float(r)
                bothRatingValue = sum([z, rat]) / 2
                return jsonify(
                    ISBN=isbn,
                    TITLE=title,
                    AUTHOR=author,
                    YEAR=year,
                    REVIEWS=bothReviewValue,
                    RATING=bothRatingValue
                ), 422
TRACEBACK:
TypeError: unsupported operand type(s) for +: 'int' and 'RowProxy'
Traceback (most recent call last):
  File "C:\Users\Beacon\AppData\Local\Programs\Python\Python38\Lib\site-packages\flask\app.py", line 2464, in __call__
    return self.wsgi_app(environ, start_response)
  File "C:\Users\Beacon\AppData\Local\Programs\Python\Python38\Lib\site-packages\flask\app.py", line 2450, in wsgi_app
    response = self.handle_exception(e)
  File "C:\Users\Beacon\AppData\Local\Programs\Python\Python38\Lib\site-packages\flask\app.py", line 1867, in handle_exception
    reraise(exc_type, exc_value, tb)
  File "C:\Users\Beacon\AppData\Local\Programs\Python\Python38\Lib\site-packages\flask\_compat.py", line 39, in reraise
    raise value
  File "C:\Users\Beacon\AppData\Local\Programs\Python\Python38\Lib\site-packages\flask\app.py", line 2447, in wsgi_app
    response = self.full_dispatch_request()
  File "C:\Users\Beacon\AppData\Local\Programs\Python\Python38\Lib\site-packages\flask\app.py", line 1952, in full_dispatch_request
    rv = self.handle_user_exception(e)
  File "C:\Users\Beacon\AppData\Local\Programs\Python\Python38\Lib\site-packages\flask\app.py", line 1821, in handle_user_exception
    reraise(exc_type, exc_value, tb)
  File "C:\Users\Beacon\AppData\Local\Programs\Python\Python38\Lib\site-packages\flask\_compat.py", line 39, in reraise
    raise value
  File "C:\Users\Beacon\AppData\Local\Programs\Python\Python38\Lib\site-packages\flask\app.py", line 1950, in full_dispatch_request
    rv = self.dispatch_request()
  File "C:\Users\Beacon\AppData\Local\Programs\Python\Python38\Lib\site-packages\flask\app.py", line 1936, in dispatch_request
    return self.view_functions[rule.endpoint](**req.view_args)
  File "C:\Users\Beacon\Desktop\THINKFUL DOCS\PYTHON\PYTHON WORKSPACE\project1\application.py", line 39, in wrapped_view
    return view(**kwargs)
  File "C:\Users\Beacon\Desktop\THINKFUL DOCS\PYTHON\PYTHON WORKSPACE\project1\application.py", line 233, in api
    bothRatingValue = sum([z,rat]) / 2
TypeError: unsupported operand type(s) for +: 'int' and 'RowProxy'
This is what ended up working for me. What I had missed: the values passed to sum() go in a tuple rather than a list, and the rating string pulled from the API has to be cast to float:
rev = int(y)
revCount = int(x.count)
bothReviewValue = sum((revCount,rev))
listRate = float(z)
rat = float(r)
bothRatingValue = sum((listRate,rat)) / 2
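In case float(z) still complains about the RowProxy, note that the original traceback is about adding an int to a whole row object: each fetchone() returns a RowProxy, not a number. A sketch of the same calculation with the scalars unwrapped first (assuming x and z are single-column rows, as in the queries above):

rev = int(y)              # review count string from the Goodreads JSON
rat = float(r)            # average rating string from the Goodreads JSON
revCount = int(x[0])      # COUNT(reviews) value pulled out of its RowProxy
listRate = float(z[0])    # rating value pulled out of its RowProxy
bothReviewValue = revCount + rev
bothRatingValue = (listRate + rat) / 2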

Socket, AttributeError: 'str' object has no attribute 'send'

If anyone actually reads this, thanks!
Anyway, on to the problem: every time I run my code I get an AttributeError and I can't find where the issue is. I'm using socket, tkinter, os and multiprocessing. Here's my code (I know it's not the most Pythonic thing in the world, but hey, I'm just playing with sockets):
#---Import statments---#
import socket, os, multiprocessing
import tkinter as tk

#---global variables---#
setup = ''
cleintsocket = ''

#---Defs---#
def setup():
    global host, port, user
    host = setup_host_box.get()
    port = setup_port_box.get()
    user = setup_user_box.get()

def connect(self, hostname, connectingport):
    self.connect((hostname, int(connectingport)))
    print('connected')
    multiprocessing.Process(target = resv()).start()

def create_sock(nhost, nport):
    global cleintsocket
    cleintsocket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    connect(cleintsocket, nhost, nport)

def send(username, cleintsock):
    '''to send a message'''
    usrmsg = (username + ' - ' + chat_msg_box.get()).encode()
    cleintsock.send(usrmsg)

def resv(sock):
    '''resive subscript, run through mutiprosses module'''
    while True:
        rmsg = sock.recv(1024).decode()
        chat_msg_display_text.insert('end.0.', rmsg)

def chat():
    '''loads chat page'''
    setup_host_text.pack_forget()
    setup_host_box.pack_forget()
    setup_port_text.pack_forget()
    setup_port_box.pack_forget()
    setup_user_text.pack_forget()
    setup_user_box.pack_forget()
    setup_confirm_button.pack_forget()
    chat_msg_desplay_text.pack()
    chat_msg_box.pack()
    chat_msg_send_button.pack()

def start():
    '''starts the setup page'''
    setup_host_text.pack()
    setup_host_box.pack()
    setup_port_text.pack()
    setup_port_box.pack()
    setup_user_text.pack()
    setup_user_box.pack()
    setup_confirm_button.pack()

#---TK Setup---#
#--window setup--#
window = tk.Tk()
window.title('Chat')
window.geometry('600x600')
window.configure(background='#ffffff')

#--connection setup page--#
setup_host_text = tk.Label(window, text = 'Host')
setup_host_box = tk.Entry(window, bg = '#ffffff')
setup_port_text = tk.Label(window, text = 'Port')
setup_port_box = tk.Entry(window, bg = '#ffffff')
setup_user_text = tk.Label(window, text = 'Username')
setup_user_box = tk.Entry(window, bg = '#ffffff')
setup_confirm_button = tk.Button(window,text = 'Connect', command = setup())

#--chat page--#
chat_msg_box = tk.Entry(window, bg='#ffffff')
chat_msg_send_button = tk.Button(window, text = 'send', command = send(user, cleintsocket))
chat_msg_display_text = tk.Text(window, width=600, height=500, wrap = 'word')

#--------------#
start()
The Python console says the problem is in chat_msg_send_button = tk.Button(window, text = 'send', command = send(user, cleintsocket)), which produces AttributeError: 'str' object has no attribute 'send', but I can't see anything wrong with it.
Please help.
Thanks again!
EDIT: Here's the error (not needed now, but for the record):
Traceback (most recent call last):
  File ".../tkcleint.py", line 76, in <module>
    chat_msg_send_button = tk.Button(window, text = 'send', command = send(user, cleintsocket))
  File ".../tkcleint.py", line 29, in send
    cleintsock.send(usrmsg)
AttributeError: 'str' object has no attribute 'send'
First off (as @R.Murry has pointed out), you are calling the functions immediately and passing their return value, which in this case is None, as the command, so I'd start by fixing that up:
setup_confirm_button = tk.Button(window, text = 'Connect', command = setup)  # don't call setup here
...
def send_button_callback():
    send(user, cleintsocket)

chat_msg_send_button = tk.Button(window, text = 'send', command = send_button_callback)
Next, it is important to include the whole error message in your question, because that is not the line that is actually running into problems:
Traceback (most recent call last):
  File ".../test.py", line 76, in <module>
    chat_msg_send_button = tk.Button(window, text = 'send', command = send(user, cleintsocket))
  File ".../test.py", line 29, in send
    cleintsock.send(usrmsg)
AttributeError: 'str' object has no attribute 'send'
You pass the variable cleintsocket into send and try to use a socket's .send method on it; however, it is initialized to an empty string:
cleintsocket = ''
so if you call send before it has been changed to a socket, you will get exactly that error. Simply check whether it has been initialized yet:
def send(username, cleintsock):
    '''to send a message'''
    if cleintsock:  # not an empty string
        usrmsg = (username + ' - ' + chat_msg_box.get()).encode()
        cleintsock.send(usrmsg)
    # else: window.bell()  # play an error beep
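If you prefer to keep it inline instead of defining a wrapper function, a lambda does the same job of deferring the call until the button is actually pressed (a sketch reusing the names from the code above):

chat_msg_send_button = tk.Button(
    window,
    text='send',
    command=lambda: send(user, cleintsocket),  # evaluated at click time, not at startup
)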

Getting Locust to log in to a web application

I want Locust to be able to log in to my web application and start clicking the links inside it.
With this code I only get activity for the front page with the login, and I don't see anything from inside the application.
Code:
import random
from locust import HttpLocust, TaskSet, task
from pyquery import PyQuery

class WalkPages(TaskSet):
    def on_start(self):
        self.client.post("/", {
            "UserName": "my@email.com",
            "Password": "2Password!",
            "submit": "Sign In"
        })
        self.index_page()

    @task(10)
    def index_page(self):
        r = self.client.get("/Dashboard.mvc")
        pq = PyQuery(r.content)
        link_elements = pq("a")
        self.urls_on_current_page = []
        for l in link_elements:
            if "href" in l.attrib:
                self.urls_on_current_page.append(l.attrib["href"])

    @task(30)
    def load_page(self):
        url = random.choice(self.urls_on_current_page)
        r = self.client.get(url)

class AwesomeUser(HttpLocust):
    task_set = WalkPages
    host = "https://myenv.beta.webapp.com"
    min_wait = 20 * 1000
    max_wait = 60 * 1000
I get the following messages in the terminal after the first round:
[2015-02-13 12:08:43,740] webapp-qa/ERROR/stderr: Traceback (most recent call last):
  File "/usr/local/lib/python2.7/dist-packages/locust/core.py", line 267, in run
    self.execute_next_task()
  File "/usr/local/lib/python2.7/dist-packages/locust/core.py", line 293, in execute_next_task
    self.execute_task(task["callable"], *task["args"], **task["kwargs"])
  File "/usr/local/lib/python2.7/dist-packages/locust/core.py", line 305, in execute_task
    task(self, *args, **kwargs)
  File "/home/webapp/LoadTest/locustfile.py", line 31, in load_page
    url = random.choice(self.urls_on_current_page)
  File "/usr/lib/python2.7/random.py", line 273, in choice
    return seq[int(self.random() * len(seq))]  # raises IndexError if seq is empty
IndexError: list index out of range
[The identical traceback is repeated two more times, at 12:08:43,752 and 12:08:43,775.]
Your list may be empty.
@task(30)
def load_page(self):
    if self.urls_on_current_page:
        url = random.choice(self.urls_on_current_page)
        r = self.client.get(url)
It has been a while, but someone may still need this. My findings in your code: the login request does not look correct (check mine), you cannot reach a variable defined inside one function from another function, and giving @task(10) to a data-setter function is not suitable. Set urls_on_current_page as a class variable so it can serve the other class members. See my code and the comments:
import random
from locust import HttpLocust, TaskSet, task
from pyquery import PyQuery

class WalkPages(TaskSet):
    # define the variable here to access it from inside the functions
    urls_on_current_page = []

    def login(self):
        self.client.post("/login", data={"UserName": "mesutgunes@email.com", "Password": "password"})

    def get_urls(self):
        r = self.client.get("/Dashboard.mvc")
        pq = PyQuery(r.content)
        link_elements = pq("a")
        for link in link_elements:
            # keep only in-app hrefs; there may be external links on the page
            if "href" in link.attrib and "http" not in link.attrib["href"]:
                self.urls_on_current_page.append(link.attrib["href"])

    def on_start(self):
        self.login()
        self.get_urls()

    @task(30)
    def load_page(self):
        url = random.choice(self.urls_on_current_page)
        r = self.client.get(url)

class AwesomeUser(HttpLocust):
    task_set = WalkPages
    host = "https://myenv.beta.webapp.com"
    min_wait = 20 * 1000
    max_wait = 60 * 1000