Cannot receive messages from queues when the Exchange object's type is "direct" - celery

When I change the "direct" type to "fanout" or "topic", it works, but with the "direct" type it does not.
I wrote the code following the example on the official website.
I also tried deleting the line "from __future__ import absolute_import, unicode_literals", but it still does not work.
queues.py
It works if I change the type to 'fanout' or 'topic'.
from __future__ import absolute_import, unicode_literals
from kombu import Queue, Exchange

exchange = Exchange('demo_exchange', type='topic')

demo_queues = [
    Queue('one', exchange, routing_key='o'),
    Queue('two', exchange, routing_key='t'),
    Queue('three', exchange, routing_key='th'),
]
worker.py
from __future__ import absolute_import, unicode_literals
from kombu.mixins import ConsumerMixin
from kombu import Connection
from queues import demo_queues


class Worker(ConsumerMixin):
    def __init__(self, connection):
        self.connection = connection

    def get_consumers(self, Consumer, channel):
        return [Consumer(queues=demo_queues, accept=['pickle', 'json'],
                         callbacks=[self.on_task])]

    def on_task(self, body, message):
        args = body['args']
        func = body['func']
        print(args, func)
        func()
        message.ack()


if __name__ == '__main__':
    with Connection('redis://localhost:6379/0') as conn:
        try:
            worker = Worker(conn)
            worker.run()
        except KeyboardInterrupt:
            print('bye')
client.py
from __future__ import absolute_import, unicode_literals
from kombu.pools import producers
from queues import exchange

priority_to_routing_key = {
    'high': 'o',
    'mid': 't',
    'low': 'th',
}


def send_tasks(conn, func, args, priority='high'):
    data = {'func': func, 'args': args}
    with producers[conn].acquire(block=True) as producer:
        routing_key = priority_to_routing_key[priority]
        producer.publish(data,
                         serializer='pickle',
                         exchange=exchange,
                         declare=[exchange],
                         routing_key=routing_key)


if __name__ == '__main__':
    from kombu import Connection
    from tasks import func_task

    connection = Connection('redis://localhost:6379/0')
    # note the trailing comma: ('test hello') is just a string, not a tuple
    send_tasks(connection, func=func_task, args=('test hello',), priority='mid')
tasks.py
def func_task(n='hello'):
    print(n, '---====')
I hope to work this out.

Related

locust 0.9 to 1.3 Exception: No tasks defined. use the @task decorator or set the tasks property of the User

I have the following code, which ran fine in locust 0.9. Now with 1.3 it throws the exception mentioned in the title. Can anyone see what's wrong?
import time
import random
import datetime
import requests
from requests.packages.urllib3.exceptions import InsecureRequestWarning
import logging
import json
import os
from random import randint, choice
from locust import HttpUser, TaskSet, task
from pyquery import PyQuery

requests.packages.urllib3.disable_warnings()


class FrontPage(TaskSet):
    def on_start(self):
        self.client.verify = False

    @task(20)
    def index(self):
        self.client.get("/")


class DestinationPagesFixed(TaskSet):
    de_paths = ["/belgien", "daenemark", "deutschland", "frankreich",
                "griechenland", "italien", "luxemburg"]

    def on_start(self):
        self.client.verify = False

    @task
    def test_1(self):
        paths = self.de_paths
        path = choice(paths)
        self.client.get(path, name="Static page")


class UserBehavior(TaskSet):
    tasks = {FrontPage: 15, DestinationPagesFixed: 19}


class WebsiteUser(HttpUser):
    task_set = UserBehavior
    min_wait = 400
    max_wait = 10000
Change
    task_set = UserBehavior
to
    tasks = [UserBehavior]
Or, skipping your UserBehavior class entirely:
    tasks = {FrontPage: 15, DestinationPagesFixed: 19}
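Put together, a minimal sketch of the fixed user class for locust 1.x (note that 1.x also deprecated min_wait/max_wait, given in milliseconds, in favour of wait_time in seconds; FrontPage and DestinationPagesFixed are the task sets from the question):

    from locust import HttpUser, between

    class WebsiteUser(HttpUser):
        # weighted task sets, assigned directly on the user class
        tasks = {FrontPage: 15, DestinationPagesFixed: 19}
        # replaces min_wait = 400 / max_wait = 10000
        wait_time = between(0.4, 10)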

Error in running Apache Beam Python SplittableDoFn

Error encountered while trying Pub/Sub IO with a splittable DoFn:
RuntimeError: Transform node
AppliedPTransform(ParDo(TestDoFn)/ProcessKeyedElements/GroupByKey/GroupByKey,
_GroupByKeyOnly) was not replaced as expected.
Can someone help me review the code for anything I might be doing incorrectly?
Code:
"""
python examples/test_restriction_unbounded.py --project mk2 --topic projects/mk2/topics/testing
"""
# pytype: skip-file
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import csv
import logging
import sys
import time
from datetime import datetime
import apache_beam as beam
from apache_beam.options.pipeline_options import GoogleCloudOptions
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.options.pipeline_options import StandardOptions
from apache_beam.io.restriction_trackers import OffsetRestrictionTracker, OffsetRange
from apache_beam.transforms.core import RestrictionProvider
class TestProvider(RestrictionProvider):
def initial_restriction(self, element):
return OffsetRange(0, 1)
def create_tracker(self, restriction):
return OffsetRestrictionTracker(restriction)
def restriction_size(self, element, restriction):
return restriction.size()
class TestDoFn(beam.DoFn):
def process(
self,
element,
restriction_tracker=beam.DoFn.RestrictionParam(
TestProvider())):
import pdb; pdb.set_trace()
cur = restriction_tracker.current_restriction().start
while restriction_tracker.try_claim(cur):
return element
def run(argv=None, save_main_session=True):
parser = argparse.ArgumentParser()
parser.add_argument('--topic', type=str, help='Pub/Sub topic to read from')
args, pipeline_args = parser.parse_known_args(argv)
options = PipelineOptions(pipeline_args)
options.view_as(StandardOptions).streaming = True
with beam.Pipeline(options=options) as p:
# data = ['abc', 'defghijklmno', 'pqrstuv', 'wxyz']
# actual = (p | beam.Create(data) | beam.ParDo(ExpandingStringsDoFn()))
scores = p | beam.io.ReadFromPubSub(topic=args.topic) | beam.ParDo(TestDoFn())
if __name__ == '__main__':
logging.getLogger().setLevel(logging.INFO)
run()
You are ingesting data from Pub/Sub as a stream, so you have to batch the elements into windows before applying this kind of transform: (ParDo(TestDoFn)/ProcessKeyedElements/GroupByKey/GroupByKey, _GroupByKeyOnly).
Pub/Sub with window example: https://cloud.google.com/pubsub/docs/pubsub-dataflow
Try something like this:
from apache_beam.transforms import window  # needed for window.FixedWindows


class GroupWindowsIntoBatches(beam.PTransform):
    """A composite transform that groups Pub/Sub messages."""

    def __init__(self, window_size):
        # Convert minutes into seconds.
        self.window_size = int(window_size * 60)

    def expand(self, pcoll):
        return (
            pcoll
            # Assigns window info to each Pub/Sub message based on its
            # publish timestamp.
            | "Window into Fixed Intervals"
            >> beam.WindowInto(window.FixedWindows(self.window_size))
        )


def run(argv=None, save_main_session=True):
    parser = argparse.ArgumentParser()
    parser.add_argument('--topic', type=str, help='Pub/Sub topic to read from')
    args, pipeline_args = parser.parse_known_args(argv)
    options = PipelineOptions(pipeline_args)
    options.view_as(StandardOptions).streaming = True
    window_size = 1.0
    with beam.Pipeline(options=options) as p:
        scores = (p
                  | beam.io.ReadFromPubSub(topic=args.topic)
                  | "WindowInto" >> GroupWindowsIntoBatches(window_size)
                  | beam.ParDo(TestDoFn())
                  )
I had the same error. Removing the streaming option solved the problem for me.
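For reference, that workaround amounts to a one-line change in the question's run(): dropping the line that forces streaming mode.

    options = PipelineOptions(pipeline_args)
    # options.view_as(StandardOptions).streaming = True  # <- removed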

Getting error while trying to insert data into MongoDB

I am trying to insert data into MongoDB using Play (Scala) and ReactiveMongo.
Here is my DbimpService.scala:
package services

import models.Post
import reactivemongo.bson.BSONDocument
import reactivemongo.api.MongoDriver
import reactivemongo.api.collections.bson.BSONCollection
import scala.concurrent.ExecutionContext
import javax.inject.Inject
import play.api.libs.json.Json
import reactivemongo.play.json.collection.JSONCollection
import reactivemongo.api.commands.WriteResult
import scala.concurrent.Future
import org.apache.xerces.util.DatatypeMessageFormatter

class Dbimpservice @Inject() (implicit ec: ExecutionContext) extends Dbservice {

  def create(p: Post): String = {
    var status = "Not Saved"
    val driver = new MongoDriver
    val connection = driver.connection(List("localhost"))
    val db = connection("application")
    val collection = db[BSONCollection]("post")
    val futureList = collection.insert[Post](p)
    futureList.onComplete { case success => println(success) }
    return status
  }
}
Here is my HomeController.scala:
package controllers

import javax.inject._
import play.api._
import play.api.mvc._
import models._
import scala.util.{ Failure, Success }
import scala.concurrent.Future
import scala.concurrent.ExecutionContext.Implicits.global
import reactivemongo.api.{ MongoDriver, MongoConnection }
import reactivemongo.play.json.collection.JSONCollection
import reactivemongo.bson.BSONDocument
import reactivemongo.api.commands.WriteResult
import reactivemongo.api.collections.bson.BSONCollection
import play.api.libs.json.Json
import services.Dbservice
import services.Dbimpservice
import scala.concurrent.ExecutionContext
import scala.concurrent.Await
import scala.concurrent.duration.Duration

/**
 * This controller creates an `Action` to handle HTTP requests to the
 * application's home page.
 */
@Singleton
class HomeController @Inject() (implicit ec: ExecutionContext, val Dbservice: Dbimpservice) extends Controller {

  /**
   * Create an Action to render an HTML page with a welcome message.
   * The configuration in the `routes` file means that this method
   * will be called when the application receives a `GET` request with
   * a path of `/`.
   */
  def index = Action {
    Ok("Hai")
  }

  def read = Action.async {
    val query = BSONDocument()
    val driver = new MongoDriver
    val connection = driver.connection(List("localhost:27017"))
    val db = connection("application")
    val collection = db[BSONCollection]("post")
    val futureList = collection.find(query).cursor[Post]().collect[List]()
    futureList.map { list =>
      Ok(list.toString())
    }
  }

  def create = Action(BodyParsers.parse.json) { request =>
    val personResult = request.body.validate[Post]
    personResult.fold(
      errors => {
        BadRequest(Json.obj("status " -> "ERROR"))
      },
      valid = fun
    )
  }

  def fun: Post => Result = { post =>
    var ans = Dbservice.create(post)
    Ok(ans)
  }
}
I am trying to insert the data, but nothing gets inserted, and the error I am getting is
    Failure(reactivemongo.core.errors.ConnectionNotInitialized: MongoError['Connection is missing metadata (like protocol version, etc.) The connection pool is probably being initialized.'])
Can someone please help me? I even referred to
http://stackoverflow.com/questions/31456517/embedmongo-with-reactivemongo-process-does-not-exit
but did not get a solution.
Guessing that you are using a recent version of ReactiveMongo (0.11.7+), you are using the deprecated DB resolution code (connection(dbName), a.k.a. connection.apply(dbName)).
You need to use the asynchronous resolution, which benefits from the failover handling (to cope with possible network latency/incidents). The following code must therefore be refactored:
    val db = connection("application")
    val collection = db[BSONCollection]("post")
    val futureList = collection.insert[Post](p)
Using the new DB resolution:
    for {
      db <- connection.database("application")
      collection = db("post")
      res <- collection.insert(p)
    } yield res
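Put into the question's service, the refactored create might look roughly like this (a sketch, not a drop-in: it assumes ReactiveMongo 0.11.7+ and an implicit BSONDocumentWriter[Post] in scope, and it returns the Future instead of a status string):

    def create(p: Post): Future[WriteResult] =
      for {
        // resolve the DB asynchronously instead of connection("application")
        db <- connection.database("application")
        res <- db.collection[BSONCollection]("post").insert(p)
      } yield res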

unable to get the value of enumerator in Play 2.5 websocktes

package controllers

import javax.inject._
import play.api._
import play.api.mvc._
import play.api.libs.json._
import play.api.libs.streams._
import akka.stream._
import akka.actor._
import akka.actor.Actor
import akka.actor.ActorSystem
import akka.actor.ActorRef
import akka.actor.Props
import akka.pattern.ask
import akka.util.Timeout
import akka.actor.PoisonPill
import scala.concurrent.duration._
import akka.stream.Materializer
import play.api.cache._
import play.api.libs.iteratee._
import play.api.libs.concurrent.Execution.Implicits.defaultContext
import play.api.libs.concurrent._
import play.api.libs.ws.WSClient

/**
 * This controller creates an `Action` to handle HTTP requests to the
 * application's home page.
 */
@Singleton
class HomeController @Inject() (cache: CacheApi)(implicit actorSystem: ActorSystem, materializer: Materializer) extends Controller {

  def validate(receivedMsg: JsValue, outChannel: Concurrent.Channel[JsValue],
               privateChannel: Concurrent.Channel[JsValue],
               outEnumerator: Enumerator[JsValue],
               privateEnumerator: Enumerator[JsValue]) = {
    val user_key = (receivedMsg \ "username").get
    val username = user_key.toString().stripSuffix("\"").stripPrefix("\"")
    val validate_user: Option[String] = cache.get[String](username)
    val valid_result = validate_user.toString()
    if (valid_result.equals("None")) {
      // cache is not set: add user
      cache.set(username, username)
      Ok.withSession(username -> username)
      // notify all users
      val successMsg = Json.parse("""{"username":"Server","message":"A new user has been connected"}""")
      outChannel.push(successMsg)
      (outEnumerator)
    } else {
      // cache is already set: send error msg to new user
      val errorMsg = Json.parse("""{"username":"Server","message":"This username is already taken"}""")
      // val (privateEnumerator, privateChannel) = Concurrent.broadcast[JsValue]
      privateChannel.push(errorMsg)
      (privateEnumerator)
    }
  }

  val (outEnumerator, outChannel) = Concurrent.broadcast[JsValue] // public stuff

  def socket = WebSocket.using[JsValue] { request =>
    val (privateEnumerator, privateChannel) = Concurrent.broadcast[JsValue]
    var enumerator = privateEnumerator
    var ret = 1
    val inIteratee: Iteratee[JsValue, Unit] = Iteratee.foreach[JsValue](receivedMsg => {
      enumerator = validate(receivedMsg, outChannel, privateChannel, outEnumerator, privateEnumerator)
    })
    (inIteratee, enumerator)
  }
}
I am new to Scala and Play WebSockets. I am working in Play 2.5.3. Depending on the situation, I am trying to return the enumerator of either the private channel or the public channel (i.e. for all connected users). But even though validate returns it correctly, the updated value never reaches the returned (inIteratee, enumerator) pair. What am I doing wrong?
The second answer to this post (Broadcasting messages in Play Framework WebSockets) does the same thing.
Create an actor, say UserManagerActor, whose sole purpose is to manage the users and maintain their state.
UserManagerActor calls broadcast:
    val (enumerator, channel) = Concurrent.broadcast[String]
channel helps broadcast messages to all the users at once via its push method.
The actor can then track the state of the users in a Map:
    val users = Map[String, (Enumerator[String], Channel[String])]()
Ensure the actor is killed once all users have disconnected; use an Iteratee to detect when a user disconnects, and remove disconnected users to keep the size of the Map manageable. A sketch of this pattern follows below.
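A minimal sketch of that manager actor under the Play 2.5 iteratee API (the Join/Leave/Broadcast message types and all names here are illustrative, not from any library):

    import akka.actor.{ Actor, PoisonPill }
    import play.api.libs.iteratee.{ Concurrent, Enumerator }

    // Illustrative protocol; define whatever messages fit your app.
    case class Join(name: String)
    case class Leave(name: String)
    case class Broadcast(msg: String)

    class UserManagerActor extends Actor {
      // One shared enumerator/channel pair for messages to every user.
      val (enumerator, channel) = Concurrent.broadcast[String]
      // Per-user enumerator/channel pairs, keyed by username.
      var users = Map.empty[String, (Enumerator[String], Concurrent.Channel[String])]

      def receive = {
        case Join(name) =>
          users += name -> Concurrent.broadcast[String]
        case Broadcast(msg) =>
          channel.push(msg) // a single push reaches all connected users
        case Leave(name) =>
          users -= name
          if (users.isEmpty) self ! PoisonPill // kill the actor when everyone is gone
      }
    }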

Spray cache for GET service?

I am using cache headers like no-cache and no-store; I don't know how to do application-level caching (maybe I need some documentation here).
I print the data result to the console when I call the method on MongoDB, but it only works once after I run my app (that occurs in the GET service); the second time my app prints nothing, that is, it doesn't call the method. That happens when I try to get the list of users a second time, for example after a POST that inserts a new user. I need to see the changes from the DB in the frontend, but my app seems to get the data from the cache and never calls the method to fetch the users again.
The code I use in my Spray Scala service is:
package api

import spray.routing.Directives
import akka.actor.ActorRef
import spray.http.MediaTypes._
import core.UserRegister
import core.User
import scala.concurrent.ExecutionContext
import core.{ User, LoginActor }
import akka.util.Timeout
import LoginActor._
import spray.http._
import scala.Some
import spray.json.JsonFormat
import spray.json.RootJsonFormat
import spray.json.JsArray
import spray.json.CollectionFormats
import spray.json._
import DefaultJsonProtocol._
import scala.util.parsing.json.JSONArray
import scala.util.parsing.json._
import data.UserDao
import core.UserListActor
import spray.routing.Route
import core.CoreActors
import spray.routing.HttpService
import core.Core
import akka.actor.{ Props, ActorRefFactory, ActorSystem }
import akka.actor.ActorContext
import spray.routing.HttpServiceActor
import spray.http.HttpHeaders.RawHeader
import scala.concurrent.duration.Duration
import spray.routing.authentication.BasicAuth
import spray.routing.directives.CachingDirectives._
import spray.httpx.encoding._
import spray.caching._
import spray.caching.{ LruCache, Cache }
import web.StaticResources
import scala.concurrent.Future

class ListarUsuarioService(listaUsuario: ActorRef)(implicit executionContext: ExecutionContext)
    extends Directives with DefaultJsonFormats with SprayCORSsupport with CORSSupport {

  import akka.pattern.ask
  import scala.concurrent.duration._

  implicit val userFormat = jsonFormat2(User)
  implicit val registerFormat = jsonFormat1(Register)
  implicit val userRegisterFormat = jsonFormat5(UserRegister)
  implicit val registeredFormat = jsonObjectFormat[Registered.type]
  implicit val notRegisteredFormat = jsonObjectFormat[NotRegistered.type]

  implicit val system = ActorSystem()
  import system.dispatcher

  lazy val simpleRouteCache = routeCache()
  lazy val simpleCache = routeCache(maxCapacity = 5000, timeToIdle = 0.001 hour)
  //lazy val cache = LruCache()

  def routeCache(maxCapacity: Int = 2000, initialCapacity: Int = 100,
                 timeToLive: Duration = 5 seconds,
                 timeToIdle: Duration = Duration.Inf): Cache[RouteResponse] =
    LruCache(maxCapacity, initialCapacity, timeToLive, timeToIdle)

  // and a Cache for its result type
  val cache2: Cache[Double] = LruCache()

  val listaUsuariosroute: Route =
    cache(routeCache()) {
      cors {
        addCORSDefaultSupport() {
          path("usuario") {
            get {
              respondWithMediaType(`application/json`) {
                _.complete {
                  // element of the list
                  // listaUsuarios(1)
                  UserListActor.listaUsuarios.toJson.convertTo[JsArray].prettyPrint
                }
              }
            }
          }
        }
      } // cors
    }
}
I am using CORS and cache headers like no-store, public and no-cache, but it doesn't work; I even cleared my browser cache, but that doesn't work either.