Python kombu.Producer() Examples
The following are 12 code examples of kombu.Producer(). You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may also want to check out all available functions/classes of the module kombu, or try the search function.
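Before the project examples below, here is a minimal, self-contained sketch of the typical kombu.Producer workflow: open a connection, declare an exchange and queue, then publish. The broker URL, exchange, queue, and routing-key names are placeholders chosen for illustration, not taken from any of the projects listed here.

from kombu import Connection, Exchange, Queue, Producer

# Placeholder broker URL and names -- adjust for your environment.
broker_url = 'amqp://guest:guest@localhost:5672//'
exchange = Exchange('example_exchange', type='direct')
queue = Queue('example_queue', exchange=exchange, routing_key='example_key')

with Connection(broker_url) as conn:
    with conn.channel() as channel:
        # Bind the producer to the channel and exchange, then publish a
        # JSON-serialized message; declare=[queue] ensures the queue exists.
        producer = Producer(channel, exchange=exchange, serializer='json')
        producer.publish(
            {'hello': 'world'},
            routing_key='example_key',
            declare=[queue],
            retry=True,
        )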
Example #1
Source File: alerta_amqp.py From alerta-contrib with MIT License | 6 votes |
def __init__(self, name=None):
    if app.config['DEBUG']:
        setup_logging(loglevel='DEBUG', loggers=[''])

    self.connection = BrokerConnection(AMQP_URL)
    try:
        self.connection.connect()
    except Exception as e:
        LOG.error('Failed to connect to AMQP transport %s: %s', AMQP_URL, e)
        raise RuntimeError

    self.channel = self.connection.channel()
    self.exchange_name = AMQP_TOPIC
    self.exchange = Exchange(name=self.exchange_name, type='fanout', channel=self.channel)
    self.producer = Producer(exchange=self.exchange, channel=self.channel)

    super(FanoutPublisher, self).__init__(name)

    LOG.info('Configured fanout publisher on topic "%s"', AMQP_TOPIC)
Example #2
Source File: check.py From data_integration_celery with GNU General Public License v3.0 | 6 votes |
def test(url):
    from kombu import Exchange, Queue, Connection, Consumer, Producer
    task_queue = Queue('tasks', exchange=Exchange('celery', type='direct'), routing_key='tasks')

    # Producer
    with Connection(url) as conn:
        with conn.channel() as channel:
            producer = Producer(channel)
            producer.publish(
                {'hello': 'world'},
                retry=True,
                exchange=task_queue.exchange,
                routing_key=task_queue.routing_key,
                declare=[task_queue],
            )

    def get_message(body, message):
        print("receive message: %s" % body)
        # message.ack()

    # Consumer
    with Connection(url) as conn:
        with conn.channel() as channel:
            consumer = Consumer(channel, queues=task_queue, callbacks=[get_message, ], prefetch_count=10)
            consumer.consume(no_ack=True)
Example #3
Source File: notify.py From commissaire with GNU General Public License v3.0 | 6 votes |
def connect(self, exchange, channel):  # pragma: no cover
    """
    Readies the StorageNotify for publishing notification messages by
    setting up a kombu.Producer.

    :param exchange: The exchange for publishing notifications.
    :type exchange: kombu.Exchange
    :param channel: The channel to bind to.
    :type channel: kombu.transport.base.StdChannel
    """
    name = self.__class__.__name__
    self.logger.debug('Connecting {}'.format(name))

    self._queue = kombu.Queue(exchange=exchange, channel=channel)
    self._queue.declare()
    self._producer = kombu.Producer(channel, exchange)
Example #4
Source File: logger.py From KubeOperator with Apache License 2.0 | 5 votes |
def producer(self):
    return Producer(self.connection)
Example #5
Source File: queue_manager.py From GloboNetworkAPI with Apache License 2.0 | 5 votes |
def send(self):
    try:
        # Connection
        conn = Connection(self.broker)

        # Channel
        channel = conn.channel()

        # Exchange
        task_exchange = Exchange(self._exchange_name, type=self._queue_type)

        # Queues
        if self._queue_name:
            queue = Queue(name=self._queue_name, channel=channel,
                          exchange=task_exchange, routing_key=self._routing_key)
            queue.declare()

        # Producer
        producer = Producer(exchange=task_exchange, channel=channel,
                            routing_key=self._routing_key)

        # Send message
        for message in self._msgs:
            serialized_message = json.dumps(message, ensure_ascii=False)
            producer.publish(serialized_message)

        conn.close()
    except Exception, e:
        self.log.error(
            u'QueueManagerError - Error on sending objects from queue.')
        self.log.debug(e)
        raise Exception(
            'QueueManagerError - Error on sending objects to queue.')
Example #6
Source File: base.py From celery-message-consumer with Apache License 2.0 | 5 votes |
def get_producer(self, handler, routing_key=None):
    return kombu.Producer(
        handler.channel,
        exchange=handler.exchanges[handler.exchange],
        routing_key=handler.routing_key if routing_key is None else routing_key,
        serializer='json'
    )
Example #7
Source File: check.py From data_integration_celery with GNU General Public License v3.0 | 5 votes |
def sender(url):
    logger.info("start sender")
    with Connection(url) as conn:
        with conn.channel() as channel:
            # producer = Producer(channel)
            producer = channel.Producer()
            while True:
                message = time.strftime('%H:%M:%S', time.localtime())
                producer.publish(
                    body=message,
                    retry=True,
                    exchange='celery',
                    routing_key='rkeytest'
                )
                logger.info('send message: %s' % message)
                while True:
                    # Re-declare the queue passively to refresh the message count
                    queue = channel.queue_declare(queue='queuetest', passive=True)
                    messageCount = queue.message_count
                    logger.info('messageCount: %d' % messageCount)
                    if messageCount < 100:
                        time.sleep(0.5)
                        break
                    time.sleep(1)
Example #8
Source File: pulse.py From jx-sqlite with Mozilla Public License 2.0 | 5 votes |
def send(self, topic, message):
    """Publishes a pulse message to the proper exchange."""
    if not message:
        Log.error("Expecting a message")

    message._prepare()

    if not self.connection:
        self.connect()

    producer = Producer(
        channel=self.connection,
        exchange=Exchange(self.settings.exchange, type='topic'),
        routing_key=topic
    )

    # The message is actually a simple envelope format with a payload and
    # some metadata.
    final_data = Data(
        payload=message.data,
        _meta=set_default({
            'exchange': self.settings.exchange,
            'routing_key': message.routing_key,
            'serializer': self.settings.serializer,
            'sent': time_to_string(datetime.datetime.now(timezone(self.settings.broker_timezone))),
            'count': self.count
        }, message.metadata)
    )

    producer.publish(jsons.scrub(final_data), serializer=self.settings.serializer)
    self.count += 1
Example #9
Source File: kombu_manager.py From python-socketio with MIT License | 5 votes |
def _producer(self):
    return self._connection().Producer(exchange=self._exchange(),
                                       **self.producer_options)
Example #10
Source File: amqp_source.py From RackHD with Apache License 2.0 | 5 votes |
def inject(self, exchange, routing_key, payload):
    self.__logs.irl.debug(
        "Injecting a test AMQP message: ex=%s, rk=%s, payload=%s",
        exchange, routing_key, payload)
    if not isinstance(exchange, Exchange):
        exchange = Exchange(exchange, 'topic')
    prod = Producer(self.__connection, exchange=exchange, routing_key=routing_key)
    prod.publish(payload)
Example #11
Source File: amqp_source.py From RackHD with Apache License 2.0 | 5 votes |
def test_helper_sync_send_msg(self, exchange, ex_rk, send_rk, payload):
    ex = Exchange(exchange, 'topic')
    queue = Queue(exchange=ex, routing_key=ex_rk + '.*', exclusive=True,
                  channel=self.__connection)
    queue.declare()
    prod = Producer(self.__connection, exchange=ex, routing_key=send_rk)
    prod.publish(payload)
    return queue
Example #12
Source File: base.py From celery-message-consumer with Apache License 2.0 | 4 votes |
def setUp(self):
    super(BaseRetryHandlerIntegrationTest, self).setUp()
    # NOTE:
    # must be a real rabbitmq instance, we rely on rabbitmq
    # features (dead-letter exchange) for our retry queue logic
    self.connection = kombu.Connection(
        settings.BROKER_URL,
        connect_timeout=1,
    )
    self.connection.ensure_connection()
    self.connection.connect()
    self.channel = self.connection.channel()

    self.handler = AMQPRetryHandler(
        self.channel,
        routing_key=self.routing_key,
        queue=self.routing_key,
        exchange=self.exchange,
        queue_arguments={},
        func=lambda body: None,
        backoff_func=lambda attempt: 0,
    )
    self.handler.declare_queues()

    queues = [
        self.handler.worker_queue,
        self.handler.retry_queue,
        self.handler.archive_queue,
    ]
    for queue in queues:
        queue.purge()

    self.archive_consumer = kombu.Consumer(
        channel=self.channel,
        queues=[self.handler.archive_queue],
        callbacks=[self.handle_archive]
    )
    for consumer in [self.handler.consumer, self.archive_consumer]:
        consumer.consume()

    self.producer = kombu.Producer(
        self.channel,
        exchange=self.handler.exchanges[self.handler.exchange],
        routing_key=self.routing_key,
        serializer='json'
    )

    self.archives = []