Python kombu.Exchange() Examples

The following are 30 code examples of kombu.Exchange(), drawn from open-source projects. Each example lists its source file, originating project, and license. You may also want to check out the other available functions and classes of the kombu module.
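For orientation, here is a minimal sketch of the pattern most of these examples share: declare an Exchange, bind a Queue to it, then publish and consume through a Connection. The broker URL, exchange name, and queue name below are illustrative assumptions, not taken from any of the examples.

import kombu

# A minimal sketch, assuming a local RabbitMQ broker reachable at this URL.
exchange = kombu.Exchange('example_exchange', type='direct', durable=False)
queue = kombu.Queue('example_queue', exchange=exchange, routing_key='example')

def on_message(body, message):
    # Print and acknowledge each received message.
    print('received:', body)
    message.ack()

with kombu.Connection('amqp://guest:guest@localhost:5672//') as conn:
    # Publish a JSON message; declare=[queue] creates the exchange, the queue,
    # and the binding between them if they do not already exist.
    producer = conn.Producer(serializer='json')
    producer.publish({'hello': 'world'},
                     exchange=exchange,
                     routing_key='example',
                     declare=[queue])

    # Consume the message back; drain_events raises socket.timeout if nothing
    # arrives within the timeout.
    with conn.Consumer(queues=[queue], callbacks=[on_message], accept=['json']):
        conn.drain_events(timeout=5)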
Example #1
Source File: notify.py    From commissaire with GNU General Public License v3.0
def _publish(self, event, model_instance):
        """
        Internal function to publish "created", "deleted", and "updated"
        notification messages.

        :param event: The event name ("created", "deleted", or "updated")
        :type event: str
        :param model_instance: The model instance upon which the event occurred
        :type model_instance: commissaire.model.Model
        """
        class_name = model_instance.__class__.__name__
        body = {
            'event': event,
            'class': class_name,
            'model': model_instance.to_dict_safe()
        }
        routing_key = 'notify.storage.{}.{}'.format(class_name, event)
        if self._producer:
            self.logger.debug('Publish "{}": {}'.format(routing_key, body))
            self._producer.publish(
                body, routing_key,
                kombu.Exchange.TRANSIENT_DELIVERY_MODE)
        else:
            # This means the connect() method was not called.
            self.logger.warn('Not publishing "%s"', routing_key) 
Example #2
Source File: controller.py    From umbra with Apache License 2.0
def start(self):
        self._browsing_threads = set()
        self._browsing_threads_lock = threading.Lock()

        self._exchange = kombu.Exchange(name=self.exchange_name, type='direct',
                durable=True)

        self._reconnect_requested = False

        self._producer = None
        self._producer_lock = threading.Lock()
        with self._producer_lock:
            self._producer_conn = kombu.Connection(self.amqp_url)
            self._producer = self._producer_conn.Producer(serializer='json')

        self._consumer_thread = threading.Thread(target=self._consume_amqp, name='AmqpConsumerThread')
        self._consumer_stop = threading.Event()
        self._consumer_thread.start() 
Example #3
Source File: stream.py    From memex-explorer with BSD 2-Clause "Simplified" License
def __init__(self, crawl_name, num_urls=DEFAULT_NUM_URLS):
        """
        Create a NutchUrlTrails instance for visualizing a running Nutch crawl in real-time using Bokeh
        :param crawl_name: The name of the crawl (as identified by the queue)
        :param num_urls: The number of URLs to display in the visualization
        :return: A NutchUrlTrails instance
        """
        self.crawl_name = crawl_name
        self.num_urls = num_urls
        self.open_urls = {}
        self.closed_urls = {}
        self.old_segments = None
        self.old_circles = None
        
        self.session = Session()
        self.session.use_doc(self.crawl_name)
        self.document = Document()

        con = Connection()

        exchange = Exchange(EXCHANGE_NAME, 'direct', durable=False)
        queue = Queue(crawl_name, exchange=exchange, routing_key=crawl_name)
        self.queue = con.SimpleQueue(name=queue) 
Example #4
Source File: suite_helper.py    From MozDef with Mozilla Public License 2.0
def setup_rabbitmq_client(options):
    global RABBITMQ_CLIENT
    try:
        RABBITMQ_CLIENT
    except NameError:
        mqConnString = 'amqp://{0}:{1}@{2}:{3}//'.format(
            options.mquser,
            options.mqpassword,
            options.mqalertserver,
            options.mqport
        )
        mqAlertConn = Connection(mqConnString)
        alertExchange = Exchange(name=options.alertExchange, type='topic', durable=True, delivery_mode=1)
        alertExchange(mqAlertConn).declare()

        alertQueue = Queue(options.queueName,
                           exchange=alertExchange,
                           routing_key=options.alerttopic,
                           durable=False,
                           no_ack=(not options.mqack))
        alertQueue(mqAlertConn).declare()

        RABBITMQ_CLIENT = mqAlertConn.Consumer(alertQueue, accept=['json'])
    return RABBITMQ_CLIENT 
Example #5
Source File: main.py    From banzai with GNU General Public License v3.0
def start_listener(runtime_context):
    # Need to keep the amqp logger level at least as high as INFO,
    # or else it sends heartbeat check messages every second
    logging.getLogger('amqp').setLevel(max(logger.level, getattr(logging, 'INFO')))
    logger.info('Starting pipeline listener')

    fits_exchange = Exchange(runtime_context.FITS_EXCHANGE, type='fanout')
    listener = RealtimeModeListener(runtime_context)

    with Connection(runtime_context.broker_url) as connection:
        listener.connection = connection.clone()
        listener.queue = Queue(runtime_context.queue_name, fits_exchange)
        try:
            listener.run()
        except listener.connection.connection_errors:
            listener.connection = connection.clone()
            listener.ensure_connection(max_retries=10)
        except KeyboardInterrupt:
            logger.info('Shutting down pipeline listener.') 
Example #6
Source File: check.py    From data_integration_celery with GNU General Public License v3.0
def test(url):
    from kombu import Exchange, Queue, Connection, Consumer, Producer
    task_queue = Queue('tasks', exchange=Exchange('celery', type='direct'), routing_key='tasks')
    # producer
    with Connection(url) as conn:
        with conn.channel() as channel:
            producer = Producer(channel)
            producer.publish({'hello': 'world'},
                             retry=True,
                             exchange=task_queue.exchange,
                             routing_key=task_queue.routing_key,
                             declare=[task_queue])

    def get_message(body, message):
        print("receive message: %s" % body)
        # message.ack()

    # consumer
    with Connection(url) as conn:
        with conn.channel() as channel:
            consumer = Consumer(channel, queues=task_queue, callbacks=[get_message, ], prefetch_count=10)
            consumer.consume(no_ack=True) 
Example #7
Source File: amqp_source.py    From RackHD with Apache License 2.0
def __init__(self, logs, connection, name, exchange, routing_key, queue_name):
        self.__logs = logs
        self.__ignore_some_stuff = False
        self.name = name
        self.__event_callbacks = []
        if queue_name is None:
            queue_name = ''
            exclusive = True
        else:
            exclusive = False
        chan = connection.channel()
        ex = Exchange(exchange, 'topic', channel=chan)
        queue = Queue(exchange=ex, routing_key=routing_key, exclusive=exclusive)
        consumer = Consumer(chan, queues=[queue], callbacks=[self.__message_cb])
        consumer.consume()
        self.exchange = ex 
Example #8
Source File: alerta_amqp.py    From alerta-contrib with MIT License
def __init__(self, name=None):
        if app.config['DEBUG']:
            setup_logging(loglevel='DEBUG', loggers=[''])

        self.connection = BrokerConnection(AMQP_URL)
        try:
            self.connection.connect()
        except Exception as e:
            LOG.error('Failed to connect to AMQP transport %s: %s', AMQP_URL, e)
            raise RuntimeError

        self.channel = self.connection.channel()
        self.exchange_name = AMQP_TOPIC

        self.exchange = Exchange(name=self.exchange_name, type='fanout', channel=self.channel)
        self.producer = Producer(exchange=self.exchange, channel=self.channel)

        super(FanoutPublisher, self).__init__(name)

        LOG.info('Configured fanout publisher on topic "%s"', AMQP_TOPIC) 
Example #9
Source File: listener.py    From alerta-contrib with MIT License
def get_consumers(self, Consumer, channel):
        exchange = Exchange(
            name=AMQP_TOPIC,
            type='fanout',
            channel=self.channel,
            durable=True
        )
        queues = [
            Queue(
                name='',
                exchange=exchange,
                routing_key='',
                channel=self.channel,
                exclusive=True
            )
        ]
        return [
            Consumer(queues=queues, accept=['json'], callbacks=[self.on_message])
        ] 
Example #10
Source File: mailer.py    From alerta-contrib with MIT License
def get_consumers(self, Consumer, channel):

        exchange = Exchange(
            name=OPTIONS['amqp_topic'],
            type='fanout',
            channel=self.channel,
            durable=True
        )

        queues = [
            Queue(
                name=OPTIONS['amqp_queue_name'],
                exchange=exchange,
                routing_key='',
                channel=self.channel,
                exclusive=OPTIONS['amqp_queue_exclusive']
            )
        ]

        return [
            Consumer(queues=queues, accept=['json'],
                     callbacks=[self.on_message])
        ] 
Example #11
Source File: notify.py    From commissaire with GNU General Public License v3.0
def connect(self, exchange, channel):  # pragma: no cover
        """
        Readies the StorageNotify for publishing notification messages by
        setting up a kombu.Producer.

        :param exchange: The exchange for publishing notifications.
        :type exchange: kombu.Exchange
        :param channel: The channel to bind to.
        :type channel: kombu.transport.base.StdChannel
        """
        name = self.__class__.__name__
        self.logger.debug('Connecting {}'.format(name))

        self._queue = kombu.Queue(exchange=exchange, channel=channel)
        self._queue.declare()

        self._producer = kombu.Producer(channel, exchange) 
Example #12
Source File: amqp_source.py    From RackHD with Apache License 2.0
def test_helper_sync_send_msg(self, exchange, ex_rk, send_rk, payload):
        ex = Exchange(exchange, 'topic')
        queue = Queue(exchange=ex, routing_key=ex_rk + '.*', exclusive=True, channel=self.__connection)
        queue.declare()
        prod = Producer(self.__connection, exchange=ex, routing_key=send_rk)
        prod.publish(payload)
        return queue 
Example #13
Source File: amqp_source.py    From RackHD with Apache License 2.0
def inject(self, exchange, routing_key, payload):
        self.__logs.irl.debug("Injecting a test AMQP message: ex=%s, rk=%s, payload=%s", exchange, routing_key, payload)
        if not isinstance(exchange, Exchange):
            exchange = Exchange(exchange, 'topic')
        prod = Producer(self.__connection, exchange=exchange, routing_key=routing_key)
        prod.publish(payload) 
Example #14
Source File: mozdefbot.py    From MozDef with Mozilla Public License 2.0
def consume_alerts(bot):
    # connect and declare the message queue/kombu objects.
    # server/exchange/queue
    mq_conn_str = 'amqp://{0}:{1}@{2}:{3}//'.format(
        options.mq_user,
        options.mq_password,
        options.mq_alert_server,
        options.mq_port
    )
    mq_alert_conn = Connection(mq_conn_str)

    # Exchange for alerts we pass to plugins
    alert_exchange = Exchange(
        name=options.alert_exchange,
        type='topic',
        durable=True,
        delivery_mode=1
    )

    alert_exchange(mq_alert_conn).declare()

    # Queue for the exchange
    alert_queue = Queue(
        options.queue_name,
        exchange=alert_exchange,
        routing_key=options.alerttopic,
        durable=False,
        no_ack=(not options.mq_ack)
    )
    alert_queue(mq_alert_conn).declare()

    # consume our alerts.
    AlertConsumer(mq_alert_conn, alert_queue, alert_exchange, bot).run() 
Example #15
Source File: rackhd_amqp_od.py    From RackHD with Apache License 2.0
def __assure_exchange(self, connection, exchange_name, exchange_type):
        exchange = Exchange(exchange_name, type=exchange_type)
        bound_exchange = exchange(connection)
        bound_exchange.declare() 
Example #16
Source File: amqp.py    From RackHD with Apache License 2.0
def make_queue_obj(exchange, queue, routing_key, type='topic'):
    return Queue(queue,
                 Exchange(exchange, type=type),
                 routing_key=routing_key)
Example #17
Source File: alerttask.py    From MozDef with Mozilla Public License 2.0
def _configureKombu(self):
        """
        Configure kombu for amqp or sqs
        """
        try:
            connString = self.__build_conn_string()
            self.mqConn = kombu.Connection(connString)
            if connString.find('sqs') == 0:
                self.mqConn.transport_options['region'] = os.getenv('DEFAULT_AWS_REGION', 'us-west-2')
                self.mqConn.transport_options['is_secure'] = True
                self.alertExchange = kombu.Exchange(
                    name=RABBITMQ["alertexchange"], type="topic", durable=True
                )
                self.alertExchange(self.mqConn).declare()
                alertQueue = kombu.Queue(
                    os.getenv('OPTIONS_ALERTSQSQUEUEURL').split('/')[4], exchange=self.alertExchange
                )
            else:
                self.alertExchange = kombu.Exchange(
                    name=RABBITMQ["alertexchange"], type="topic", durable=True
                )
                self.alertExchange(self.mqConn).declare()
                alertQueue = kombu.Queue(
                    RABBITMQ["alertqueue"], exchange=self.alertExchange
                )
            alertQueue(self.mqConn).declare()
            self.mqproducer = self.mqConn.Producer(serializer="json")
            self.log.debug("Kombu configured")
        except Exception as e:
            self.log.error(
                "Exception while configuring kombu for alerts: {0}".format(e)
            ) 
Example #18
Source File: listener.py    From bugbug with Mozilla Public License 2.0
def __init__(self, user, password, callback):
        self.connection = Connection(CONNECTION_URL.format(user, password))
        self.queues = [
            Queue(
                name="queue/{}/pushes".format(user),
                exchange=Exchange(
                    "exchange/hgpushes/v2", type="topic", no_declare=True,
                ),
                routing_key="#",
                durable=True,
                auto_delete=True,
            )
        ]
        self.consumer = _GenericConsumer(self.connection, self.queues, callback) 
Example #19
Source File: alert_actions_worker.py    From MozDef with Mozilla Public License 2.0
def main():
    # connect and declare the message queue/kombu objects.
    # Event server/exchange/queue
    mqConnString = 'amqp://{0}:{1}@{2}:{3}//'.format(
        options.mquser,
        options.mqpassword,
        options.mqalertserver,
        options.mqport
    )
    mqAlertConn = Connection(mqConnString)

    # Exchange for alerts we pass to actions
    alertExchange = Exchange(name=options.alertExchange,
                             type='topic',
                             durable=True,
                             delivery_mode=1)

    alertExchange(mqAlertConn).declare()

    # Queue for the exchange
    alertQueue = Queue(options.queueName,
                       exchange=alertExchange,
                       routing_key=options.alerttopic,
                       durable=False,
                       no_ack=(not options.mqack))
    alertQueue(mqAlertConn).declare()

    # consume our alerts.
    alertConsumer(mqAlertConn, alertQueue, alertExchange).run() 
Example #20
Source File: kombu_manager.py    From python-socketio with MIT License
def _exchange(self):
        options = {'type': 'fanout', 'durable': False}
        options.update(self.exchange_options)
        return kombu.Exchange(self.channel, **options) 
Example #21
Source File: queue_consumer.py    From st2 with Apache License 2.0
def main(queue, exchange, routing_key='#'):
    exchange = Exchange(exchange, type='topic')
    queue = Queue(name=queue, exchange=exchange, routing_key=routing_key,
                  auto_delete=True)

    with transport_utils.get_connection() as connection:
        connection.connect()
        watcher = QueueConsumer(connection=connection, queue=queue)
        watcher.run() 
Example #22
Source File: queue_producer.py    From st2 with Apache License 2.0
def main(exchange, routing_key, payload):
    exchange = Exchange(exchange, type='topic')
    publisher = PoolPublisher()
    publisher.publish(payload=payload, exchange=exchange, routing_key=routing_key)
    eventlet.sleep(0.5) 
Example #23
Source File: pulse.py    From jx-sqlite with Mozilla Public License 2.0
def send(self, topic, message):
        """Publishes a pulse message to the proper exchange."""

        if not message:
            Log.error("Expecting a message")

        message._prepare()

        if not self.connection:
            self.connect()

        producer = Producer(
            channel=self.connection,
            exchange=Exchange(self.settings.exchange, type='topic'),
            routing_key=topic
        )

        # The message is actually a simple envelope format with a payload and
        # some metadata.
        final_data = Data(
            payload=message.data,
            _meta=set_default({
                'exchange': self.settings.exchange,
                'routing_key': message.routing_key,
                'serializer': self.settings.serializer,
                'sent': time_to_string(datetime.datetime.now(timezone(self.settings.broker_timezone))),
                'count': self.count
            }, message.metadata)
        )

        producer.publish(jsons.scrub(final_data), serializer=self.settings.serializer)
        self.count += 1 
Example #24
Source File: rabbit.py    From sfm-ui with MIT License
def __init__(self):
        self.exchange = Exchange(name=EXCHANGE,
                                 type="topic",
                                 durable=True) 
Example #25
Source File: redis.py    From dino with Apache License 2.0
def __init__(self, env, is_external_queue: bool):
        super().__init__(env, is_external_queue, queue_type='redis', logger=logger)

        conf = env.config

        bind_port = self.get_port()
        if bind_port is None:
            logger.info('skipping pubsub setup, no port specified')
            return

        queue_host = conf.get(ConfigKeys.HOST, domain=self.domain_key, default=None)
        exchange = conf.get(ConfigKeys.EXCHANGE, domain=self.domain_key, default='node_exchange')
        queue_db = conf.get(ConfigKeys.DB, domain=self.domain_key, default=0)
        queue_name = conf.get(ConfigKeys.QUEUE, domain=self.domain_key, default=None)

        if queue_name is None or len(queue_name.strip()) == 0:
            queue_name = 'node_queue_%s_%s_%s' % (
                conf.get(ConfigKeys.ENVIRONMENT),
                self.get_host(),
                bind_port
            )

        if self.is_external_queue:
            self.exchange = Exchange(exchange, type='direct')
        else:
            self.exchange = Exchange(exchange, type='fanout')

        self.queue_connection = Connection(queue_host, transport_options={'db': queue_db})
        logger.info('queue connection: {}'.format(str(self.queue_connection)))
        self.queue_name = queue_name
        self.queue = Queue(self.queue_name, self.exchange) 
Example #26
Source File: celery.py    From pspider with MIT License
def init_sdks():
    from app.register import _all_sdk_
    from app import tasks
    queues = []
    for s in _all_sdk_:
        s.app = capp
        name = s.__str__()
        log.info("load %s", name)
        tasks.__dict__[name] =  s.ptask(name, rate_limit='10/m')
        queues.append(Queue(name, exchange=Exchange(name, type='direct'), routing_key=name))

    capp.conf.update(
        CELERY_QUEUES=queues
    )
    capp.conf.update(CELERY_INCLUDE=['app.tasks']) 
Example #27
Source File: file_utils.py    From banzai with GNU General Public License v3.0
def post_to_archive_queue(image_path, broker_url, exchange_name='fits_files'):
    exchange = Exchange(exchange_name, type='fanout')
    with Connection(broker_url) as conn:
        producer = conn.Producer(exchange=exchange)
        producer.publish({'path': image_path})
        producer.release() 
Example #28
Source File: celery_test_utils.py    From celery-prometheus-exporter with MIT License
def get_celery_app(queue=None):
    app = celery.Celery(broker='memory://', backend='cache+memory://')

    if queue:
        app.conf.update(
            CELERY_DEFAULT_QUEUE=queue,
            CELERY_QUEUES=(
                Queue(queue, exchange=Exchange(queue, type='direct'), routing_key=queue),
            ),
            CELERY_ROUTES={
                'task1': {'queue': queue, 'routing_key': queue},
            }
        )

    return app 
Example #29
Source File: queue_manager.py    From GloboNetworkAPI with Apache License 2.0
def send(self):

        try:
            # Connection
            conn = Connection(self.broker)

            # Channel
            channel = conn.channel()

            # Exchange
            task_exchange = Exchange(self._exchange_name,
                                     type=self._queue_type)

            # Queues
            if self._queue_name:
                queue = Queue(name=self._queue_name, channel=channel,
                              exchange=task_exchange,
                              routing_key=self._routing_key)
                queue.declare()

            # Producer
            producer = Producer(exchange=task_exchange, channel=channel,
                                routing_key=self._routing_key)

            # Send message
            for message in self._msgs:
                serialized_message = json.dumps(message, ensure_ascii=False)
                producer.publish(serialized_message)

            conn.close()

        except Exception as e:

            self.log.error(
                u'QueueManagerError - Error on sending objects from queue.')
            self.log.debug(e)
            raise Exception(
                'QueueManagerError - Error on sending objects to queue.') 
Example #30
Source File: kombu.py    From zentral with Apache License 2.0
def extra_context(self, connection, channel):
        # TODO: migration! remove ?
        logger.info("PreprocessWorker migration")
        for routing_key in self.preprocessors.keys():
            legacy_exchange = Exchange(routing_key, type='fanout', channel=channel, durable=True)
            legacy_exchange.delete()
        yield