Python twisted.test.proto_helpers.MemoryReactorClock() Examples

The following are 30 code examples of twisted.test.proto_helpers.MemoryReactorClock(), drawn from open-source projects. The source file and project for each example are noted above it. You may also want to check out the other functions and classes available in the twisted.test.proto_helpers module.
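Before the examples, here is a minimal sketch of the pattern most of them follow: a MemoryReactorClock stands in for the real reactor, and its advance() method runs pending delayed calls synchronously so tests never wait on wall-clock time. The test class and method names below are hypothetical and not taken from any of the projects referenced on this page.

from twisted.internet.defer import Deferred
from twisted.test.proto_helpers import MemoryReactorClock
from twisted.trial import unittest


class MemoryReactorClockSketch(unittest.TestCase):
    """Hypothetical test demonstrating the common MemoryReactorClock pattern."""

    def test_advance_fires_delayed_call(self):
        clock = MemoryReactorClock()
        d = Deferred()
        # Schedule a callback five (simulated) seconds in the future.
        clock.callLater(5, d.callback, "done")
        # No real time passes, so nothing has fired yet.
        self.assertNoResult(d)
        # advance() runs the pending delayed call synchronously.
        clock.advance(5)
        self.assertEqual(self.successResultOf(d), "done")

Because MemoryReactorClock also provides the connection- and listener-recording APIs of MemoryReactor, the same object can be passed anywhere the code under test expects a reactor, which is how the afkak and Twisted examples below use it.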
Example #1
Source File: test_endpoints.py    From Safejumper-for-Desktop with GNU General Public License v2.0
def test_endpointConnectNonDefaultArgs(self):
        """
        The endpoint should pass its connectArgs parameter to the reactor's
        connect methods.
        """
        factory = object()

        mreactor = MemoryReactor()

        ep, expectedArgs, ignoredHost = self.createClientEndpoint(
            mreactor, factory,
            **self.connectArgs())

        ep.connect(factory)

        expectedClients = self.expectedClients(mreactor)

        self.assertEqual(len(expectedClients), 1)
        self.assertConnectArgs(expectedClients[0], expectedArgs) 
Example #2
Source File: test_producer.py    From afkak with Apache License 2.0
def test_producer_stop_during_request(self):
        """
        Test stopping the producer while it's waiting for a reply from the client
        """
        clock = MemoryReactorClock()
        client = Mock(reactor=clock)
        f = Failure(BrokerNotAvailableError())
        ret = [fail(f), Deferred()]
        client.send_produce_request.side_effect = ret
        client.topic_partitions = {self.topic: [0, 1, 2, 3]}
        client.metadata_error_for_topic.return_value = False
        msgs = [self.msg("one"), self.msg("two")]
        batch_n = 2

        producer = Producer(client, batch_every_n=batch_n, batch_send=True)
        d = producer.send_messages(self.topic, msgs=msgs)
        # At first, there's no result. Have to retry due to first failure
        self.assertNoResult(d)
        clock.advance(producer._retry_interval)

        producer.stop()
        self.failureResultOf(d, tid_CancelledError) 
Example #3
Source File: test_producer.py    From afkak with Apache License 2.0
def test_producer_send_messages_unknown_topic(self):
        clock = MemoryReactorClock()
        client = Mock(reactor=clock)
        ds = [Deferred() for _ in range(Producer.DEFAULT_REQ_ATTEMPTS)]
        client.load_metadata_for_topics.side_effect = ds
        client.metadata_error_for_topic.return_value = 3
        client.topic_partitions = {}
        msgs = [self.msg("one"), self.msg("two")]
        ack_timeout = 5

        producer = Producer(client, ack_timeout=ack_timeout)
        d = producer.send_messages(self.topic, msgs=msgs)
        # d is waiting on result from ds[0] for load_metadata_for_topics
        self.assertNoResult(d)

        # fire it with client still reporting no metadata for topic
        # The producer will retry the lookup DEFAULT_REQ_ATTEMPTS times...
        for i in range(Producer.DEFAULT_REQ_ATTEMPTS):
            ds[i].callback(None)
            # And then wait producer._retry_interval for a call back...
            clock.advance(producer._retry_interval + 0.01)
        self.failureResultOf(d, UnknownTopicOrPartitionError)
        self.assertFalse(client.send_produce_request.called)

        producer.stop() 
Example #4
Source File: test_producer.py    From afkak with Apache License 2.0
def test_producer_stop_waiting_to_retry(self):
        """
        Test stopping the producer while it's waiting to retry a request
        """
        clock = MemoryReactorClock()
        client = Mock(reactor=clock)
        f = Failure(BrokerNotAvailableError())
        ret = [fail(f)]
        client.send_produce_request.side_effect = ret
        client.topic_partitions = {self.topic: [0, 1, 2, 3]}
        client.metadata_error_for_topic.return_value = False
        msgs = [self.msg("one"), self.msg("two")]
        batch_n = 2

        producer = Producer(client, batch_every_n=batch_n, batch_send=True)
        d = producer.send_messages(self.topic, msgs=msgs)
        # At first, there's no result. Have to retry due to first failure
        self.assertNoResult(d)
        # Advance the clock, some, but not enough to retry
        clock.advance(producer._retry_interval / 2)
        # Stop the producer before the retry
        producer.stop()
        self.failureResultOf(d, tid_CancelledError) 
Example #5
Source File: test_client.py    From afkak with Apache License 2.0
def test_send_broker_unaware_request_bootstrap_fail(self):
        """
        Broker unaware requests fail with `KafkaUnavailableError` when bootstrap
        fails.

        This scenario makes two connection attempts in random order, one for
        each configured bootstrap host. Both fail.
        """
        client = KafkaClient(
            hosts=['kafka01:9092', 'kafka02:9092'],
            reactor=MemoryReactorClock(),
            # Every connection attempt will immediately fail due to this
            # endpoint, including attempts to bootstrap.
            endpoint_factory=FailureEndpoint,
        )

        d = client.load_metadata_for_topics('sometopic')

        self.failureResultOf(d).trap(KafkaUnavailableError) 
Example #6
Source File: test_consumer.py    From afkak with Apache License 2.0
def test_consumer_stop_during_fetch_retry(self):
        fetch_ds = [Deferred()]
        clock = MemoryReactorClock()
        mockclient = Mock(reactor=clock)
        mockclient.send_fetch_request.side_effect = fetch_ds
        consumer = Consumer(mockclient, 'committedTopic', 11, "FakeProc",
                            consumer_group="myGroup")
        d = consumer.start(0)
        with patch.object(kconsumer, 'log') as klog:
            f = Failure(UnknownError())
            fetch_ds[0].errback(f)
            klog.debug.assert_called_once_with(
                "%r: Failure fetching messages from kafka: %r",
                consumer, f)
        self.assertIsNone(consumer.stop())
        self.assertIsNone(self.successResultOf(d)) 
Example #7
Source File: test_agent.py    From Safejumper-for-Desktop with GNU General Public License v2.0
def createReactor(self):
        """
        Create a L{MemoryReactorClock} and give it some hostnames it can
        resolve.

        @return: a L{MemoryReactorClock}-like object with a slightly limited
            interface (only C{advance} and C{tcpClients} in addition to its
            formally-declared reactor interfaces), which can resolve a fixed
            set of domains.
        """
        mrc = MemoryReactorClock()
        drr = deterministicResolvingReactor(mrc, hostMap={
            u'example.com': [EXAMPLE_COM_IP],
            u'ipv6.example.com': [EXAMPLE_COM_V6_IP],
            u'example.net': [EXAMPLE_NET_IP],
            u'example.org': [EXAMPLE_ORG_IP],
            u'foo': [FOO_LOCAL_IP],
            u'foo.com': [FOO_COM_IP],
        })

        # Lots of tests were written expecting MemoryReactorClock and the
        # reactor seen by the SUT to be the same object.
        drr.tcpClients = mrc.tcpClients
        drr.advance = mrc.advance
        return drr 
Example #8
Source File: test_agent.py    From Safejumper-for-Desktop with GNU General Public License v2.0
def test_deprecatedDuckPolicy(self):
        """
        Passing something that duck-types I{like} a L{web client context
        factory <twisted.web.client.WebClientContextFactory>} - something that
        does not provide L{IPolicyForHTTPS} - to L{Agent} emits a
        L{DeprecationWarning} even if you don't actually C{import
        WebClientContextFactory} to do it.
        """
        def warnMe():
            client.Agent(MemoryReactorClock(),
                         "does-not-provide-IPolicyForHTTPS")
        warnMe()
        warnings = self.flushWarnings([warnMe])
        self.assertEqual(len(warnings), 1)
        [warning] = warnings
        self.assertEqual(warning['category'], DeprecationWarning)
        self.assertEqual(
            warning['message'],
            "'does-not-provide-IPolicyForHTTPS' was passed as the HTTPS "
            "policy for an Agent, but it does not provide IPolicyForHTTPS.  "
            "Since Twisted 14.0, you must pass a provider of IPolicyForHTTPS."
        ) 
Example #9
Source File: test_producer.py    From afkak with Apache License 2.0
def test_producer_cancel_request_in_batch(self):
        # Test cancelling a request before it's begun to be processed
        client = Mock(reactor=MemoryReactorClock())
        client.topic_partitions = {self.topic: [0, 1, 2, 3]}
        client.metadata_error_for_topic.return_value = False
        msgs = [self.msg("one"), self.msg("two")]
        msgs2 = [self.msg("three"), self.msg("four")]
        batch_n = 3

        producer = Producer(client, batch_every_n=batch_n, batch_send=True)
        d1 = producer.send_messages(self.topic, msgs=msgs)
        # Check that no request was sent
        self.assertFalse(client.send_produce_request.called)
        d1.cancel()
        self.failureResultOf(d1, CancelledError)
        d2 = producer.send_messages(self.topic, msgs=msgs2)
        # Check that still no request was sent
        self.assertFalse(client.send_produce_request.called)
        self.assertNoResult(d2)

        producer.stop() 
Example #10
Source File: test_consumer.py    From afkak with Apache License 2.0
def test_consumer_error_during_offset(self):
        topic = 'error_during_offset'
        part = 991
        reqs_ds = [Deferred(), Deferred()]
        clock = MemoryReactorClock()
        mockclient = Mock(reactor=clock)
        mockclient.send_offset_request.side_effect = reqs_ds
        consumer = Consumer(mockclient, topic, part, Mock())
        d = consumer.start(OFFSET_LATEST)
        # Make sure request for offset was made
        request = OffsetRequest(topic, part, OFFSET_LATEST, 1)
        mockclient.send_offset_request.assert_called_once_with([request])
        # Errback the first request
        f = Failure(KafkaUnavailableError())  # Perhaps kafka wasn't up yet...
        with patch.object(kconsumer, 'log'):
            reqs_ds[0].errback(f)
        # Advance the clock to trigger the 2nd request
        clock.advance(consumer.retry_delay + 1)  # fire the callLater
        self.assertEqual(2, mockclient.send_offset_request.call_count)

        # Stop the consumer to cleanup any outstanding operations
        self.assertIsNone(consumer.stop())
        self.assertIsNone(self.successResultOf(d)) 
Example #11
Source File: test_producer.py    From afkak with Apache License 2.0
def test_producer_complete_batch_send_unexpected_error(self):
        # Purely for coverage
        client = Mock(reactor=MemoryReactorClock())
        client.topic_partitions = {self.topic: [0, 1, 2, 3]}
        client.metadata_error_for_topic.return_value = False
        e = ValueError('test_producer_complete_batch_send_unexpected_error')
        client.send_produce_request.side_effect = e
        msgs = [self.msg("one"), self.msg("two")]

        producer = Producer(client)
        # FIXME: Don't use patch to test logging
        with patch.object(aProducer, 'log') as klog:
            producer.send_messages(self.topic, msgs=msgs)
            # The error 'e' gets wrapped in a failure with a traceback, so
            # we can't easily match the call exactly...
            klog.error.assert_called_once_with(
                'Failure detected in _complete_batch_send: %r', ANY, exc_info=ANY)

        producer.stop() 
Example #12
Source File: test_consumer.py    From afkak with Apache License 2.0
def test_consumer_commit_during_commit(self):
        clock = MemoryReactorClock()
        mockclient = Mock(reactor=clock)
        return_value = Deferred()
        mockclient.send_offset_commit_request.return_value = return_value
        the_group = 'The Cure'
        the_topic = 'test_consumer_commit_during_commit_topic'
        the_part = 1
        the_offset = 28616
        the_request = OffsetCommitRequest(
            the_topic, the_part, the_offset, TIMESTAMP_INVALID, None)
        # Create a consumer and muck with the state a bit...
        consumer = Consumer(mockclient, the_topic, the_part, Mock(), the_group)
        consumer._last_processed_offset = the_offset  # Fake processed msgs
        consumer._commit_looper = Mock()  # Mock a looping call to test reset
        d1 = consumer.commit()
        mockclient.send_offset_commit_request.assert_called_once_with(
            the_group, [the_request], consumer_id='', group_generation_id=-1)
        consumer._commit_looper.reset.assert_called_once_with()
        self.assertFalse(d1.called)
        d2 = consumer.commit()
        self.failureResultOf(d2, OperationInProgress) 
Example #13
Source File: test_endpoints.py    From Safejumper-for-Desktop with GNU General Public License v2.0
def test_endpointListenSuccess(self):
        """
        An endpoint can listen and returns a deferred that gets called back
        with a port instance.
        """
        mreactor = MemoryReactor()

        factory = object()

        ep, expectedArgs, expectedHost = self.createServerEndpoint(
            mreactor, factory)

        d = ep.listen(factory)

        receivedHosts = []

        def checkPortAndServer(port):
            receivedHosts.append(port.getHost())

        d.addCallback(checkPortAndServer)

        self.assertEqual(receivedHosts, [expectedHost])
        self.assertEqual(self.expectedServers(mreactor), [expectedArgs]) 
Example #14
Source File: test_endpoints.py    From Safejumper-for-Desktop with GNU General Public License v2.0
def test_endpointListenNonDefaultArgs(self):
        """
        The endpoint should pass its listenArgs parameter to the reactor's
        listen methods.
        """
        factory = object()

        mreactor = MemoryReactor()

        ep, expectedArgs, ignoredHost = self.createServerEndpoint(
            mreactor, factory,
            **self.listenArgs())

        ep.listen(factory)

        expectedServers = self.expectedServers(mreactor)

        self.assertEqual(expectedServers, [expectedArgs]) 
Example #15
Source File: test_producer.py    From afkak with Apache License 2.0
def test_producer_send_messages_no_retry_fail(self):
        client = Mock(reactor=MemoryReactorClock())
        f = Failure(BrokerNotAvailableError())
        client.send_produce_request.side_effect = [fail(f)]
        client.topic_partitions = {self.topic: [0, 1, 2, 3]}
        client.metadata_error_for_topic.return_value = False
        msgs = [self.msg("one"), self.msg("two")]

        producer = Producer(client, max_req_attempts=1)
        d = producer.send_messages(self.topic, msgs=msgs)
        # Check the expected request was sent
        msgSet = create_message_set(
            make_send_requests(msgs), producer.codec)
        req = ProduceRequest(self.topic, 0, msgSet)
        client.send_produce_request.assert_called_once_with(
            [req], acks=producer.req_acks, timeout=producer.ack_timeout,
            fail_on_error=False)
        self.failureResultOf(d, BrokerNotAvailableError)

        producer.stop() 
Example #16
Source File: test_consumer.py    From afkak with Apache License 2.0
def test_consumer_shutdown_nothing_processing(self):
        """
        Test the consumer shutdown happy path when no messages are currently
        being processed by the processor function (while waiting on fetch req).
        """
        clock = MemoryReactorClock()
        mockclient = Mock(reactor=clock)
        mockproc = Mock()
        consumer = Consumer(mockclient, 'snpTopic', 1, mockproc, 'snpGroup')
        start_d = consumer.start(1)
        # Ensure a fetch request was made
        request = FetchRequest('snpTopic', 1, 1, consumer.buffer_size)
        mockclient.send_fetch_request.assert_called_once_with(
            [request], max_wait_time=consumer.fetch_max_wait_time,
            min_bytes=consumer.fetch_min_bytes)
        # Shutdown the consumer
        shutdown_d = consumer.shutdown()
        # Ensure the stop was signaled
        self.assertIsNone(self.successResultOf(start_d))
        # Ensure the shutdown was signaled
        self.assertIsNone(self.successResultOf(shutdown_d))
        # Ensure the processor was never called
        self.assertFalse(mockproc.called) 
Example #17
Source File: test_endpoints.py    From Safejumper-for-Desktop with GNU General Public License v2.0
def test_interface(self):
        """
        The endpoint provides L{interfaces.IStreamServerEndpoint}.
        """
        factory = object()
        ep, ignoredArgs, ignoredDest = self.createServerEndpoint(
            MemoryReactor(), factory)
        self.assertTrue(verifyObject(interfaces.IStreamServerEndpoint, ep)) 
Example #18
Source File: test_endpoints.py    From Safejumper-for-Desktop with GNU General Public License v2.0
def test_idnaHostnameText(self):
        """
        A L{HostnameEndpoint} constructed with text will contain an
        IDNA-encoded bytes representation of that text.
        """
        endpoint = endpoints.HostnameEndpoint(
            deterministicResolvingReactor(MemoryReactor(), ['127.0.0.1']),
            self.sampleIDNAText, 80
        )
        self.assertEqual(endpoint._hostBytes, self.sampleIDNABytes)
        self.assertEqual(endpoint._hostText, self.sampleIDNAText) 
Example #19
Source File: test_endpoints.py    From Safejumper-for-Desktop with GNU General Public License v2.0
def test_endpointConnectingCancelled(self, advance=None):
        """
        Calling L{Deferred.cancel} on the L{Deferred} returned from
        L{IStreamClientEndpoint.connect} will cause it to be errbacked with a
        L{ConnectingCancelledError} exception.
        """
        mreactor = MemoryReactor()

        clientFactory = protocol.Factory()
        clientFactory.protocol = protocol.Protocol

        ep, ignoredArgs, address = self.createClientEndpoint(
            mreactor, clientFactory)

        d = ep.connect(clientFactory)
        if advance is not None:
            mreactor.advance(advance)
        d.cancel()
        # When canceled, the connector will immediately notify its factory that
        # the connection attempt has failed due to a UserError.
        attemptFactory = self.retrieveConnectedFactory(mreactor)
        attemptFactory.clientConnectionFailed(None, Failure(error.UserError()))
        # This should be a feature of MemoryReactor: <http://tm.tl/5630>.

        failure = self.failureResultOf(d)

        self.assertIsInstance(failure.value, error.ConnectingCancelledError)
        self.assertEqual(failure.value.address, address)
        self.assertTrue(mreactor.tcpClients[0][2]._connector.stoppedConnecting)
        self.assertEqual([], mreactor.getDelayedCalls()) 
Example #20
Source File: test_endpoints.py    From Safejumper-for-Desktop with GNU General Public License v2.0
def test_endpointConnectSuccessAfterIteration(self):
        """
        If a connection attempt initiated by
        L{HostnameEndpoint.connect} succeeds only after
        L{HostnameEndpoint} has exhausted the list of possible server
        addresses, the returned L{Deferred} will fire with the
        connected protocol instance and the endpoint will leave no
        delayed calls in the reactor.
        """
        proto = object()
        mreactor = MemoryReactor()

        clientFactory = object()

        ep, expectedArgs, ignoredDest = self.createClientEndpoint(
            mreactor, clientFactory)

        d = ep.connect(clientFactory)

        receivedProtos = []

        def checkProto(p):
            receivedProtos.append(p)

        d.addCallback(checkProto)

        factory = self.retrieveConnectedFactory(mreactor)

        mreactor.advance(0.3)

        factory._onConnection.callback(proto)
        self.assertEqual(receivedProtos, [proto])

        expectedClients = self.expectedClients(mreactor)

        self.assertEqual(len(expectedClients), 1)
        self.assertConnectArgs(expectedClients[0], expectedArgs)
        self.assertEqual([], mreactor.getDelayedCalls()) 
Example #21
Source File: test_endpoints.py    From Safejumper-for-Desktop with GNU General Public License v2.0
def test_endpointConnectingCancelled(self):
        """
        Calling L{Deferred.cancel} on the L{Deferred} returned from
        L{IStreamClientEndpoint.connect} is errbacked with an expected
        L{ConnectingCancelledError} exception.
        """
        mreactor = MemoryReactor()
        clientFactory = protocol.Factory()
        clientFactory.protocol = protocol.Protocol

        ep, ignoredArgs, address = self.createClientEndpoint(
            deterministicResolvingReactor(mreactor, ['127.0.0.1']),
            clientFactory
        )

        d = ep.connect(clientFactory)
        d.cancel()
        # When canceled, the connector will immediately notify its factory that
        # the connection attempt has failed due to a UserError.
        attemptFactory = self.retrieveConnectedFactory(mreactor)
        attemptFactory.clientConnectionFailed(None, Failure(error.UserError()))
        # This should be a feature of MemoryReactor: <http://tm.tl/5630>.

        failure = self.failureResultOf(d)

        self.assertIsInstance(failure.value, error.ConnectingCancelledError)
        self.assertEqual(failure.value.address, address)
        self.assertTrue(mreactor.tcpClients[0][2]._connector.stoppedConnecting)
        self.assertEqual([], mreactor.getDelayedCalls()) 
Example #22
Source File: test_producer.py    From afkak with Apache License 2.0
def test_producer_non_integral_batch_every_b(self):
        client = Mock(reactor=MemoryReactorClock())
        with self.assertRaises(TypeError):
            producer = Producer(client, batch_send=True, batch_every_b="10")
            producer.__repr__()  # pragma: no cover  # STFU pyflakes 
Example #23
Source File: test_producer.py    From afkak with Apache License 2.0
def test_producer_cancel_one_request_getting_topic(self):
        # Test cancelling a request after it's begun to be processed
        client = Mock(reactor=MemoryReactorClock())
        client.topic_partitions = {}
        ret = Deferred()
        client.load_metadata_for_topics.return_value = ret
        msgs = [self.msg("one"), self.msg("two")]
        msgs2 = [self.msg("three"), self.msg("four")]
        batch_n = 4

        producer = Producer(client, batch_every_n=batch_n, batch_send=True)
        d1 = producer.send_messages(self.topic, msgs=msgs)
        # Check that no request was sent
        self.assertFalse(client.send_produce_request.called)
        # This will trigger the metadata lookup
        d2 = producer.send_messages(self.topic, msgs=msgs2)
        d1.cancel()
        self.failureResultOf(d1, CancelledError)
        # Check that still no request was sent
        self.assertFalse(client.send_produce_request.called)
        self.assertNoResult(d2)
        # Setup the client's topics and trigger the metadata deferred
        client.topic_partitions = {self.topic: [0, 1, 2, 3]}
        client.metadata_error_for_topic.return_value = False
        ret.callback(None)
        # Expect that only the msgs2 messages were sent
        msgSet = create_message_set(
            make_send_requests(msgs2), producer.codec)
        req = ProduceRequest(self.topic, 1, msgSet)
        client.send_produce_request.assert_called_once_with(
            [req], acks=producer.req_acks, timeout=producer.ack_timeout,
            fail_on_error=False)

        producer.stop() 
Example #24
Source File: test_producer.py    From afkak with Apache License 2.0
def test_producer_cancel_getting_topic(self):
        # Test cancelling while waiting to retry getting metadata
        clock = MemoryReactorClock()
        client = Mock(reactor=clock)
        client.topic_partitions = {}  # start with no metadata
        rets = [Deferred(), Deferred()]
        client.load_metadata_for_topics.side_effect = rets
        msgs = [self.msg("one"), self.msg("two")]

        producer = Producer(client)
        d1 = producer.send_messages(self.topic, msgs=msgs)
        # Check that no request was sent
        self.assertFalse(client.send_produce_request.called)
        # Fire the result of load_metadata_for_topics, but
        # metadata_error_for_topic is still True, so it'll retry after delay
        rets[0].callback(None)
        # Advance the clock part way through the delay, not enough to retry
        clock.advance(producer._retry_interval / 2)

        # Cancel the request and make sure we got the CancelledError
        d1.cancel()
        self.failureResultOf(d1, CancelledError)
        # Check that still no request was sent
        self.assertFalse(client.send_produce_request.called)

        # Setup the client's topics and trigger the metadata deferred
        client.topic_partitions = {self.topic: [0, 1, 2, 3]}
        client.metadata_error_for_topic.return_value = False
        rets[1].callback(None)
        # Check that still no request was sent
        self.assertFalse(client.send_produce_request.called)
        # Advance the clock again to complete the delay
        clock.advance(producer._retry_interval)
        # Make sure the retry got reset
        self.assertEqual(producer._retry_interval,
                         producer._init_retry_interval)
        producer.stop() 
Example #25
Source File: test_producer.py    From afkak with Apache License 2.0
def test_producer_send_messages_batched(self):
        clock = MemoryReactorClock()
        client = Mock(reactor=clock)
        f = Failure(BrokerNotAvailableError())
        ret = [fail(f), succeed([ProduceResponse(self.topic, 0, 0, 10)])]
        client.send_produce_request.side_effect = ret
        client.topic_partitions = {self.topic: [0, 1, 2, 3]}
        client.metadata_error_for_topic.return_value = False
        msgs = [self.msg("one"), self.msg("two")]
        batch_n = 2

        producer = Producer(client, batch_every_n=batch_n, batch_send=True)
        d = producer.send_messages(self.topic, msgs=msgs)
        # Check the expected request was sent
        msgSet = create_message_set(
            make_send_requests(msgs), producer.codec)
        req = ProduceRequest(self.topic, ANY, msgSet)
        client.send_produce_request.assert_called_once_with(
            [req], acks=producer.req_acks, timeout=producer.ack_timeout,
            fail_on_error=False)
        # At first, there's no result. Have to retry due to first failure
        self.assertNoResult(d)
        clock.advance(producer._retry_interval)
        self.successResultOf(d)

        producer.stop() 
Example #26
Source File: test_producer.py    From afkak with Apache License 2.0
def test_producer_send_messages_None_for_null_msg(self):
        first_part = 23
        client = Mock(reactor=MemoryReactorClock())
        ret = Deferred()
        client.send_produce_request.return_value = ret
        client.topic_partitions = {self.topic: [first_part, 101, 102, 103]}
        client.metadata_error_for_topic.return_value = False
        msgs = [self.msg("one"), None, self.msg("two")]
        ack_timeout = 5

        producer = Producer(client, ack_timeout=ack_timeout)
        d = producer.send_messages(self.topic, msgs=msgs)
        # Check the expected request was sent
        msgSet = create_message_set(
            make_send_requests(msgs), producer.codec)
        req = ProduceRequest(self.topic, first_part, msgSet)
        client.send_produce_request.assert_called_once_with(
            [req], acks=producer.req_acks, timeout=ack_timeout,
            fail_on_error=False)
        # Check results when "response" fires
        self.assertNoResult(d)
        resp = [ProduceResponse(self.topic, first_part, 0, 10)]
        ret.callback(resp)
        result = self.successResultOf(d)
        self.assertEqual(result, resp[0])
        producer.stop() 
Example #27
Source File: test_producer.py    From afkak with Apache License 2.0
def test_producer_send_messages_no_acks(self):
        first_part = 19
        client = Mock(reactor=MemoryReactorClock())
        ret = Deferred()
        client.send_produce_request.return_value = ret
        client.topic_partitions = {self.topic: [first_part, 101, 102, 103]}
        client.metadata_error_for_topic.return_value = False
        msgs = [self.msg("one"), self.msg("two")]
        ack_timeout = 5

        producer = Producer(client, ack_timeout=ack_timeout,
                            req_acks=PRODUCER_ACK_NOT_REQUIRED)
        d = producer.send_messages(self.topic, msgs=msgs)
        # Check the expected request was sent
        msgSet = create_message_set(
            make_send_requests(msgs), producer.codec)
        req = ProduceRequest(self.topic, first_part, msgSet)
        client.send_produce_request.assert_called_once_with(
            [req], acks=producer.req_acks, timeout=ack_timeout,
            fail_on_error=False)
        # Check results when "response" fires
        self.assertNoResult(d)
        ret.callback([])
        result = self.successResultOf(d)
        self.assertEqual(result, None)
        producer.stop() 
Example #28
Source File: test_producer.py    From afkak with Apache License 2.0
def test_producer_send_messages(self):
        first_part = 23
        client = Mock(reactor=MemoryReactorClock())
        ret = Deferred()
        client.send_produce_request.return_value = ret
        client.topic_partitions = {self.topic: [first_part, 101, 102, 103]}
        client.metadata_error_for_topic.return_value = False
        msgs = [self.msg("one"), self.msg("two")]
        ack_timeout = 5

        producer = Producer(client, ack_timeout=ack_timeout)
        d = producer.send_messages(self.topic, msgs=msgs)
        # Check the expected request was sent
        msgSet = create_message_set(
            make_send_requests(msgs), producer.codec)
        req = ProduceRequest(self.topic, first_part, msgSet)
        client.send_produce_request.assert_called_once_with(
            [req], acks=producer.req_acks, timeout=ack_timeout,
            fail_on_error=False)
        # Check results when "response" fires
        self.assertNoResult(d)
        resp = [ProduceResponse(self.topic, first_part, 0, 10)]
        ret.callback(resp)
        result = self.successResultOf(d)
        self.assertEqual(result, resp[0])
        producer.stop() 
Example #29
Source File: test_producer.py    From afkak with Apache License 2.0
def setUp(self):
        client = Mock(reactor=MemoryReactorClock())
        self.producer = Producer(client)
        self.addCleanup(self.producer.stop) 
Example #30
Source File: test_consumer.py    From afkak with Apache License 2.0
def test_consumer_shutdown_when_not_started(self):
        """
        Test the consumer shutdown when the consumer was never started
        """
        clock = MemoryReactorClock()
        mockclient = Mock(reactor=clock)
        mockproc = Mock()
        consumer = Consumer(mockclient, 'cswns', 1, mockproc)
        shutdown_d = consumer.shutdown()
        the_fail = self.failureResultOf(shutdown_d, RestopError)
        self.assertEqual(
            the_fail.value.args,
            ("Shutdown called on non-running consumer",))