Python kafka.errors.KafkaTimeoutError() Examples
The following are 10 code examples of kafka.errors.KafkaTimeoutError(), drawn from open-source projects. The originating project, source file, and license are noted above each example. You may also want to check out the other functions and classes available in the kafka.errors module.
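Most of the examples below follow the same basic pattern: send() returns a future, and KafkaTimeoutError is raised either by send() itself (when metadata cannot be fetched or buffer space freed within max_block_ms) or while blocking on the future. A minimal sketch of that pattern, assuming kafka-python and a broker reachable at localhost:9092 (the topic name is illustrative):

from kafka import KafkaProducer
from kafka.errors import KafkaTimeoutError

producer = KafkaProducer(bootstrap_servers="localhost:9092")

try:
    # send() is asynchronous; get() blocks until the broker acknowledges
    # the record or the timeout (in seconds) expires.
    future = producer.send("my-topic", b"payload")
    record_metadata = future.get(timeout=10)
    print(record_metadata.topic, record_metadata.partition, record_metadata.offset)
except KafkaTimeoutError as exc:
    # Raised when metadata or buffer space is unavailable within
    # max_block_ms, or when get() times out waiting for the ack.
    print("delivery timed out:", exc)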
Example #1
Source File: kafka_logs_producer.py From quay with Apache License 2.0
def send(self, logentry):
    try:
        # send() has a (max_block_ms) timeout and get() has a (max_block_ms) timeout
        # for an upper bound of 2x(max_block_ms) before guaranteed delivery
        future = self._producer.send(
            self.topic, logentry.to_dict(), timestamp_ms=epoch_ms(logentry.datetime)
        )

        record_metadata = future.get(timeout=self.max_block_ms)
        assert future.succeeded()
    except KafkaTimeoutError as kte:
        logger.exception("KafkaLogsProducer timeout sending log to Kafka: %s", kte)
        raise LogSendException("KafkaLogsProducer timeout sending log to Kafka: %s" % kte)
    except KafkaError as ke:
        logger.exception("KafkaLogsProducer error sending log to Kafka: %s", ke)
        raise LogSendException("KafkaLogsProducer error sending log to Kafka: %s" % ke)
    except Exception as e:
        logger.exception("KafkaLogsProducer exception sending log to Kafka: %s", e)
        raise LogSendException("KafkaLogsProducer exception sending log to Kafka: %s" % e)
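As the inline comment notes, send() itself may block for up to max_block_ms (fetching metadata or waiting for buffer space) before the future even exists, and future.get() then waits up to the same limit again, for an upper bound of roughly 2 x max_block_ms before delivery is confirmed or an error is raised.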
Example #2
Source File: kafka.py From py-timeexecution with Apache License 2.0
def write(self, name, **data):
    """
    Write the metric to kafka

    Args:
        name (str): The name of the metric to write
        data (dict): Additional data to store with the metric
    """
    data["name"] = name
    if "timestamp" not in data:
        data["timestamp"] = datetime.utcnow()
    try:
        self.producer.send(topic=self.topic, value=data)
        self.producer.flush()
    except (KafkaTimeoutError, NoBrokersAvailable) as exc:
        logger.warning('writing metric %r failure %r', data, exc)
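Note that send() only enqueues the record asynchronously; the explicit flush() forces delivery before write() returns. The KafkaTimeoutError caught here is typically raised by send() itself when cluster metadata cannot be fetched within max_block_ms; either way, failures are downgraded to a warning so metric delivery never breaks the instrumented code.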
Example #3
Source File: pipelines.py From scrapy-cluster with MIT License
def process_item(self, item, spider):
    try:
        self.logger.debug("Processing item in KafkaPipeline")
        datum = dict(item)
        datum["timestamp"] = self._get_time()
        prefix = self.topic_prefix

        try:
            if self.use_base64:
                datum['body'] = base64.b64encode(bytes(datum['body'], 'utf-8'))
            message = ujson.dumps(datum, sort_keys=True)
        except:
            message = 'json failed to parse'

        firehose_topic = "{prefix}.crawled_firehose".format(prefix=prefix)
        future = self.producer.send(firehose_topic, message)
        future.add_callback(self._kafka_success, datum, spider)
        future.add_errback(self._kafka_failure, datum, spider)

        if self.appid_topics:
            appid_topic = "{prefix}.crawled_{appid}".format(
                prefix=prefix, appid=datum["appid"])
            future2 = self.producer.send(appid_topic, message)
            future2.add_callback(self._kafka_success, datum, spider)
            future2.add_errback(self._kafka_failure, datum, spider)
    except KafkaTimeoutError:
        self.logger.warning("Caught KafkaTimeoutError exception")

    return item
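Unlike Example #1, this pipeline never blocks on the returned future: delivery outcomes are handled asynchronously via add_callback()/add_errback(), so the except clause only catches a KafkaTimeoutError that send() raises synchronously (for example, when topic metadata cannot be fetched in time), and the item is returned to the Scrapy pipeline either way.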
Example #4
Source File: test_pipelines.py From scrapy-cluster with MIT License
def test_process_item(self, e):
    item = self._get_item()
    spider = MagicMock()
    spider.name = "link"

    # test normal send, no appid topics
    self.pipe.process_item(item, spider)
    expected = '{"appid":"app","attrs":{},"body":"text","crawlid":"crawlid","links":[],"request_headers":{},"response_headers":{},"response_url":"http:\\/\\/dumb.com","status_code":200,"status_msg":"OK","timestamp":"the time","url":"http:\\/\\/dumb.com"}'
    self.pipe.producer.send.assert_called_once_with('prefix.crawled_firehose', expected)
    self.pipe.producer.send.reset_mock()

    # test normal send, with appids
    item = self._get_item()
    self.pipe.appid_topics = True
    self.pipe.process_item(item, spider)
    self.pipe.producer.send.assert_called_with('prefix.crawled_app', expected)
    self.pipe.producer.send.reset_mock()

    # test base64 encode
    item = self._get_item()
    self.pipe.appid_topics = False
    self.pipe.use_base64 = True
    self.pipe.process_item(item, spider)
    expected = '{"appid":"app","attrs":{},"body":"dGV4dA==","crawlid":"crawlid","links":[],"request_headers":{},"response_headers":{},"response_url":"http:\\/\\/dumb.com","status_code":200,"status_msg":"OK","timestamp":"the time","url":"http:\\/\\/dumb.com"}'
    self.pipe.producer.send.assert_called_once_with('prefix.crawled_firehose', expected)

    # test kafka exception
    item = self._get_item()
    copy = deepcopy(item)
    copy['success'] = False
    copy['exception'] = 'traceback'

    # send should not crash the pipeline
    self.pipe.producer.send = MagicMock(side_effect=KafkaTimeoutError('bad kafka'))
    ret_val = self.pipe.process_item(item, spider)
Example #5
Source File: kafka.py From py-timeexecution with Apache License 2.0
def bulk_write(self, metrics):
    """
    Write multiple metrics to kafka in one request

    Args:
        metrics (list): The metrics to write
    """
    try:
        for metric in metrics:
            self.producer.send(self.topic, metric)
        self.producer.flush()
    except (KafkaTimeoutError, NoBrokersAvailable) as exc:
        logger.warning('bulk_write metrics %r failure %r', metrics, exc)
Example #6
Source File: test_kafka.py From py-timeexecution with Apache License 2.0
def test_write_error_warning(self, mocked_logger):
    transport_error = KafkaTimeoutError('mocked error')
    es_index_error_ctx = mock.patch(
        'time_execution.backends.kafka.KafkaProducer.send',
        side_effect=transport_error
    )
    frozen_time_ctx = freeze_time('2016-07-13')

    with es_index_error_ctx, frozen_time_ctx:
        self.backend.write(name='test:metric', value=None)
        mocked_logger.warning.assert_called_once_with(
            'writing metric %r failure %r',
            {'timestamp': datetime(2016, 7, 13), 'value': None, 'name': 'test:metric'},
            transport_error,
        )
Example #7
Source File: test_kafka.py From py-timeexecution with Apache License 2.0
def test_bulk_write_error(self, mocked_logger):
    transport_error = KafkaTimeoutError('mocked error')
    es_index_error_ctx = mock.patch(
        'time_execution.backends.kafka.KafkaProducer.send',
        side_effect=transport_error
    )
    metrics = [1, 2, 3]

    with es_index_error_ctx:
        self.backend.bulk_write(metrics)
        mocked_logger.warning.assert_called_once_with('bulk_write metrics %r failure %r', metrics, transport_error)
Example #8
Source File: kafkaProduce.py From openwhisk-package-kafka with Apache License 2.0
def getResultForException(e):
    if isinstance(e, KafkaTimeoutError):
        return {'error': 'Timed out communicating with Message Hub'}
    elif isinstance(e, AuthenticationFailedError):
        return {'error': 'Authentication failed'}
    elif isinstance(e, NoBrokersAvailable):
        return {'error': 'No brokers available. Check that your supplied brokers are correct and available.'}
    else:
        return {'error': '{}'.format(e)}
Example #9
Source File: messageHubProduce.py From openwhisk-package-kafka with Apache License 2.0
def getResultForException(e):
    if isinstance(e, KafkaTimeoutError):
        return {'error': 'Timed out communicating with Message Hub'}
    elif isinstance(e, AuthenticationFailedError):
        return {'error': 'Authentication failed'}
    elif isinstance(e, NoBrokersAvailable):
        return {'error': 'No brokers available. Check that your supplied brokers are correct and available.'}
    else:
        return {'error': '{}'.format(e)}
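For context, a sketch of how a helper like this is typically wrapped around the actual produce call. The surrounding action code is not reproduced on this page, so the wrapper and its names are illustrative only:

def produce(producer, topic, message):
    # Illustrative wrapper only: the real openwhisk-package-kafka action
    # code is not shown here.
    try:
        producer.send(topic, value=message)
        producer.flush()
        return {'success': True}
    except Exception as e:
        # Translate the exception into the action's error dict.
        return getResultForException(e)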
Example #10
Source File: __init__.py From karapace with Apache License 2.0
async def produce_message(self, *, topic: str, key: bytes, value: bytes, partition: int = None) -> dict:
    prod = None
    try:
        prod = await self.get_producer()
        result = await asyncio.wait_for(
            fut=prod.send_and_wait(topic, key=key, value=value, partition=partition),
            loop=self.loop,
            timeout=self.kafka_timeout
        )
        return {
            "offset": result.offset if result else -1,
            "partition": result.topic_partition.partition if result else 0
        }
    except AssertionError as e:
        self.log.exception("Invalid data")
        return {"error_code": 1, "error": str(e)}
    except (KafkaTimeoutError, asyncio.TimeoutError):
        self.log.exception("Timed out waiting for publisher")
        # timeouts are retriable
        return {"error_code": 1, "error": "timed out waiting to publish message"}
    except BrokerResponseError as e:
        self.log.exception(e)
        resp = {"error_code": 1, "error": e.description}
        if hasattr(e, "retriable") and e.retriable:
            resp["error_code"] = 2
        return resp
    finally:
        if prod:
            await self.producer_queue.put(prod)
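Note the error-code convention here: broker errors that Kafka reports as retriable are returned with error_code 2, signalling the caller that a retry may succeed, while timeouts and other failures map to error_code 1. The finally block returns the producer to producer_queue whether or not the send succeeded, so the producer pool is never leaked.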